# Database access

import copy
import operator
import re
import warnings

import exc
import dicts
import iters
import lists
from Proxy import Proxy
import rand
import sql_gen
import strings
import util

##### Exceptions

def get_cur_query(cur, input_query=None):
    raw_query = None
    if hasattr(cur, 'query'): raw_query = cur.query
    elif hasattr(cur, '_last_executed'): raw_query = cur._last_executed
    
    if raw_query != None: return raw_query
    else: return '[input] '+strings.ustr(input_query)

def _add_cursor_info(e, *args, **kw_args):
    '''For params, see get_cur_query()'''
    exc.add_msg(e, 'query: '+strings.ustr(get_cur_query(*args, **kw_args)))

class DbException(exc.ExceptionWithCause):
    def __init__(self, msg, cause=None, cur=None):
        exc.ExceptionWithCause.__init__(self, msg, cause, cause_newline=True)
        if cur != None: _add_cursor_info(self, cur)

class ExceptionWithName(DbException):
    def __init__(self, name, cause=None):
        DbException.__init__(self, 'for name: '+strings.as_tt(str(name)), cause)
        self.name = name

class ExceptionWithNameValue(DbException):
    def __init__(self, name, value, cause=None):
        DbException.__init__(self, 'for name: '+strings.as_tt(str(name))
            +'; value: '+strings.as_tt(repr(value)), cause)
        self.name = name
        self.value = value

class ConstraintException(DbException):
    def __init__(self, name, cols, cause=None):
        DbException.__init__(self, 'Violated '+strings.as_tt(name)
            +' constraint on columns: '+strings.as_tt(', '.join(cols)), cause)
        self.name = name
        self.cols = cols

class MissingCastException(DbException):
    def __init__(self, type_, col, cause=None):
        DbException.__init__(self, 'Missing cast to type '+strings.as_tt(type_)
            +' on column: '+strings.as_tt(col), cause)
        self.type = type_
        self.col = col

class NameException(DbException): pass

class DuplicateKeyException(ConstraintException): pass

class NullValueException(ConstraintException): pass

class FunctionValueException(ExceptionWithNameValue): pass

class DuplicateTableException(ExceptionWithName): pass

class DuplicateFunctionException(ExceptionWithName): pass

class EmptyRowException(DbException): pass

##### Warnings

class DbWarning(UserWarning): pass

##### Result retrieval

def col_names(cur): return (col[0] for col in cur.description)

def rows(cur): return iter(lambda: cur.fetchone(), None)

def consume_rows(cur):
    '''Used to fetch all rows so result will be cached'''
    iters.consume_iter(rows(cur))

def next_row(cur): return rows(cur).next()

def row(cur):
    row_ = next_row(cur)
    consume_rows(cur)
    return row_

def next_value(cur): return next_row(cur)[0]

def value(cur): return row(cur)[0]

def values(cur): return iters.func_iter(lambda: next_value(cur))

def value_or_none(cur):
    try: return value(cur)
    except StopIteration: return None
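
# A minimal usage sketch of the helpers above (the query and column names are
# hypothetical; the cursor comes from run_query(), defined below):
#     cur = run_query(db, 'SELECT id, name FROM widget')
#     print list(col_names(cur)) # -> ['id', 'name']
#     for row_ in rows(cur): print row_
# value() returns the first column of the first row (e.g. for COUNT(*)
# queries); value_or_none() does the same but returns None instead of raising
# StopIteration on an empty result.
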
##### Escaping

def esc_name_by_module(module, name):
    if module == 'psycopg2' or module == None: quote = '"'
    elif module == 'MySQLdb': quote = '`'
    else: raise NotImplementedError("Can't escape name for "+module+' database')
    return sql_gen.esc_name(name, quote)

def esc_name_by_engine(engine, name, **kw_args):
    return esc_name_by_module(db_engines[engine][0], name, **kw_args)

def esc_name(db, name, **kw_args):
    return esc_name_by_module(util.root_module(db.db), name, **kw_args)

def qual_name(db, schema, table):
    def esc_name_(name): return esc_name(db, name)
    table = esc_name_(table)
    if schema != None: return esc_name_(schema)+'.'+table
    else: return table
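
# Sketch: the quote character depends on the driver, assuming sql_gen.esc_name()
# wraps the name in the given quote (the names are hypothetical):
#     esc_name_by_module('psycopg2', 'my col') # -> '"my col"'
#     esc_name_by_module('MySQLdb', 'my col')  # -> '`my col`'
#     qual_name(db, 'public', 'plots')         # -> '"public"."plots"'
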
##### Database connections

db_config_names = ['engine', 'host', 'user', 'password', 'database', 'schemas']

db_engines = {
    'MySQL': ('MySQLdb', {'password': 'passwd', 'database': 'db'}),
    'PostgreSQL': ('psycopg2', {}),
}
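
# Example db_config (a sketch; all values are hypothetical):
#     db_config = {'engine': 'PostgreSQL', 'host': 'localhost', 'user': 'me',
#         'password': 'secret', 'database': 'mydb', 'schemas': 'public'}
# For MySQL, the mappings above rename 'password' to 'passwd' and 'database'
# to 'db' to match MySQLdb.connect()'s parameter names.
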
DatabaseErrors_set = set([DbException])
DatabaseErrors = tuple(DatabaseErrors_set)

def _add_module(module):
    DatabaseErrors_set.add(module.DatabaseError)
    global DatabaseErrors
    DatabaseErrors = tuple(DatabaseErrors_set)

def db_config_str(db_config):
    return db_config['engine']+' database '+db_config['database']

log_debug_none = lambda msg, level=2: None

class DbConn:
    def __init__(self, db_config, autocommit=False, caching=True,
        log_debug=log_debug_none, debug_temp=False):
        '''
        @param debug_temp Whether temporary objects should instead be permanent.
            This assists in debugging the internal objects used by the program.
        '''
        if debug_temp: autocommit = True
        
        self.db_config = db_config
        self.autocommit = autocommit
        self.caching = caching
        self.log_debug = log_debug
        self.debug = log_debug != log_debug_none
        self.debug_temp = debug_temp
        
        self.__db = None
        self.query_results = {}
        self._savepoint = 0
        self._notices_seen = set()
    
    def __getattr__(self, name):
        if name == '__dict__': raise Exception('getting __dict__')
        if name == 'db': return self._db()
        else: raise AttributeError()
    
    def __getstate__(self):
        state = copy.copy(self.__dict__) # shallow copy
        state['log_debug'] = None # don't pickle the debug callback
        state['_DbConn__db'] = None # don't pickle the connection
        return state
    
    def connected(self): return self.__db != None
    
    def _db(self):
        if self.__db == None:
            # Process db_config
            db_config = self.db_config.copy() # don't modify input!
            schemas = db_config.pop('schemas', None)
            module_name, mappings = db_engines[db_config.pop('engine')]
            module = __import__(module_name)
            _add_module(module)
            for orig, new in mappings.iteritems():
                try: util.rename_key(db_config, orig, new)
                except KeyError: pass
            
            # Connect
            self.__db = module.connect(**db_config)
            
            # Configure connection
            if hasattr(self.db, 'set_isolation_level'):
                import psycopg2.extensions
                self.db.set_isolation_level(
                    psycopg2.extensions.ISOLATION_LEVEL_READ_COMMITTED)
            if schemas != None:
                search_path = [self.esc_name(s) for s in schemas.split(',')]
                search_path.append(value(run_query(self, 'SHOW search_path',
                    log_level=4)))
                run_query(self, 'SET search_path TO '+(','.join(search_path)),
                    log_level=3)
        
        return self.__db
    
    class DbCursor(Proxy):
        def __init__(self, outer):
            Proxy.__init__(self, outer.db.cursor())
            self.outer = outer
            self.query_results = outer.query_results
            self.query_lookup = None
            self.result = []
        
        def execute(self, query):
            self._is_insert = query.startswith('INSERT')
            self.query_lookup = query
            try:
                try:
                    cur = self.inner.execute(query)
                    self.outer.do_autocommit()
                finally: self.query = get_cur_query(self.inner, query)
            except Exception, e:
                _add_cursor_info(e, self, query)
                self.result = e # cache the exception as the result
                self._cache_result()
                raise
            if self.rowcount == 0 and query.startswith('SELECT'): # empty SELECT
                consume_rows(self) # fetch all rows so result will be cached
            return cur
        
        def fetchone(self):
            row = self.inner.fetchone()
            if row != None: self.result.append(row)
            # otherwise, fetched all rows
            else: self._cache_result()
            return row
        
        def _cache_result(self):
            # For inserts, only cache exceptions since inserts are not
            # idempotent, but an invalid insert will always be invalid
            if self.query_results != None and (not self._is_insert
                or isinstance(self.result, Exception)):
                
                assert self.query_lookup != None
                self.query_results[self.query_lookup] = self.CacheCursor(
                    util.dict_subset(dicts.AttrsDictView(self),
                    ['query', 'result', 'rowcount', 'description']))
        
        class CacheCursor:
            def __init__(self, cached_result): self.__dict__ = cached_result
            
            def execute(self, *args, **kw_args):
                if isinstance(self.result, Exception): raise self.result
                # otherwise, result is a rows list
                self.iter = iter(self.result)
            
            def fetchone(self):
                try: return self.iter.next()
                except StopIteration: return None
    
    def esc_value(self, value):
        try: str_ = self.mogrify('%s', [value])
        except NotImplementedError, e:
            module = util.root_module(self.db)
            if module == 'MySQLdb':
                import _mysql
                str_ = _mysql.escape_string(value)
            else: raise e
        return strings.to_unicode(str_)
    
    def esc_name(self, name): return esc_name(self, name) # calls global func
    
    def std_code(self, str_):
        '''Standardizes SQL code.
        * Ensures that string literals are prefixed by `E`
        '''
        if str_.startswith("'"): str_ = 'E'+str_
        return str_
    
    def can_mogrify(self):
        module = util.root_module(self.db)
        return module == 'psycopg2'
    
    def mogrify(self, query, params=None):
        if self.can_mogrify(): return self.db.cursor().mogrify(query, params)
        else: raise NotImplementedError("Can't mogrify query")
    
    def print_notices(self):
        if hasattr(self.db, 'notices'):
            for msg in self.db.notices:
                if msg not in self._notices_seen:
                    self._notices_seen.add(msg)
                    self.log_debug(msg, level=2)
    
    def run_query(self, query, cacheable=False, log_level=2,
        debug_msg_ref=None):
        '''
        @param log_ignore_excs Handled by the module-level run_query() wrapper,
            which increases the log_level by 2 if the query throws one of these
            exceptions.
        @param debug_msg_ref If specified, the log message will be returned in
            this instead of being output. This allows you to filter log messages
            depending on the result of the query.
        '''
        assert query != None
        
        if not self.caching: cacheable = False
        used_cache = False
        
        def log_msg(query):
            if used_cache: cache_status = 'cache hit'
            elif cacheable: cache_status = 'cache miss'
            else: cache_status = 'non-cacheable'
            return 'DB query: '+cache_status+':\n'+strings.as_code(query, 'SQL')
        
        try:
            # Get cursor
            if cacheable:
                try:
                    cur = self.query_results[query]
                    used_cache = True
                except KeyError: cur = self.DbCursor(self)
            else: cur = self.db.cursor()
            
            # Log query
            if self.debug and debug_msg_ref == None: # log before running
                self.log_debug(log_msg(query), log_level)
            
            # Run query
            cur.execute(query)
        finally:
            self.print_notices()
            if self.debug and debug_msg_ref != None: # return after running
                debug_msg_ref[0] = log_msg(str(get_cur_query(cur, query)))
        
        return cur
    
    def is_cached(self, query): return query in self.query_results
    
    def with_autocommit(self, func):
        import psycopg2.extensions
        
        prev_isolation_level = self.db.isolation_level
        self.db.set_isolation_level(
            psycopg2.extensions.ISOLATION_LEVEL_AUTOCOMMIT)
        try: return func()
        finally: self.db.set_isolation_level(prev_isolation_level)
    
    def with_savepoint(self, func):
        savepoint = 'level_'+str(self._savepoint)
        self.run_query('SAVEPOINT '+savepoint, log_level=4)
        self._savepoint += 1
        try:
            try: return_val = func()
            finally:
                self._savepoint -= 1
                assert self._savepoint >= 0
        except:
            self.run_query('ROLLBACK TO SAVEPOINT '+savepoint, log_level=4)
            raise
        else:
            self.run_query('RELEASE SAVEPOINT '+savepoint, log_level=4)
            self.do_autocommit()
            return return_val
    
    def do_autocommit(self):
        '''Autocommits if outside savepoint'''
        assert self._savepoint >= 0
        if self.autocommit and self._savepoint == 0:
            self.log_debug('Autocommitting')
            self.db.commit()
    
    def col_info(self, col):
        table = sql_gen.Table('columns', 'information_schema')
        cols = ['data_type', 'column_default',
            cast(self, 'boolean', 'is_nullable')]
        
        conds = [('table_name', col.table.name), ('column_name', col.name)]
        schema = col.table.schema
        if schema != None: conds.append(('table_schema', schema))
        
        type_, default, nullable = row(select(self, table, cols, conds,
            order_by='table_schema', limit=1, log_level=4))
            # TODO: order_by search_path schema order
        default = sql_gen.as_Code(default, self)
        
        return sql_gen.TypedCol(col.name, type_, default, nullable)

connect = DbConn
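
# Putting it together (a sketch; the connection values and table are
# hypothetical):
#     db = connect({'engine': 'PostgreSQL', 'host': 'localhost', 'user': 'me',
#         'password': 'secret', 'database': 'mydb'}, autocommit=True)
#     print value(run_query(db, 'SELECT count(*) FROM plots'))
# Queries run with cacheable=True are memoized per connection in
# db.query_results, keyed by the exact query string.
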
##### Recoverable querying

def with_savepoint(db, func): return db.with_savepoint(func)

def run_query(db, query, recover=None, cacheable=False, log_level=2,
    log_ignore_excs=None, **kw_args):
    '''For params, see DbConn.run_query()'''
    if recover == None: recover = False
    if log_ignore_excs == None: log_ignore_excs = ()
    log_ignore_excs = tuple(log_ignore_excs)
    
    debug_msg_ref = None # usually, db.run_query() logs query before running it
    # But if filtering with log_ignore_excs, wait until after exception parsing
    if log_ignore_excs != () or not db.can_mogrify(): debug_msg_ref = [None]
    
    try:
        try:
            def run(): return db.run_query(query, cacheable, log_level,
                debug_msg_ref, **kw_args)
            if recover and not db.is_cached(query):
                return with_savepoint(db, run)
            else: return run() # don't need savepoint if cached
        except Exception, e:
            if not recover: raise # need savepoint to run index_cols()
            msg = exc.str_(e)
            
            match = re.search(r'duplicate key value violates unique constraint '
                r'"((_?[^\W_]+)_.+?)"', msg)
            if match:
                constraint, table = match.groups()
                try: cols = index_cols(db, table, constraint)
                except NotImplementedError: raise e
                else: raise DuplicateKeyException(constraint, cols, e)
            
            match = re.search(r'null value in column "(.+?)" violates not-null'
                r' constraint', msg)
            if match: raise NullValueException('NOT NULL', [match.group(1)], e)
            
            match = re.search(r'\b(?:invalid input (?:syntax|value)\b.*?'
                r'|date/time field value out of range): "(.+?)"\n'
                r'(?:(?s).*?)\bfunction "(.+?)"', msg)
            if match:
                value, name = match.groups()
                raise FunctionValueException(name, strings.to_unicode(value), e)
            
            match = re.search(r'column "(.+?)" is of type (.+?) but expression '
                r'is of type', msg)
            if match:
                col, type_ = match.groups()
                raise MissingCastException(type_, col, e)
            
            match = re.search(r'relation "(.+?)" already exists', msg)
            if match: raise DuplicateTableException(match.group(1), e)
            
            match = re.search(r'function "(.+?)" already exists', msg)
            if match: raise DuplicateFunctionException(match.group(1), e)
            
            raise # no specific exception raised
    except log_ignore_excs:
        log_level += 2
        raise
    finally:
        if debug_msg_ref != None and debug_msg_ref[0] != None:
            db.log_debug(debug_msg_ref[0], log_level)
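
# Sketch of how recovery works: with recover=True the query runs inside a
# savepoint, so a failure leaves the transaction usable, and the driver's
# generic error is re-raised as one of the typed exceptions above, e.g.:
#     try: run_query(db, insert_query, recover=True) # insert_query: hypothetical
#     except DuplicateKeyException, e: print e.name, e.cols
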
##### Basic queries

def next_version(name):
    version = 1 # first existing name was version 0
    match = re.match(r'^(.*)#(\d+)$', name)
    if match:
        name, version = match.groups()
        version = int(version)+1
    return sql_gen.add_suffix(name, '#'+str(version))
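
# Sketch: next_version() appends or increments a #N suffix (sql_gen.add_suffix
# is assumed to also handle identifier length limits):
#     next_version('widgets')   # -> 'widgets#1'
#     next_version('widgets#1') # -> 'widgets#2'
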
def lock_table(db, table, mode):
    table = sql_gen.as_Table(table)
    run_query(db, 'LOCK TABLE '+table.to_str(db)+' IN '+mode+' MODE')

def run_query_into(db, query, into=None, add_indexes_=False, **kw_args):
    '''Outputs a query to a temp table.
    For params, see run_query().
    '''
    if into == None: return run_query(db, query, **kw_args)
    
    assert isinstance(into, sql_gen.Table)
    
    kw_args['recover'] = True
    kw_args.setdefault('log_ignore_excs', (DuplicateTableException,))
    
    temp = not db.debug_temp # tables are permanent in debug_temp mode
    # "temporary tables cannot specify a schema name", so remove schema
    if temp: into.schema = None
    
    # Create table
    while True:
        create_query = 'CREATE'
        if temp: create_query += ' TEMP'
        create_query += ' TABLE '+into.to_str(db)+' AS\n'+query
        
        try:
            cur = run_query(db, create_query, **kw_args)
                # CREATE TABLE AS sets rowcount to # rows in query
            break
        except DuplicateTableException, e:
            into.name = next_version(into.name)
            # try again with next version of name
    
    if add_indexes_: add_indexes(db, into)
    
    return cur
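
# Sketch: run_query_into() materializes any SELECT as a temp table, retrying
# under a new name on collision (the names are hypothetical):
#     into = sql_gen.Table('plot_stats')
#     run_query_into(db, 'SELECT * FROM plots', into=into)
#     # on DuplicateTableException, into.name becomes 'plot_stats#1', etc.
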
order_by_pkey = object() # tells mk_select() to order by the pkey

distinct_on_all = object() # tells mk_select() to SELECT DISTINCT ON all columns

def mk_select(db, tables, fields=None, conds=None, distinct_on=[], limit=None,
    start=None, order_by=order_by_pkey, default_table=None):
    '''
    @param tables The single table to select from, or a list of tables to join
        together, with tables after the first being sql_gen.Join objects
    @param fields Use None to select all fields in the table
    @param conds WHERE conditions: [(compare_left_side, compare_right_side),...]
        * container can be any iterable type
        * compare_left_side: sql_gen.Code|str (for col name)
        * compare_right_side: sql_gen.ValueCond|literal value
    @param distinct_on The columns to SELECT DISTINCT ON, or distinct_on_all to
        use all columns
    @return query
    '''
    # Parse tables param
    if not lists.is_seq(tables): tables = [tables]
    tables = list(tables) # don't modify input! (list() copies input)
    table0 = sql_gen.as_Table(tables.pop(0)) # first table is separate
    
    # Parse other params
    if conds == None: conds = []
    elif dicts.is_dict(conds): conds = conds.items()
    conds = list(conds) # don't modify input! (list() copies input)
    assert limit == None or type(limit) == int
    assert start == None or type(start) == int
    if order_by is order_by_pkey:
        if distinct_on != []: order_by = None
        else: order_by = pkey(db, table0, recover=True)
    
    query = 'SELECT'
    
    def parse_col(col): return sql_gen.as_Col(col, default_table).to_str(db)
    
    # DISTINCT ON columns
    if distinct_on != []:
        query += '\nDISTINCT'
        if distinct_on is not distinct_on_all:
            query += ' ON ('+(', '.join(map(parse_col, distinct_on)))+')'
    
    # Columns
    query += '\n'
    if fields == None: query += '*'
    else:
        assert fields != []
        query += '\n, '.join(map(parse_col, fields))
    
    # Main table
    query += '\nFROM '+table0.to_str(db)
    
    # Add joins
    left_table = table0
    for join_ in tables:
        table = join_.table
        
        # Parse special values
        if join_.type_ is sql_gen.filter_out: # filter no match
            conds.append((sql_gen.Col(table_not_null_col(db, table), table),
                sql_gen.CompareCond(None, '~=')))
        
        query += '\n'+join_.to_str(db, left_table)
        
        left_table = table
    
    missing = True
    if conds != []:
        query += '\n'+sql_gen.combine_conds([sql_gen.ColValueCond(l, r)
            .to_str(db) for l, r in conds], 'WHERE')
        missing = False
    if order_by != None:
        query += '\nORDER BY '+sql_gen.as_Col(order_by, table0).to_str(db)
    if limit != None: query += '\nLIMIT '+str(limit); missing = False
    if start != None:
        if start != 0: query += '\nOFFSET '+str(start)
        missing = False
    if missing: warnings.warn(DbWarning(
        'SELECT statement missing a WHERE, LIMIT, or OFFSET clause: '+query))
    
    return query
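
# Sketch of the generated SQL (names hypothetical; exact quoting comes from
# sql_gen):
#     mk_select(db, 'plots', ['id'], {'siteid': 3}, limit=10, order_by=None)
# produces roughly:
#     SELECT
#     "id"
#     FROM "plots"
#     WHERE "siteid" = 3
#     LIMIT 10
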
def select(db, *args, **kw_args):
    '''For params, see mk_select() and run_query()'''
    recover = kw_args.pop('recover', None)
    cacheable = kw_args.pop('cacheable', True)
    log_level = kw_args.pop('log_level', 2)
    
    return run_query(db, mk_select(db, *args, **kw_args), recover, cacheable,
        log_level=log_level)

def mk_insert_select(db, table, cols=None, select_query=None, returning=None,
    embeddable=False):
    '''
    @param returning str|None An inserted column (such as pkey) to return
    @param embeddable Whether the query should be embeddable as a nested SELECT.
        Warning: If you set this and cacheable=True when the query is run, the
        query will be fully cached, not just if it raises an exception.
    '''
    table = sql_gen.remove_table_rename(sql_gen.as_Table(table))
    if cols == []: cols = None # no cols (all defaults) = unknown col names
    if cols != None:
        cols = [sql_gen.to_name_only_col(v, table).to_str(db) for v in cols]
    if select_query == None: select_query = 'DEFAULT VALUES'
    if returning != None: returning = sql_gen.as_Col(returning, table)
    
    # Build query
    first_line = 'INSERT INTO '+table.to_str(db)
    query = first_line
    if cols != None: query += '\n('+', '.join(cols)+')'
    query += '\n'+select_query
    
    if returning != None:
        query += '\nRETURNING '+sql_gen.to_name_only_col(returning).to_str(db)
    
    if embeddable:
        assert returning != None
        
        # Create function
        function_name = sql_gen.clean_name(first_line)
        return_type = 'SETOF '+returning.to_str(db)+'%TYPE'
        while True:
            try:
                function = sql_gen.TempFunction(function_name, db.debug_temp)
                
                function_query = '''\
CREATE FUNCTION '''+function.to_str(db)+'''()
RETURNS '''+return_type+'''
LANGUAGE sql
AS $$
'''+query+''';
$$;
'''
                run_query(db, function_query, recover=True, cacheable=True,
                    log_ignore_excs=(DuplicateFunctionException,))
                break # this version was successful
            except DuplicateFunctionException, e:
                function_name = next_version(function_name)
                # try again with next version of name
        
        # Return query that uses function
        func_table = sql_gen.NamedTable('f', sql_gen.FunctionCall(function),
            [returning]) # AS clause requires function alias
        return mk_select(db, func_table, start=0, order_by=None)
    
    return query
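
# Sketch of a generated INSERT ... SELECT (names hypothetical):
#     mk_insert_select(db, 'plots', ['siteid', 'name'],
#         'SELECT siteid, name FROM plots_staging', returning='id')
# produces roughly:
#     INSERT INTO "plots"
#     ("siteid", "name")
#     SELECT siteid, name FROM plots_staging
#     RETURNING "id"
# With embeddable=True, the INSERT is instead wrapped in a temporary SQL
# function and rewritten as a SELECT from that function, so it can be used as
# a nested query.
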
def insert_select(db, *args, **kw_args):
    '''For params, see mk_insert_select() and run_query_into()
    @param into sql_gen.Table with suggested name of temp table to put RETURNING
        values in
    '''
    into = kw_args.pop('into', None)
    if into != None: kw_args['embeddable'] = True
    recover = kw_args.pop('recover', None)
    cacheable = kw_args.pop('cacheable', True)
    log_level = kw_args.pop('log_level', 2)
    
    return run_query_into(db, mk_insert_select(db, *args, **kw_args), into,
        recover=recover, cacheable=cacheable, log_level=log_level)

default = sql_gen.default # tells insert() to use the default value for a column

def insert(db, table, row, *args, **kw_args):
    '''For params, see insert_select()'''
    if lists.is_seq(row): cols = None
    else:
        cols = row.keys()
        row = row.values()
    row = list(row) # ensure that "== []" works
    
    if row == []: query = None
    else: query = sql_gen.Values(row).to_str(db)
    
    return insert_select(db, table, cols, query, *args, **kw_args)
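
# Sketch: insert() accepts either a dict or a sequence covering all columns
# (the table and values are hypothetical):
#     insert(db, 'plots', {'siteid': 3, 'name': 'plot A'})
#     insert(db, 'plots', [default, 3, 'plot B']) # default -> column DEFAULT
#     insert(db, 'plots', []) # empty row inserts all defaults (DEFAULT VALUES)
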
def mk_update(db, table, changes=None, cond=None):
    '''
    @param changes [(col, new_value),...]
        * container can be any iterable type
        * col: sql_gen.Code|str (for col name)
        * new_value: sql_gen.Code|literal value
    @param cond sql_gen.Code WHERE condition. e.g. use sql_gen.*Cond objects.
    @return str query
    '''
    query = 'UPDATE '+sql_gen.as_Table(table).to_str(db)+'\nSET\n'
    query += ',\n'.join((sql_gen.to_name_only_col(col, table).to_str(db)+' = '
        +sql_gen.as_Value(new_value).to_str(db) for col, new_value in changes))
    if cond != None: query += '\nWHERE\n'+cond.to_str(db)
    
    return query
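
# Sketch of a generated UPDATE (names hypothetical; the WHERE rendering is
# delegated to sql_gen and assumed here):
#     mk_update(db, 'plots', [('name', 'unnamed')],
#         sql_gen.ColValueCond('name', None))
# produces roughly:
#     UPDATE "plots"
#     SET
#     "name" = 'unnamed'
#     WHERE
#     "name" IS NULL
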
def update(db, *args, **kw_args):
    '''For params, see mk_update() and run_query()'''
    recover = kw_args.pop('recover', None)
    
    return run_query(db, mk_update(db, *args, **kw_args), recover)

def last_insert_id(db):
    module = util.root_module(db.db)
    if module == 'psycopg2': return value(run_query(db, 'SELECT lastval()'))
    elif module == 'MySQLdb': return db.insert_id()
    else: return None

def mk_flatten_mapping(db, into, cols, preserve=[], as_items=False):
    '''Creates a mapping from original column names (which may have collisions)
    to names that will be distinct among the columns' tables.
    This is meant to be used for several tables that are being joined together.
    @param cols The columns to combine. Duplicates will be removed.
    @param into The table for the new columns.
    @param preserve [sql_gen.Col...] Columns not to rename. Note that these
        columns will be included in the mapping even if they are not in cols.
        The tables of the provided Col objects will be changed to into, so make
        copies of them if you want to keep the original tables.
    @param as_items Whether to return a list of dict items instead of a dict
    @return dict(orig_col=new_col, ...)
        * orig_col: sql_gen.Col(orig_col_name, orig_table)
        * new_col: sql_gen.Col(orig_col_name, into)
        * All mappings use the into table so its name can easily be
          changed for all columns at once
    '''
    cols = lists.uniqify(cols)
    
    items = []
    for col in preserve:
        orig_col = copy.copy(col)
        col.table = into
        items.append((orig_col, col))
    preserve = set(preserve)
    for col in cols:
        if col not in preserve:
            items.append((col, sql_gen.Col(str(col), into, col.srcs)))
    
    if not as_items: items = dict(items)
    return items

def flatten(db, into, joins, cols, limit=None, start=None, **kw_args):
    '''For params, see mk_flatten_mapping()
    @return See return value of mk_flatten_mapping()
    '''
    items = mk_flatten_mapping(db, into, cols, as_items=True, **kw_args)
    cols = [sql_gen.NamedCol(new.name, old) for old, new in items]
    run_query_into(db, mk_select(db, joins, cols, limit=limit, start=start),
        into=into, add_indexes_=True)
    return dict(items)
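
# Sketch: flatten() copies same-named columns from joined tables into one temp
# table under collision-free names (the tables/columns are hypothetical):
#     mapping = flatten(db, sql_gen.Table('in'), joins,
#         [sql_gen.Col('name', 'plots'), sql_gen.Col('name', 'sites')])
# Each original sql_gen.Col maps to its renamed column in "in"; the new name is
# str(orig_col), so the two name columns stay distinguishable.
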
def mk_track_data_error(db, errors_table, cols, value, error_code, error):
    assert cols != ()
    
    cols = map(sql_gen.to_name_only_col, cols)
    
    columns_cols = ['column']
    columns = sql_gen.NamedValues('columns', columns_cols,
        [[c.name] for c in cols])
    values_cols = ['value', 'error_code', 'error']
    values = sql_gen.NamedValues('values', values_cols,
        [value, error_code, error])
    
    select_cols = columns_cols+values_cols
    name_only_cols = map(sql_gen.to_name_only_col, select_cols)
    errors_table = sql_gen.NamedTable('errors', errors_table)
    joins = [columns, sql_gen.Join(values, type_='CROSS'),
        sql_gen.Join(errors_table, dict(zip(name_only_cols, select_cols)),
        sql_gen.filter_out)]
    
    return mk_insert_select(db, errors_table, name_only_cols,
        mk_select(db, joins, select_cols, order_by=None))

def track_data_error(db, errors_table, cols, *args, **kw_args):
    '''
    @param errors_table If None, does nothing.
    '''
    if errors_table == None or cols == (): return
    run_query(db, mk_track_data_error(db, errors_table, cols, *args, **kw_args),
        cacheable=True, log_level=4)

def cast(db, type_, col, errors_table=None):
    '''Casts an (unrenamed) column or value.
    If errors_table set and col has srcs, saves errors in errors_table (using
    col's srcs attr as the source columns) and converts errors to warnings.
    @param col str|sql_gen.Col|sql_gen.Literal
    @param errors_table None|sql_gen.Table|str
    '''
    col = sql_gen.as_Col(col)
    save_errors = (errors_table != None and isinstance(col, sql_gen.Col)
        and col.srcs != ())
    if not save_errors: # can't save errors
        return sql_gen.CustomCode(col.to_str(db)+'::'+type_) # just cast
    
    assert not isinstance(col, sql_gen.NamedCol)
    
    errors_table = sql_gen.as_Table(errors_table)
    srcs = map(sql_gen.to_name_only_col, col.srcs)
    function_name = str(sql_gen.FunctionCall(type_, *srcs))
    function = sql_gen.TempFunction(function_name, db.debug_temp)
    
    while True:
        # Create function definition
        query = '''\
CREATE FUNCTION '''+function.to_str(db)+'''(value text)
RETURNS '''+type_+'''
LANGUAGE plpgsql
STRICT
AS $$
BEGIN
    /* The explicit cast to the return type is needed to make the cast happen
    inside the try block. (Implicit casts to the return type happen at the end
    of the function, outside any block.) */
    RETURN value::'''+type_+''';
EXCEPTION
    WHEN data_exception THEN
        -- Save error in errors table.
        -- Insert the value and error for *each* source column.
'''+mk_track_data_error(db, errors_table, srcs,
    *map(sql_gen.CustomCode, ['value', 'SQLSTATE', 'SQLERRM']))+''';
        
        RAISE WARNING '%', SQLERRM;
        RETURN NULL;
END;
$$;
'''
        
        # Create function
        try:
            run_query(db, query, recover=True, cacheable=True,
                log_ignore_excs=(DuplicateFunctionException,))
            break # successful
        except DuplicateFunctionException:
            function.name = next_version(function.name)
            # try again with next version of name
    
    return sql_gen.FunctionCall(function, col)
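
# Sketch: the returned sql_gen.FunctionCall wraps col in the function created
# above, so a failed cast becomes a WARNING plus a NULL result, with the
# offending value logged per source column in errors_table instead of aborting
# the whole statement.
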
##### Database structure queries

def table_row_count(db, table, recover=None):
    return value(run_query(db, mk_select(db, table, [sql_gen.row_count],
        order_by=None, start=0), recover=recover, log_level=3))

def table_cols(db, table, recover=None):
    return list(col_names(select(db, table, limit=0, order_by=None,
        recover=recover, log_level=4)))

def pkey(db, table, recover=None):
    '''Assumed to be first column in table'''
    return table_cols(db, table, recover)[0]

not_null_col = 'not_null_col'

def table_not_null_col(db, table, recover=None):
    '''Name assumed to be the value of not_null_col. If not found, uses pkey.'''
    if not_null_col in table_cols(db, table, recover): return not_null_col
    else: return pkey(db, table, recover)

def index_cols(db, table, index):
    '''Can also use this for UNIQUE constraints, because a UNIQUE index is
    automatically created. When you don't know whether something is a UNIQUE
    constraint or a UNIQUE index, use this function.'''
    module = util.root_module(db.db)
    if module == 'psycopg2':
        return list(values(run_query(db, '''\
SELECT attname
FROM
(
        SELECT attnum, attname
        FROM pg_index
        JOIN pg_class index ON index.oid = indexrelid
        JOIN pg_class table_ ON table_.oid = indrelid
        JOIN pg_attribute ON attrelid = indrelid AND attnum = ANY (indkey)
        WHERE
            table_.relname = '''+db.esc_value(table)+'''
            AND index.relname = '''+db.esc_value(index)+'''
    UNION
        SELECT attnum, attname
        FROM
        (
            SELECT
                indrelid
                , (regexp_matches(indexprs, E':varattno (\\\\d+)', 'g'))[1]::int
                    AS indkey
            FROM pg_index
            JOIN pg_class index ON index.oid = indexrelid
            JOIN pg_class table_ ON table_.oid = indrelid
            WHERE
                table_.relname = '''+db.esc_value(table)+'''
                AND index.relname = '''+db.esc_value(index)+'''
        ) s
        JOIN pg_attribute ON attrelid = indrelid AND attnum = indkey
) s
ORDER BY attnum
'''
            , cacheable=True, log_level=4)))
    else: raise NotImplementedError("Can't list index columns for "+module+
        ' database')

def constraint_cols(db, table, constraint):
    module = util.root_module(db.db)
    if module == 'psycopg2':
        return list(values(run_query(db, '''\
SELECT attname
FROM pg_constraint
JOIN pg_class ON pg_class.oid = conrelid
JOIN pg_attribute ON attrelid = conrelid AND attnum = ANY (conkey)
WHERE
    relname = '''+db.esc_value(table)+'''
    AND conname = '''+db.esc_value(constraint)+'''
ORDER BY attnum
'''
            )))
    else: raise NotImplementedError("Can't list constraint columns for "+module+
        ' database')

row_num_col = '_row_num'

def add_index(db, exprs, table=None, unique=False, ensure_not_null=True):
    '''Adds an index on column(s) or expression(s) if it doesn't already exist.
    Currently, only function calls are supported as expressions.
    @param ensure_not_null If set, translates NULL values to sentinel values.
        This allows indexes to be used for comparisons where NULLs are equal.
    '''
    if not lists.is_seq(exprs): exprs = [exprs]
    
    # Parse exprs
    old_exprs = exprs[:]
    exprs = []
    cols = []
    for i, expr in enumerate(old_exprs):
        expr = copy.deepcopy(expr) # don't modify input!
        expr = sql_gen.as_Col(expr, table)
        
        # Handle nullable columns
        if ensure_not_null:
            try: expr = sql_gen.ensure_not_null(db, expr)
            except KeyError: pass # unknown type, so just create plain index
        
        # Extract col
        if isinstance(expr, sql_gen.FunctionCall):
            col = expr.args[0]
            expr = sql_gen.Expr(expr)
        else: col = expr
        assert isinstance(col, sql_gen.Col)
        
        # Extract table
        if table == None:
            assert sql_gen.is_table_col(col)
            table = col.table
        
        col.table = None
        
        exprs.append(expr)
        cols.append(col)
    
    table = sql_gen.as_Table(table)
    index = sql_gen.Table(str(sql_gen.Col(','.join(map(str, cols)), table)))
    
    str_ = 'CREATE'
    if unique: str_ += ' UNIQUE'
    str_ += ' INDEX '+index.to_str(db)+' ON '+table.to_str(db)+' ('+(
        ', '.join((v.to_str(db) for v in exprs)))+')'
    
    try: run_query(db, str_, recover=True, cacheable=True, log_level=3)
    except DuplicateTableException: pass # index already existed
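
# Sketch (names hypothetical): add_index(db, 'name', 'plots') creates an index
# whose name is derived from the column list, so repeating the call is a no-op
# (the DuplicateTableException is swallowed above). With ensure_not_null (the
# default), the indexed expression is wrapped by sql_gen.ensure_not_null(),
# letting the index serve comparisons that treat NULLs as equal via the
# sentinel value.
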
def add_pkey(db, table, cols=None, recover=None):
    '''Adds a primary key.
    @param cols [sql_gen.Col,...] The columns in the primary key.
        Defaults to the first column in the table.
    @pre The table must not already have a primary key.
    '''
    table = sql_gen.as_Table(table)
    if cols == None: cols = [pkey(db, table, recover)]
    col_strs = [sql_gen.to_name_only_col(v).to_str(db) for v in cols]
    
    run_query(db, 'ALTER TABLE '+table.to_str(db)+' ADD PRIMARY KEY ('
        +(', '.join(col_strs))+')', recover=True, cacheable=True, log_level=3,
        log_ignore_excs=(DuplicateTableException,))

already_indexed = object() # tells add_indexes() the pkey has already been added

def add_indexes(db, table, has_pkey=True):
    '''Adds an index on all columns in a table.
    @param has_pkey bool|already_indexed Whether a pkey instead of a regular
        index should be added on the first column.
        * If already_indexed, the pkey is assumed to have already been added
    '''
    cols = table_cols(db, table)
    if has_pkey:
        if has_pkey is not already_indexed: add_pkey(db, table)
        cols = cols[1:]
    for col in cols: add_index(db, col, table)

def add_row_num(db, table):
    '''Adds a row number column to a table. Its name is in row_num_col. It will
    be the primary key.'''
    table = sql_gen.as_Table(table).to_str(db)
    run_query(db, 'ALTER TABLE '+table+' ADD COLUMN '+row_num_col
        +' serial NOT NULL PRIMARY KEY', log_level=3)

def drop_table(db, table):
    table = sql_gen.as_Table(table)
    return run_query(db, 'DROP TABLE IF EXISTS '+table.to_str(db)+' CASCADE')

def create_table(db, table, cols, has_pkey=True, col_indexes=True):
    '''Creates a table.
    @param cols [sql_gen.TypedCol,...] The column names and types
    @param has_pkey If set, the first column becomes the primary key.
    @param col_indexes bool|[ref]
        * If True, indexes will be added on all non-pkey columns.
        * If a list reference, [0] will be set to a function to do this.
          This can be used to delay index creation until the table is populated.
    '''
    table = sql_gen.as_Table(table)
    
    if has_pkey:
        cols[0] = pkey = copy.copy(cols[0]) # don't modify input!
        pkey.constraints = 'PRIMARY KEY'
    
    str_ = 'CREATE TABLE '+table.to_str(db)+' (\n'
    str_ += '\n, '.join(v.to_str(db) for v in cols)
    str_ += '\n);\n'
    run_query(db, str_, cacheable=True, log_level=2)
    
    # Add indexes
    if has_pkey: has_pkey = already_indexed
    def add_indexes_(): add_indexes(db, table, has_pkey)
    if isinstance(col_indexes, list): col_indexes[0] = add_indexes_ # defer
    elif col_indexes: add_indexes_() # add now
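
# Sketch of deferred index creation (column types/names hypothetical):
#     col_indexes = [None]
#     create_table(db, 'plots', [sql_gen.TypedCol('id', 'serial'),
#         sql_gen.TypedCol('name', 'text')], col_indexes=col_indexes)
#     # ... bulk-load the table ...
#     col_indexes[0]() # now add the per-column indexes
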
def vacuum(db, table):
    table = sql_gen.as_Table(table)
    db.with_autocommit(lambda: run_query(db, 'VACUUM ANALYZE '+table.to_str(db),
        log_level=3))

def truncate(db, table, schema='public'):
    table = sql_gen.as_Table(table, schema)
    return run_query(db, 'TRUNCATE '+table.to_str(db)+' CASCADE')

def tables(db, schema_like='public', table_like='%', exact=False):
    if exact: compare = '='
    else: compare = 'LIKE'
    
    module = util.root_module(db.db)
    if module == 'psycopg2':
        conds = [('schemaname', sql_gen.CompareCond(schema_like, compare)),
            ('tablename', sql_gen.CompareCond(table_like, compare))]
        return values(select(db, 'pg_tables', ['tablename'], conds,
            order_by='tablename', log_level=4))
    elif module == 'MySQLdb':
        return values(run_query(db, 'SHOW TABLES LIKE '+db.esc_value(table_like)
            , cacheable=True, log_level=4))
    else: raise NotImplementedError("Can't list tables for "+module+' database')

def table_exists(db, table):
    table = sql_gen.as_Table(table)
    return list(tables(db, table.schema, table.name, exact=True)) != []

def function_exists(db, function):
    function = sql_gen.as_Function(function)
    
    info_table = sql_gen.Table('routines', 'information_schema')
    conds = [('routine_name', function.name)]
    schema = function.schema
    if schema != None: conds.append(('routine_schema', schema))
    # Exclude trigger functions, since they cannot be called directly
    conds.append(('data_type', sql_gen.CompareCond('trigger', '!=')))
    
    return list(values(select(db, info_table, ['routine_name'], conds,
        order_by='routine_schema', limit=1, log_level=4))) != []
        # TODO: order_by search_path schema order

def errors_table(db, table, if_exists=True):
    '''
    @param if_exists If set, returns None if the errors table doesn't exist
    @return None|sql_gen.Table
    '''
    table = sql_gen.as_Table(table)
    if table.srcs != (): table = table.srcs[0]
    
    errors_table = sql_gen.suffixed_table(table, '.errors')
    if if_exists and not table_exists(db, errors_table): return None
    return errors_table

##### Database management

def empty_db(db, schema='public', **kw_args):
    '''For kw_args, see tables()'''
    for table in tables(db, schema, **kw_args): truncate(db, table, schema)

##### Heuristic queries

def put(db, table, row, pkey_=None, row_ct_ref=None):
    '''Recovers from errors.
    Only works under PostgreSQL (uses INSERT RETURNING).
    '''
    row = sql_gen.ColDict(db, table, row)
    if pkey_ == None: pkey_ = pkey(db, table, recover=True)
    
    try:
        cur = insert(db, table, row, pkey_, recover=True)
        if row_ct_ref != None and cur.rowcount >= 0:
            row_ct_ref[0] += cur.rowcount
        return value(cur)
    except DuplicateKeyException, e:
        row = sql_gen.ColDict(db, table,
            util.dict_subset_right_join(row, e.cols))
        return value(select(db, table, [pkey_], row, recover=True))

def get(db, table, row, pkey, row_ct_ref=None, create=False):
    '''Recovers from errors'''
    try: return value(select(db, table, [pkey], row, limit=1, recover=True))
    except StopIteration:
        if not create: raise
        return put(db, table, row, pkey, row_ct_ref) # insert new row
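
# Sketch of the get-or-create pattern these implement (names hypothetical):
#     plot_id = get(db, 'plots', {'siteid': 3, 'name': 'plot A'}, 'id',
#         create=True)
# put() relies on run_query()'s DuplicateKeyException parsing: on a duplicate
# key it re-selects the existing row by the violated constraint's columns
# instead of inserting.
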
def is_func_result(col):
    return col.table.name.find('(') >= 0 and col.name == 'result'

def into_table_name(out_table, in_tables0, mapping, is_func):
    def in_col_str(in_col):
        in_col = sql_gen.remove_col_rename(in_col)
        if isinstance(in_col, sql_gen.Col):
            table = in_col.table
            if table == in_tables0:
                in_col = sql_gen.to_name_only_col(in_col)
            elif is_func_result(in_col): in_col = table # omit col name
        return str(in_col)
    
    str_ = str(out_table)
    if is_func:
        str_ += '('
        
        try: value_in_col = mapping['value']
        except KeyError:
            str_ += ', '.join((str(k)+'='+in_col_str(v)
                for k, v in mapping.iteritems()))
        else: str_ += in_col_str(value_in_col)
        
        str_ += ')'
    else:
        out_col = 'rank'
        try: in_col = mapping[out_col]
        except KeyError: str_ += '_pkeys'
        else: # has a rank column, so hierarchical
            str_ += '['+str(out_col)+'='+in_col_str(in_col)+']'
    return str_

def put_table(db, out_table, in_tables, mapping, row_ct_ref=None, into=None,
    default=None, is_func=False, on_error=exc.raise_):
    '''Recovers from errors.
    Only works under PostgreSQL (uses INSERT RETURNING).
    IMPORTANT: Must be run at the *beginning* of a transaction.
    @param in_tables The main input table to select from, followed by a list of
        tables to join with it using the main input table's pkey
    @param mapping dict(out_table_col=in_table_col, ...)
        * out_table_col: str (*not* sql_gen.Col)
        * in_table_col: sql_gen.Col|literal-value
    @param into The table to contain the output and input pkeys.
        Defaults to `out_table.name+'_pkeys'`.
    @param default The *output* column to use as the pkey for missing rows.
        If this output column does not exist in the mapping, uses None.
    @param is_func Whether out_table is the name of a SQL function, not a table
    @return sql_gen.Col Where the output pkeys are made available
    '''
    out_table = sql_gen.as_Table(out_table)
    
    def log_debug(msg): db.log_debug(msg, level=1.5)
    def col_ustr(str_):
        return strings.repr_no_u(sql_gen.remove_col_rename(str_))
    
    log_debug('********** New iteration **********')
    log_debug('Inserting these input columns into '+strings.as_tt(
        out_table.to_str(db))+':\n'+strings.as_table(mapping, ustr=col_ustr))
    
    log_debug('Locking output table to prevent concurrent duplicate keys')
    # Must happen before any SELECT query on the table to avoid lock upgrades
    lock_table(db, out_table, 'EXCLUSIVE')
    
    out_pkey = pkey(db, out_table, recover=True)
    out_pkey_col = sql_gen.as_Col(out_pkey, out_table)
    
    if mapping == {}: # need at least one column for INSERT SELECT
        mapping = {out_pkey: None} # ColDict will replace with default value
    
    # Create input joins from list of input tables
    in_tables_ = in_tables[:] # don't modify input!
    in_tables0 = in_tables_.pop(0) # first table is separate
    errors_table_ = errors_table(db, in_tables0)
    in_pkey = pkey(db, in_tables0, recover=True)
    in_pkey_col = sql_gen.as_Col(in_pkey, in_tables0)
    input_joins = [in_tables0]+[sql_gen.Join(v,
        {in_pkey: sql_gen.join_same_not_null}) for v in in_tables_]
    
    if into == None:
        into = into_table_name(out_table, in_tables0, mapping, is_func)
    into = sql_gen.as_Table(into)
    
    # Set column sources
    in_cols = filter(sql_gen.is_table_col, mapping.values())
    for col in in_cols:
        if col.table == in_tables0: col.set_srcs(sql_gen.src_self)
    
    log_debug('Joining together input tables into temp table')
    # Place in new table for speed and so don't modify input if values edited
    in_table = sql_gen.Table('in')
    mapping = dicts.join(mapping, flatten(db, in_table, input_joins, in_cols,
        preserve=[in_pkey_col], start=0))
    input_joins = [in_table]
    db.log_debug('Temp table: '+strings.as_tt(in_table.to_str(db)), level=2)
    
    mapping = sql_gen.ColDict(db, out_table, mapping)
        # after applying dicts.join() because that returns a plain dict
    
    # Resolve default value column
    if default != None:
        try: default = mapping[default]
        except KeyError:
            db.log_debug('Default value column '
                +strings.as_tt(strings.repr_no_u(default))
                +' does not exist in mapping, falling back to None', level=2.1)
            default = None
    
    pkeys_names = [in_pkey, out_pkey]
    pkeys_cols = [in_pkey_col, out_pkey_col]
    
    pkeys_table_exists_ref = [False]
    def insert_into_pkeys(joins, cols):
        query = mk_select(db, joins, cols, order_by=None, start=0)
        if pkeys_table_exists_ref[0]:
            insert_select(db, into, pkeys_names, query)
        else:
            run_query_into(db, query, into=into)
            pkeys_table_exists_ref[0] = True
    
    limit_ref = [None]
    conds = set()
    distinct_on = sql_gen.ColDict(db, out_table)
    def mk_main_select(joins, cols):
        distinct_on_cols = [c.to_Col() for c in distinct_on.values()]
        return mk_select(db, joins, cols, conds, distinct_on_cols,
            limit=limit_ref[0], start=0)
    
    exc_strs = set()
    def log_exc(e):
        e_str = exc.str_(e, first_line_only=True)
        log_debug('Caught exception: '+e_str)
        assert e_str not in exc_strs # avoid infinite loops
        exc_strs.add(e_str)
    
    def remove_all_rows():
        log_debug('Ignoring all rows')
        limit_ref[0] = 0 # just create an empty pkeys table
    
    def ignore(in_col, value, e):
        track_data_error(db, errors_table_, in_col.srcs, value, e.cause.pgcode,
            e.cause.pgerror)
        log_debug('Ignoring rows with '+strings.as_tt(repr(in_col))+' = '
            +strings.as_tt(repr(value)))
    
    def remove_rows(in_col, value, e):
        ignore(in_col, value, e)
        cond = (in_col, sql_gen.CompareCond(value, '!='))
        assert cond not in conds # avoid infinite loops
        conds.add(cond)
    
    def invalid2null(in_col, value, e):
        ignore(in_col, value, e)
        update(db, in_table, [(in_col, None)],
            sql_gen.ColValueCond(in_col, value))
    
    def insert_pkeys_table(which):
        return sql_gen.Table(sql_gen.add_suffix(in_table.name,
            '_insert_'+which+'_pkeys'))
    insert_out_pkeys = insert_pkeys_table('out')
    insert_in_pkeys = insert_pkeys_table('in')
    
    # Do inserts and selects
    join_cols = sql_gen.ColDict(db, out_table)
    while True:
        if limit_ref[0] == 0: # special case
            log_debug('Creating an empty pkeys table')
            cur = run_query_into(db, mk_select(db, out_table, [out_pkey],
                limit=limit_ref[0]), into=insert_out_pkeys)
            break # don't do main case
        
        has_joins = join_cols != {}
        
        log_debug('Trying to insert new rows')
        
        # Prepare to insert new rows
        insert_joins = input_joins[:] # don't modify original!
        insert_args = dict(recover=True, cacheable=False)
        if has_joins:
            insert_joins.append(sql_gen.Join(out_table, join_cols,
                sql_gen.filter_out))
        else:
            insert_args.update(dict(returning=out_pkey, into=insert_out_pkeys))
        main_select = mk_main_select(insert_joins, mapping.values())
        
        try:
            cur = insert_select(db, out_table, mapping.keys(), main_select,
                **insert_args)
            break # insert successful
        except DuplicateKeyException, e:
            log_exc(e)
            
            old_join_cols = join_cols.copy()
            distinct_on.update(util.dict_subset(mapping, e.cols))
            join_cols.update(util.dict_subset_right_join(mapping, e.cols))
            log_debug('Ignoring existing rows, comparing on these columns:\n'
                +strings.as_inline_table(join_cols, ustr=col_ustr))
            assert join_cols != old_join_cols # avoid infinite loops
        except NullValueException, e:
            log_exc(e)
            
            out_col, = e.cols
            try: in_col = mapping[out_col]
            except KeyError:
                log_debug('Missing mapping for NOT NULL column '+out_col)
                remove_all_rows()
            else: remove_rows(in_col, None, e)
        except FunctionValueException, e:
            log_exc(e)
            
            func_name = e.name
            value = e.value
            for out_col, in_col in mapping.iteritems():
                in_col = sql_gen.unwrap_func_call(in_col, func_name)
                invalid2null(in_col, value, e)
        except MissingCastException, e:
            log_exc(e)
            
            out_col = e.col
            type_ = e.type
            
            log_debug('Casting '+strings.as_tt(out_col)+' input to '
                +strings.as_tt(type_))
            def wrap_func(col): return cast(db, type_, col, errors_table_)
            mapping[out_col] = sql_gen.wrap(wrap_func, mapping[out_col])
        except DatabaseErrors, e:
            log_exc(e)
            
            log_debug('No handler for exception')
            on_error(e)
            remove_all_rows()
        # after exception handled, rerun loop with additional constraints
    
    if row_ct_ref != None and cur.rowcount >= 0:
        row_ct_ref[0] += cur.rowcount
    
    if has_joins:
        select_joins = input_joins+[sql_gen.Join(out_table, join_cols)]
        log_debug('Getting output table pkeys of existing/inserted rows')
        insert_into_pkeys(select_joins, pkeys_cols)
    else:
        add_row_num(db, insert_out_pkeys) # for joining with input pkeys
        
        log_debug('Getting input table pkeys of inserted rows')
        run_query_into(db, mk_main_select(input_joins, [in_pkey]),
            into=insert_in_pkeys)
        add_row_num(db, insert_in_pkeys) # for joining with output pkeys
        
        assert table_row_count(db, insert_out_pkeys) == table_row_count(db,
            insert_in_pkeys)
        
        log_debug('Combining output and input pkeys in inserted order')
        pkey_joins = [insert_in_pkeys, sql_gen.Join(insert_out_pkeys,
            {row_num_col: sql_gen.join_same_not_null})]
        insert_into_pkeys(pkey_joins, pkeys_names)
    
    db.log_debug('Adding pkey on pkeys table to enable fast joins', level=2.5)
    add_pkey(db, into)
    
    log_debug('Setting pkeys of missing rows to '+strings.as_tt(repr(default)))
    missing_rows_joins = input_joins+[sql_gen.Join(into,
        {in_pkey: sql_gen.join_same_not_null}, sql_gen.filter_out)]
        # must use join_same_not_null or query will take forever
    insert_into_pkeys(missing_rows_joins,
        [in_pkey_col, sql_gen.NamedCol(out_pkey, default)])
    
    assert table_row_count(db, into) == table_row_count(db, in_table)
    
    srcs = []
    if is_func: srcs = sql_gen.cols_srcs(in_cols)
    return sql_gen.Col(out_pkey, into, srcs)
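
# Sketch of a put_table() call (the tables and mapping are hypothetical):
#     pkeys_col = put_table(db, 'plots', [sql_gen.Table('in_plots')],
#         {'siteid': sql_gen.Col('site_id', 'in_plots'),
#         'name': sql_gen.Col('plot_name', 'in_plots')})
# The returned sql_gen.Col points into the generated pkeys temp table, which
# pairs each input pkey with the matching or newly inserted output pkey.
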
##### Data cleanup

def cleanup_table(db, table, cols):
    table = sql_gen.as_Table(table)
    cols = map(sql_gen.as_Col, cols)
    
    expr = ('nullif(nullif(trim(both from %s), '+db.esc_value('')+'), '
        +db.esc_value(r'\N')+')')
    changes = [(v, sql_gen.CustomCode(expr % v.to_str(db)))
        for v in cols]
    
    update(db, table, changes)
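
# Sketch (names hypothetical): cleanup_table(db, 'plots', ['name', 'notes'])
# trims whitespace and converts '' and the \N placeholder to real NULLs.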