1
# Database access
2

    
3
import copy
4
import operator
5
import re
6
import warnings
7

    
8
import exc
9
import dicts
10
import iters
11
import lists
12
from Proxy import Proxy
13
import rand
14
import sql_gen
15
import strings
16
import util
17

    
18
##### Exceptions
19

    
20
def get_cur_query(cur, input_query=None):
    '''Returns the text of the query most recently run on a cursor.
    
    Prefers the query string recorded by the DB driver (a `query` or
    `_last_executed` attribute on the cursor, depending on the driver); falls
    back to the caller-supplied input_query, prefixed with '[input] ' to mark
    that it was not confirmed by the driver.
    '''
    raw_query = None
    if hasattr(cur, 'query'): raw_query = cur.query
    elif hasattr(cur, '_last_executed'): raw_query = cur._last_executed
    
    # `is not None` instead of `!= None`: identity check, not driver __eq__
    if raw_query is not None: return raw_query
    else: return '[input] '+strings.ustr(input_query)
27

    
28
def _add_cursor_info(e, *args, **kw_args):
    '''Annotates exception e with the query text of a cursor.
    Positional/keyword args are forwarded to get_cur_query().
    '''
    query = get_cur_query(*args, **kw_args)
    exc.add_msg(e, 'query: '+strings.ustr(query))
31

    
32
class DbException(exc.ExceptionWithCause):
    '''Base class for database errors raised by this module.
    Optionally annotates itself with the query of the cursor that failed.
    '''
    def __init__(self, msg, cause=None, cur=None):
        exc.ExceptionWithCause.__init__(self, msg, cause, cause_newline=True)
        if cur is not None: _add_cursor_info(self, cur)
36

    
37
class ExceptionWithName(DbException):
    '''Database error tagged with the name of the offending object.'''
    def __init__(self, name, cause=None):
        msg = 'for name: '+strings.as_tt(str(name))
        DbException.__init__(self, msg, cause)
        self.name = name
41

    
42
class ExceptionWithNameValue(DbException):
    '''Database error tagged with a name and the offending value.'''
    def __init__(self, name, value, cause=None):
        msg = ('for name: '+strings.as_tt(str(name))+'; value: '
            +strings.as_tt(repr(value)))
        DbException.__init__(self, msg, cause)
        self.name = name
        self.value = value
48

    
49
class ExceptionWithNameType(DbException):
    '''Database error tagged with an object type and name.'''
    def __init__(self, type_, name, cause=None):
        msg = ('for type: '+strings.as_tt(str(type_))+'; name: '
            +strings.as_tt(name))
        DbException.__init__(self, msg, cause)
        self.type = type_
        self.name = name
55

    
56
class ConstraintException(DbException):
    '''Raised when a named constraint is violated on a set of columns.'''
    def __init__(self, name, cols, cause=None):
        msg = ('Violated '+strings.as_tt(name)+' constraint on columns: '
            +strings.as_tt(', '.join(cols)))
        DbException.__init__(self, msg, cause)
        self.name = name
        self.cols = cols
62

    
63
class MissingCastException(DbException):
    '''Raised when a column receives an expression of the wrong type
    (see the type-mismatch parsing in run_query()).'''
    def __init__(self, type_, col, cause=None):
        msg = ('Missing cast to type '+strings.as_tt(type_)+' on column: '
            +strings.as_tt(col))
        DbException.__init__(self, msg, cause)
        self.type = type_
        self.col = col
69

    
70
class NameException(DbException):
    '''Marker subclass of DbException for name-related errors.'''
    pass
71

    
72
class DuplicateKeyException(ConstraintException):
    '''Raised on a unique-constraint violation (see run_query()).'''
    pass
73

    
74
class NullValueException(ConstraintException):
    '''Raised on a NOT NULL constraint violation (see run_query()).'''
    pass
75

    
76
class FunctionValueException(ExceptionWithNameValue):
    '''Raised when a function receives an invalid input value
    (see run_query()).'''
    pass
77

    
78
class DuplicateException(ExceptionWithNameType):
    '''Raised when a database object of the given type and name already
    exists (see run_query()).'''
    pass
79

    
80
class EmptyRowException(DbException):
    '''Marker subclass of DbException for empty-row conditions.'''
    pass
81

    
82
##### Warnings
83

    
84
class DbWarning(UserWarning):
    '''Warning category for database issues (e.g. unbounded SELECTs,
    see mk_select()).'''
    pass
85

    
86
##### Result retrieval
87

    
88
def col_names(cur):
    '''Yields the name of each column in the cursor's result description.'''
    return (entry[0] for entry in cur.description)
89

    
90
def rows(cur):
    '''Returns an iterator over the cursor's rows, stopping when fetchone()
    returns None (exhausted result set).
    '''
    # iter(callable, sentinel): the wrapping lambda was redundant
    return iter(cur.fetchone, None)
91

    
92
def consume_rows(cur):
    '''Used to fetch all rows so result will be cached'''
    remaining = rows(cur)
    iters.consume_iter(remaining)
95

    
96
def next_row(cur):
    '''Returns the cursor's next row.
    @raise StopIteration if no rows remain
    '''
    # next() builtin instead of the Python-2-only .next() method;
    # equivalent in Python 2.6+ and forward-compatible
    return next(rows(cur))
97

    
98
def row(cur):
    '''Returns the first row, then fetches all remaining rows so the result
    will be cached (see consume_rows()).'''
    first = next_row(cur)
    consume_rows(cur)
    return first
102

    
103
def next_value(cur):
    '''Returns the first column of the cursor's next row.'''
    return next_row(cur)[0]
104

    
105
def value(cur):
    '''Returns the first column of the first row, consuming the whole
    result set (see row()).'''
    return row(cur)[0]
106

    
107
def values(cur):
    '''Returns an iterator over the first column of each remaining row.'''
    def next_(): return next_value(cur)
    return iters.func_iter(next_)
108

    
109
def value_or_none(cur):
    '''Like value(), but returns None instead of raising when the result set
    is empty.'''
    try:
        return value(cur)
    except StopIteration:
        return None
112

    
113
##### Escaping
114

    
115
def esc_name_by_module(module, name):
    '''Escapes an SQL identifier using the quoting style of the given DB-API
    module name: double quotes for 'psycopg2' (or None), backticks for
    'MySQLdb'.
    @raise NotImplementedError for any other module name
    '''
    # `is None` instead of `== None`: identity check per PEP 8
    if module == 'psycopg2' or module is None: quote = '"'
    elif module == 'MySQLdb': quote = '`'
    else: raise NotImplementedError("Can't escape name for "+module+' database')
    return sql_gen.esc_name(name, quote)
120

    
121
def esc_name_by_engine(engine, name, **kw_args):
    '''Escapes name using the quoting style of the engine's DB-API module
    (looked up in db_engines).'''
    module = db_engines[engine][0]
    return esc_name_by_module(module, name, **kw_args)
123

    
124
def esc_name(db, name, **kw_args):
    '''Escapes name for the DB-API module underlying connection db.'''
    module = util.root_module(db.db)
    return esc_name_by_module(module, name, **kw_args)
126

    
127
def qual_name(db, schema, table):
    '''Returns the escaped, optionally schema-qualified, name of a table.
    @param schema str|None; when None, no schema qualifier is added
    '''
    def esc_name_(name): return esc_name(db, name)
    table = esc_name_(table)
    # `is not None` instead of `!= None`: identity check per PEP 8
    if schema is not None: return esc_name_(schema)+'.'+table
    else: return table
132

    
133
##### Database connections
134

    
135
# Recognized keys of a db_config dict (see DbConn and db_config_str())
db_config_names = ['engine', 'host', 'user', 'password', 'database', 'schemas']
136

    
137
# Maps engine name -> (DB-API module name, renames of db_config keys to the
# keyword args that module's connect() expects)
db_engines = {
    'MySQL': ('MySQLdb', {'password': 'passwd', 'database': 'db'}),
    'PostgreSQL': ('psycopg2', {}),
}

# Exception types to catch for any database error; extended with each
# driver's DatabaseError by _add_module()
DatabaseErrors_set = set([DbException])
DatabaseErrors = tuple(DatabaseErrors_set)
144

    
145
def _add_module(module):
    '''Registers a DB-API module's DatabaseError type in the module-level
    DatabaseErrors tuple.'''
    global DatabaseErrors
    DatabaseErrors_set.add(module.DatabaseError)
    DatabaseErrors = tuple(DatabaseErrors_set)
149

    
150
def db_config_str(db_config):
    '''Returns a short human-readable description of a db_config dict.'''
    return '%s database %s' % (db_config['engine'], db_config['database'])
152

    
153
def log_debug_none(msg, level=2):
    '''No-op debug logger; the default for DbConn's log_debug param.
    (A named def instead of an assigned lambda, per PEP 8; DbConn compares
    its log_debug against this object's identity to detect debug mode.)
    '''
    pass
154

    
155
class DbConn:
    '''A lazily-connecting wrapper around a DB-API connection, with optional
    query-result caching, savepoint management, and debug logging.
    The underlying connection is opened on first access of self.db.
    '''
    def __init__(self, db_config, autocommit=True, caching=True,
        log_debug=log_debug_none, debug_temp=False):
        '''
        @param db_config dict with keys from db_config_names (must include
            'engine'; 'schemas' is optional)
        @param autocommit Whether to commit after each query run outside a
            savepoint (see do_autocommit())
        @param caching Whether to cache query results (see run_query())
        @param log_debug Callback taking (msg, level); defaults to a no-op
        @param debug_temp Whether temporary objects should instead be permanent.
            This assists in debugging the internal objects used by the program.
        '''
        self.db_config = db_config
        self.autocommit = autocommit
        self.caching = caching
        self.log_debug = log_debug
        # debug mode is on iff a real (non-default) logging callback was given
        self.debug = log_debug != log_debug_none
        self.debug_temp = debug_temp
        
        self.__db = None # the DB-API connection, created lazily by _db()
        self.query_results = {} # query str -> cached CacheCursor
        self._savepoint = 0 # current savepoint nesting depth
        self._notices_seen = set() # server notices already logged once
    
    def __getattr__(self, name):
        # Only called for attributes not found normally; 'db' is computed
        if name == '__dict__': raise Exception('getting __dict__')
        if name == 'db': return self._db()
        else: raise AttributeError()
    
    def __getstate__(self):
        '''Supports pickling by dropping the unpicklable members.'''
        state = copy.copy(self.__dict__) # shallow copy
        state['log_debug'] = None # don't pickle the debug callback
        state['_DbConn__db'] = None # don't pickle the connection
        return state
    
    def connected(self): return self.__db != None
    
    def _db(self):
        '''Returns the underlying DB-API connection, opening and configuring
        it on first use.'''
        if self.__db == None:
            # Process db_config
            db_config = self.db_config.copy() # don't modify input!
            schemas = db_config.pop('schemas', None)
            module_name, mappings = db_engines[db_config.pop('engine')]
            module = __import__(module_name)
            _add_module(module)
            # rename config keys to the kw args this module's connect() expects
            for orig, new in mappings.iteritems():
                try: util.rename_key(db_config, orig, new)
                except KeyError: pass
            
            # Connect
            self.__db = module.connect(**db_config)
            
            # Configure connection
            if hasattr(self.db, 'set_isolation_level'):
                import psycopg2.extensions
                self.db.set_isolation_level(
                    psycopg2.extensions.ISOLATION_LEVEL_READ_COMMITTED)
            if schemas != None:
                # Prepend the requested schemas to the existing search_path
                search_path = [self.esc_name(s) for s in schemas.split(',')]
                search_path.append(value(run_query(self, 'SHOW search_path',
                    log_level=4)))
                run_query(self, 'SET search_path TO '+(','.join(search_path)),
                    log_level=3)
        
        return self.__db
    
    class DbCursor(Proxy):
        '''Cursor proxy that records queries and their results so they can be
        cached in the owning DbConn's query_results.'''
        def __init__(self, outer):
            Proxy.__init__(self, outer.db.cursor())
            self.outer = outer
            self.query_results = outer.query_results
            self.query_lookup = None # the query str used as the cache key
            self.result = [] # rows fetched so far, or the exception raised
        
        def execute(self, query):
            self._is_insert = query.startswith('INSERT')
            self.query_lookup = query
            try:
                try:
                    cur = self.inner.execute(query)
                    self.outer.do_autocommit()
                finally: self.query = get_cur_query(self.inner, query)
            except Exception, e:
                _add_cursor_info(e, self, query)
                self.result = e # cache the exception as the result
                self._cache_result()
                raise
            if self.rowcount == 0 and query.startswith('SELECT'): # empty SELECT
                consume_rows(self) # fetch all rows so result will be cached
            return cur
        
        def fetchone(self):
            row = self.inner.fetchone()
            if row != None: self.result.append(row)
            # otherwise, fetched all rows
            else: self._cache_result()
            return row
        
        def _cache_result(self):
            # For inserts that return a result set, don't cache result set since
            # inserts are not idempotent. Other non-SELECT queries don't have
            # their result set read, so only exceptions will be cached (an
            # invalid query will always be invalid).
            if self.query_results != None and (not self._is_insert
                or isinstance(self.result, Exception)):
                
                assert self.query_lookup != None
                self.query_results[self.query_lookup] = self.CacheCursor(
                    util.dict_subset(dicts.AttrsDictView(self),
                    ['query', 'result', 'rowcount', 'description']))
        
        class CacheCursor:
            '''Replays a cached result (or re-raises a cached exception)
            through the DB-API cursor interface.'''
            def __init__(self, cached_result): self.__dict__ = cached_result
            
            def execute(self, *args, **kw_args):
                if isinstance(self.result, Exception): raise self.result
                # otherwise, result is a rows list
                self.iter = iter(self.result)
            
            def fetchone(self):
                try: return self.iter.next()
                except StopIteration: return None
    
    def esc_value(self, value):
        '''Escapes a value as an SQL literal, using the driver's own quoting
        (mogrify() when available, _mysql.escape_string() for MySQLdb).'''
        try: str_ = self.mogrify('%s', [value])
        except NotImplementedError, e:
            module = util.root_module(self.db)
            if module == 'MySQLdb':
                import _mysql
                str_ = _mysql.escape_string(value)
            else: raise e
        return strings.to_unicode(str_)
    
    def esc_name(self, name): return esc_name(self, name) # calls global func
    
    def std_code(self, str_):
        '''Standardizes SQL code.
        * Ensures that string literals are prefixed by `E`
        '''
        if str_.startswith("'"): str_ = 'E'+str_
        return str_
    
    def can_mogrify(self):
        # only the psycopg2 driver path supports mogrify() here
        module = util.root_module(self.db)
        return module == 'psycopg2'
    
    def mogrify(self, query, params=None):
        '''Interpolates params into query using the driver, if supported.
        @raise NotImplementedError if the driver can't mogrify
        '''
        if self.can_mogrify(): return self.db.cursor().mogrify(query, params)
        else: raise NotImplementedError("Can't mogrify query")
    
    def print_notices(self):
        '''Logs each new server notice (each distinct notice at most once).'''
        if hasattr(self.db, 'notices'):
            for msg in self.db.notices:
                if msg not in self._notices_seen:
                    self._notices_seen.add(msg)
                    self.log_debug(msg, level=2)
    
    def run_query(self, query, cacheable=False, log_level=2,
        debug_msg_ref=None):
        '''Runs a query, optionally serving it from / storing it in the
        result cache.
        @param cacheable Whether the result may be cached and reused
        @param debug_msg_ref If specified, the log message will be returned in
            this instead of being output. This allows you to filter log messages
            depending on the result of the query.
        @return a cursor (a DbCursor or cached CacheCursor when cacheable)
        '''
        assert query != None
        
        if not self.caching: cacheable = False
        used_cache = False
        
        def log_msg(query):
            if used_cache: cache_status = 'cache hit'
            elif cacheable: cache_status = 'cache miss'
            else: cache_status = 'non-cacheable'
            return 'DB query: '+cache_status+':\n'+strings.as_code(query, 'SQL')
        
        try:
            # Get cursor
            if cacheable:
                try:
                    cur = self.query_results[query]
                    used_cache = True
                except KeyError: cur = self.DbCursor(self)
            else: cur = self.db.cursor()
            
            # Log query
            if self.debug and debug_msg_ref == None: # log before running
                self.log_debug(log_msg(query), log_level)
            
            # Run query
            cur.execute(query)
        finally:
            self.print_notices()
            if self.debug and debug_msg_ref != None: # return after running
                debug_msg_ref[0] = log_msg(str(get_cur_query(cur, query)))
        
        return cur
    
    def is_cached(self, query): return query in self.query_results
    
    def with_autocommit(self, func):
        '''Runs func() with the connection at autocommit isolation level,
        restoring the previous level afterwards (psycopg2-specific).'''
        import psycopg2.extensions
        
        prev_isolation_level = self.db.isolation_level
        self.db.set_isolation_level(
            psycopg2.extensions.ISOLATION_LEVEL_AUTOCOMMIT)
        try: return func()
        finally: self.db.set_isolation_level(prev_isolation_level)
    
    def with_savepoint(self, func):
        '''Runs func() inside a new savepoint, rolling back to it on any
        exception and always releasing it afterwards.'''
        savepoint = 'level_'+str(self._savepoint)
        self.run_query('SAVEPOINT '+savepoint, log_level=4)
        self._savepoint += 1
        try: return func()
        except:
            self.run_query('ROLLBACK TO SAVEPOINT '+savepoint, log_level=4)
            raise
        finally:
            # Always release savepoint, because after ROLLBACK TO SAVEPOINT,
            # "The savepoint remains valid and can be rolled back to again"
            # (http://www.postgresql.org/docs/8.3/static/sql-rollback-to.html).
            self.run_query('RELEASE SAVEPOINT '+savepoint, log_level=4)
            
            self._savepoint -= 1
            assert self._savepoint >= 0
            
            self.do_autocommit() # OK to do this after ROLLBACK TO SAVEPOINT
    
    def do_autocommit(self):
        '''Autocommits if outside savepoint'''
        assert self._savepoint >= 0
        if self.autocommit and self._savepoint == 0:
            self.log_debug('Autocommitting', level=4)
            self.db.commit()
    
    def col_info(self, col):
        '''Looks up a column's type, default, and nullability in
        information_schema.columns.
        @param col sql_gen.Col (must have a .table)
        @return sql_gen.TypedCol
        '''
        table = sql_gen.Table('columns', 'information_schema')
        cols = ['data_type', 'column_default',
            cast(self, 'boolean', 'is_nullable')]
        
        conds = [('table_name', col.table.name), ('column_name', col.name)]
        schema = col.table.schema
        if schema != None: conds.append(('table_schema', schema))
        
        type_, default, nullable = row(select(self, table, cols, conds,
            order_by='table_schema', limit=1, log_level=4))
            # TODO: order_by search_path schema order
        default = sql_gen.as_Code(default, self)
        
        return sql_gen.TypedCol(col.name, type_, default, nullable)
    
    def TempFunction(self, name):
        '''Returns a Function in the pg_temp schema (or with no schema, i.e.
        permanent, in debug_temp mode).'''
        if self.debug_temp: schema = None
        else: schema = 'pg_temp'
        return sql_gen.Function(name, schema)
406

    
407
connect = DbConn # alias: calling connect(db_config, ...) constructs a DbConn
408

    
409
##### Recoverable querying
410

    
411
def with_savepoint(db, func):
    '''Runs func() inside a savepoint on db (see DbConn.with_savepoint()).'''
    return db.with_savepoint(func)
412

    
413
def run_query(db, query, recover=None, cacheable=False, log_level=2,
    log_ignore_excs=None, **kw_args):
    '''Runs a query, translating recognizable DB error messages into the
    typed exceptions defined above.
    For params, see DbConn.run_query()
    @param recover Whether to run inside a savepoint so the enclosing
        transaction survives a failed query; also enables error translation
    @param log_ignore_excs The log_level will be increased by 2 if the query
        throws one of these exceptions.
    '''
    if recover == None: recover = False
    if log_ignore_excs == None: log_ignore_excs = ()
    log_ignore_excs = tuple(log_ignore_excs)
    
    debug_msg_ref = None # usually, db.run_query() logs query before running it
    # But if filtering with log_ignore_excs, wait until after exception parsing
    if log_ignore_excs != () or not db.can_mogrify(): debug_msg_ref = [None] 
    
    try:
        try:
            def run(): return db.run_query(query, cacheable, log_level,
                debug_msg_ref, **kw_args)
            if recover and not db.is_cached(query):
                return with_savepoint(db, run)
            else: return run() # don't need savepoint if cached
        except Exception, e:
            if not recover: raise # need savepoint to run index_cols()
            msg = exc.str_(e)
            
            # unique-constraint violation -> DuplicateKeyException
            match = re.search(r'duplicate key value violates unique constraint '
                r'"((_?[^\W_]+)_.+?)"', msg)
            if match:
                constraint, table = match.groups()
                try: cols = index_cols(db, table, constraint)
                except NotImplementedError: raise e
                else: raise DuplicateKeyException(constraint, cols, e)
            
            # NOT NULL violation -> NullValueException
            match = re.search(r'null value in column "(.+?)" violates not-null'
                r' constraint', msg)
            if match: raise NullValueException('NOT NULL', [match.group(1)], e)
            
            # invalid input value to a function -> FunctionValueException
            match = re.search(r'\b(?:invalid input (?:syntax|value)\b.*?'
                r'|date/time field value out of range): "(.+?)"\n'
                r'(?:(?s).*?)\bfunction "(.+?)"', msg)
            if match:
                value, name = match.groups()
                raise FunctionValueException(name, strings.to_unicode(value), e)
            
            # column/expression type mismatch -> MissingCastException
            match = re.search(r'column "(.+?)" is of type (.+?) but expression '
                r'is of type', msg)
            if match:
                col, type_ = match.groups()
                raise MissingCastException(type_, col, e)
            
            # object already exists -> DuplicateException
            match = re.search(r'\b(\S+) "(.+?)".*? already exists', msg)
            if match:
                type_, name = match.groups()
                raise DuplicateException(type_, name, e)
            
            raise # no specific exception raised
    except log_ignore_excs:
        log_level += 2
        raise
    finally:
        # emit the deferred log message (if logging was deferred at all)
        if debug_msg_ref != None and debug_msg_ref[0] != None:
            db.log_debug(debug_msg_ref[0], log_level)
472

    
473
##### Basic queries
474

    
475
def next_version(name):
    '''Appends or increments a '#<version>' suffix on a name.
    An unsuffixed name counts as version 0, so it becomes '<name>#1'.
    '''
    match = re.match(r'^(.*)#(\d+)$', name)
    if match:
        base, version_str = match.groups()
        return sql_gen.concat(base, '#'+str(int(version_str)+1))
    return sql_gen.concat(name, '#1')
482

    
483
def lock_table(db, table, mode):
    '''Issues LOCK TABLE on table in the given lock mode.'''
    table = sql_gen.as_Table(table)
    query = 'LOCK TABLE '+table.to_str(db)+' IN '+mode+' MODE'
    run_query(db, query)
486

    
487
def run_query_into(db, query, into=None, add_indexes_=False, **kw_args):
    '''Outputs a query to a temp table.
    For params, see run_query().
    @param into sql_gen.Table to CREATE ... AS the query's result. NOTE: this
        object is mutated — its schema may be cleared and its name versioned
        (via next_version()) until an unused name is found. If None, the query
        is simply run.
    @param add_indexes_ Whether to add indexes on the new table
    @return the CREATE TABLE AS cursor (rowcount = # rows in query)
    '''
    if into == None: return run_query(db, query, **kw_args)
    
    assert isinstance(into, sql_gen.Table)
    
    kw_args['recover'] = True
    kw_args.setdefault('log_ignore_excs', (DuplicateException,))
    
    temp = not db.debug_temp # tables are permanent in debug_temp mode
    # "temporary tables cannot specify a schema name", so remove schema
    if temp: into.schema = None
    
    # Create table
    while True:
        create_query = 'CREATE'
        if temp: create_query += ' TEMP'
        create_query += ' TABLE '+into.to_str(db)+' AS\n'+query
        
        try:
            cur = run_query(db, create_query, **kw_args)
                # CREATE TABLE AS sets rowcount to # rows in query
            break
        except DuplicateException, e:
            into.name = next_version(into.name)
            # try again with next version of name
    
    if add_indexes_: add_indexes(db, into)
    
    return cur
519

    
520
order_by_pkey = object() # tells mk_select() to order by the pkey
521

    
522
distinct_on_all = object() # tells mk_select() to SELECT DISTINCT ON all columns
523

    
524
def mk_select(db, tables, fields=None, conds=None, distinct_on=[], limit=None,
    start=None, order_by=order_by_pkey, default_table=None):
    '''
    @param tables The single table to select from, or a list of tables to join
        together, with tables after the first being sql_gen.Join objects
    @param fields Use None to select all fields in the table
    @param conds WHERE conditions: [(compare_left_side, compare_right_side),...]
        * container can be any iterable type
        * compare_left_side: sql_gen.Code|str (for col name)
        * compare_right_side: sql_gen.ValueCond|literal value
    @param distinct_on The columns to SELECT DISTINCT ON, or distinct_on_all to
        use all columns
    @param limit int|None LIMIT clause
    @param start int|None OFFSET clause (0 emits no OFFSET)
    @param order_by order_by_pkey (default) to order by the first table's pkey,
        a column, or None for no ordering
    @param default_table table to use for unqualified columns
    @return query
    '''
    # NOTE(review): distinct_on=[] is a mutable default; it is only read here,
    # never mutated, but keep it that way
    # Parse tables param
    tables = lists.mk_seq(tables)
    tables = list(tables) # don't modify input! (list() copies input)
    table0 = sql_gen.as_Table(tables.pop(0)) # first table is separate
    
    # Parse other params
    if conds == None: conds = []
    elif dicts.is_dict(conds): conds = conds.items()
    conds = list(conds) # don't modify input! (list() copies input)
    assert limit == None or type(limit) == int
    assert start == None or type(start) == int
    if order_by is order_by_pkey:
        # with DISTINCT ON, skip the default pkey ordering
        if distinct_on != []: order_by = None
        else: order_by = pkey(db, table0, recover=True)
    
    query = 'SELECT'
    
    def parse_col(col): return sql_gen.as_Col(col, default_table).to_str(db)
    
    # DISTINCT ON columns
    if distinct_on != []:
        query += '\nDISTINCT'
        if distinct_on is not distinct_on_all:
            query += ' ON ('+(', '.join(map(parse_col, distinct_on)))+')'
    
    # Columns
    query += '\n'
    if fields == None: query += '*'
    else:
        assert fields != []
        query += '\n, '.join(map(parse_col, fields))
    
    # Main table
    query += '\nFROM '+table0.to_str(db)
    
    # Add joins
    left_table = table0
    for join_ in tables:
        table = join_.table
        
        # Parse special values
        if join_.type_ is sql_gen.filter_out: # filter no match
            conds.append((sql_gen.Col(table_not_null_col(db, table), table),
                sql_gen.CompareCond(None, '~=')))
        
        query += '\n'+join_.to_str(db, left_table)
        
        left_table = table
    
    # `missing` tracks whether the query lacks any row-limiting clause
    missing = True
    if conds != []:
        if len(conds) == 1: whitespace = ' '
        else: whitespace = '\n'
        # NOTE(review): `whitespace` above is computed but never used
        query += '\n'+sql_gen.combine_conds([sql_gen.ColValueCond(l, r)
            .to_str(db) for l, r in conds], 'WHERE')
        missing = False
    if order_by != None:
        query += '\nORDER BY '+sql_gen.as_Col(order_by, table0).to_str(db)
    if limit != None: query += '\nLIMIT '+str(limit); missing = False
    if start != None:
        if start != 0: query += '\nOFFSET '+str(start)
        missing = False
    if missing: warnings.warn(DbWarning(
        'SELECT statement missing a WHERE, LIMIT, or OFFSET clause: '+query))
    
    return query
604

    
605
def select(db, *args, **kw_args):
    '''Builds and runs a SELECT. For params, see mk_select() and
    run_query().'''
    recover = kw_args.pop('recover', None)
    cacheable = kw_args.pop('cacheable', True)
    log_level = kw_args.pop('log_level', 2)
    
    query = mk_select(db, *args, **kw_args)
    return run_query(db, query, recover, cacheable, log_level=log_level)
613

    
614
def mk_insert_select(db, table, cols=None, select_query=None, returning=None,
    embeddable=False):
    '''Builds an INSERT ... SELECT statement.
    @param cols Columns to insert into; None (or []) for unknown/all-default
        columns
    @param select_query The SELECT/VALUES supplying the rows, or None for
        DEFAULT VALUES
    @param returning str|None An inserted column (such as pkey) to return
    @param embeddable Whether the query should be embeddable as a nested SELECT.
        Warning: If you set this and cacheable=True when the query is run, the
        query will be fully cached, not just if it raises an exception.
    @return query str
    '''
    table = sql_gen.remove_table_rename(sql_gen.as_Table(table))
    if cols == []: cols = None # no cols (all defaults) = unknown col names
    if cols != None:
        cols = [sql_gen.to_name_only_col(v, table).to_str(db) for v in cols]
    if select_query == None: select_query = 'DEFAULT VALUES'
    if returning != None: returning = sql_gen.as_Col(returning, table)
    
    # Build query
    first_line = 'INSERT INTO '+table.to_str(db)
    query = first_line
    if cols != None: query += '\n('+', '.join(cols)+')'
    query += '\n'+select_query
    
    if returning != None:
        query += '\nRETURNING '+sql_gen.to_name_only_col(returning).to_str(db)
    
    if embeddable:
        assert returning != None
        
        # Create function: wrap the INSERT ... RETURNING in a set-returning
        # SQL function so it can appear in a FROM clause
        function_name = sql_gen.clean_name(first_line)
        return_type = 'SETOF '+returning.to_str(db)+'%TYPE'
        while True:
            try:
                function = db.TempFunction(function_name)
                
                function_query = '''\
CREATE FUNCTION '''+function.to_str(db)+'''()
RETURNS '''+return_type+'''
LANGUAGE sql
AS $$
'''+query+''';
$$;
'''
                run_query(db, function_query, recover=True, cacheable=True,
                    log_ignore_excs=(DuplicateException,))
                break # this version was successful
            except DuplicateException, e:
                function_name = next_version(function_name)
                # try again with next version of name
        
        # Return query that uses function
        func_table = sql_gen.NamedTable('f', sql_gen.FunctionCall(function),
            [returning]) # AS clause requires function alias
        return mk_select(db, func_table, start=0, order_by=None)
    
    return query
669

    
670
def insert_select(db, *args, **kw_args):
    '''Builds and runs an INSERT ... SELECT.
    For params, see mk_insert_select() and run_query_into()
    @param into sql_gen.Table with suggested name of temp table to put RETURNING
        values in
    '''
    into = kw_args.pop('into', None)
    if into is not None: kw_args['embeddable'] = True
    recover = kw_args.pop('recover', None)
    cacheable = kw_args.pop('cacheable', True)
    log_level = kw_args.pop('log_level', 2)
    
    query = mk_insert_select(db, *args, **kw_args)
    return run_query_into(db, query, into, recover=recover,
        cacheable=cacheable, log_level=log_level)
683

    
684
# Sentinel: pass as a value in insert()'s row to use the column's DEFAULT
default = sql_gen.default # tells insert() to use the default value for a column
685

    
686
def insert(db, table, row, *args, **kw_args):
    '''Inserts a single row. `row` is either a sequence of values or a
    mapping of col name -> value. For other params, see insert_select().'''
    cols = None
    if not lists.is_seq(row):
        cols = row.keys()
        row = row.values()
    row = list(row) # ensure that "== []" works
    
    query = None
    if row != []: query = sql_gen.Values(row).to_str(db)
    
    return insert_select(db, table, cols, query, *args, **kw_args)
698

    
699
def mk_update(db, table, changes=None, cond=None):
    '''Builds an UPDATE statement.
    @param changes [(col, new_value),...]
        * container can be any iterable type
        * col: sql_gen.Code|str (for col name)
        * new_value: sql_gen.Code|literal value
    @param cond sql_gen.Code WHERE condition. e.g. use sql_gen.*Cond objects.
    @return str query
    '''
    query = 'UPDATE '+sql_gen.as_Table(table).to_str(db)+'\nSET\n'
    # genexpr passed directly to join() (redundant parens removed)
    query += ',\n'.join(sql_gen.to_name_only_col(col, table).to_str(db)+' = '
        +sql_gen.as_Value(new_value).to_str(db) for col, new_value in changes)
    # `is not None` instead of `!= None`: identity check per PEP 8
    if cond is not None: query += '\nWHERE\n'+cond.to_str(db)
    
    return query
714

    
715
def update(db, *args, **kw_args):
    '''Builds and runs an UPDATE. For params, see mk_update() and
    run_query().'''
    recover = kw_args.pop('recover', None)
    query = mk_update(db, *args, **kw_args)
    return run_query(db, query, recover)
720

    
721
def last_insert_id(db):
    '''Returns the id assigned by the most recent INSERT, or None when the
    underlying driver is not recognized.'''
    module = util.root_module(db.db)
    if module == 'psycopg2':
        return value(run_query(db, 'SELECT lastval()'))
    if module == 'MySQLdb':
        return db.insert_id()
    return None
726

    
727
def mk_flatten_mapping(db, into, cols, preserve=[], as_items=False):
    '''Creates a mapping from original column names (which may have collisions)
    to names that will be distinct among the columns' tables.
    This is meant to be used for several tables that are being joined together.
    @param cols The columns to combine. Duplicates will be removed.
    @param into The table for the new columns.
    @param preserve [sql_gen.Col...] Columns not to rename. Note that these
        columns will be included in the mapping even if they are not in cols.
        The tables of the provided Col objects will be changed to into, so make
        copies of them if you want to keep the original tables.
    @param as_items Whether to return a list of dict items instead of a dict
    @return dict(orig_col=new_col, ...)
        * orig_col: sql_gen.Col(orig_col_name, orig_table)
        * new_col: sql_gen.Col(orig_col_name, into)
        * All mappings use the into table so its name can easily be
          changed for all columns at once
    '''
    cols = lists.uniqify(cols)
    
    items = []
    # Preserved cols keep their name; only their table is retargeted to `into`
    # (mutates the caller's Col objects, as documented above)
    for col in preserve:
        orig_col = copy.copy(col)
        col.table = into
        items.append((orig_col, col))
    preserve = set(preserve)
    # Other cols are renamed to their full string form (presumably including
    # the table qualifier, which is what makes the new names distinct —
    # depends on sql_gen.Col.__str__)
    for col in cols:
        if col not in preserve:
            items.append((col, sql_gen.Col(str(col), into, col.srcs)))
    
    if not as_items: items = dict(items)
    return items
758

    
759
def flatten(db, into, joins, cols, limit=None, start=None, **kw_args):
    '''Selects the given columns into a new table under their flattened
    (collision-free) names. For params, see mk_flatten_mapping()
    @return See return value of mk_flatten_mapping()
    '''
    items = mk_flatten_mapping(db, into, cols, as_items=True, **kw_args)
    select_cols = [sql_gen.NamedCol(new.name, old) for old, new in items]
    query = mk_select(db, joins, select_cols, limit=limit, start=start)
    run_query_into(db, query, into=into, add_indexes_=True)
    return dict(items)
768

    
769
def mk_track_data_error(db, errors_table, cols, value, error_code, error):
    '''Builds a query that inserts one (column, value, error) row per source
    column into errors_table, filtering out rows already present there.
    '''
    assert cols != ()
    
    src_cols = [sql_gen.to_name_only_col(c) for c in cols]
    
    columns_cols = ['column']
    columns = sql_gen.NamedValues('columns', columns_cols,
        [[c.name] for c in src_cols])
    values_cols = ['value', 'error_code', 'error']
    values = sql_gen.NamedValues('values', values_cols,
        [value, error_code, error])
    
    select_cols = columns_cols+values_cols
    name_only_cols = [sql_gen.to_name_only_col(c) for c in select_cols]
    errors_table = sql_gen.NamedTable('errors', errors_table)
    # CROSS JOIN pairs every column with the single values row; the filter_out
    # join against errors_table drops rows that were already recorded
    joins = [columns, sql_gen.Join(values, type_='CROSS'),
        sql_gen.Join(errors_table, dict(zip(name_only_cols, select_cols)),
            sql_gen.filter_out)]
    
    return mk_insert_select(db, errors_table, name_only_cols,
        mk_select(db, joins, select_cols, order_by=None))
790

    
791
def track_data_error(db, errors_table, cols, *args, **kw_args):
    '''Records a data error in errors_table.
    @param errors_table If None, does nothing.
    '''
    if cols == () or errors_table == None: return # nothing to record
    query = mk_track_data_error(db, errors_table, cols, *args, **kw_args)
    run_query(db, query, cacheable=True, log_level=4)
798

    
799
def cast(db, type_, col, errors_table=None):
    '''Casts an (unrenamed) column or value.
    If errors_table set and col has srcs, saves errors in errors_table (using
    col's srcs attr as the source columns) and converts errors to warnings.
    @param type_ str The SQL type to cast to
    @param col str|sql_gen.Col|sql_gen.Literal
    @param errors_table None|sql_gen.Table|str
    @return sql_gen.Code The cast expression (either a plain ::-cast or a call
        to a generated error-trapping function)
    '''
    col = sql_gen.as_Col(col)
    save_errors = (errors_table != None and isinstance(col, sql_gen.Col)
        and col.srcs != ())
    if not save_errors: # can't save errors
        return sql_gen.CustomCode(col.to_str(db)+'::'+type_) # just cast
    
    assert not isinstance(col, sql_gen.NamedCol)
    
    errors_table = sql_gen.as_Table(errors_table)
    srcs = map(sql_gen.to_name_only_col, col.srcs)
    # Function name encodes the target type and source columns, so equal casts
    # reuse the same temp function
    function_name = str(sql_gen.FunctionCall(type_, *srcs))
    function = db.TempFunction(function_name)
    
    # Retry with successive version suffixes until the name doesn't collide
    while True:
        # Create function definition
        query = '''\
CREATE FUNCTION '''+function.to_str(db)+'''(value text)
RETURNS '''+type_+'''
LANGUAGE plpgsql
STRICT
AS $$
BEGIN
    /* The explicit cast to the return type is needed to make the cast happen
    inside the try block. (Implicit casts to the return type happen at the end
    of the function, outside any block.) */
    RETURN value::'''+type_+''';
EXCEPTION
    WHEN data_exception THEN
        -- Save error in errors table.
        -- Insert the value and error for *each* source column.
'''+mk_track_data_error(db, errors_table, srcs,
    *map(sql_gen.CustomCode, ['value', 'SQLSTATE', 'SQLERRM']))+''';
        
        RAISE WARNING '%', SQLERRM;
        RETURN NULL;
END;
$$;
'''
        
        # Create function
        try:
            run_query(db, query, recover=True, cacheable=True,
                log_ignore_excs=(DuplicateException,))
            break # successful
        except DuplicateException:
            function.name = next_version(function.name)
            # try again with next version of name
    
    return sql_gen.FunctionCall(function, col)
855

    
856
##### Database structure queries
857

    
858
def table_row_count(db, table, recover=None):
    '''Returns the number of rows in table.'''
    query = mk_select(db, table, [sql_gen.row_count], order_by=None, start=0)
    return value(run_query(db, query, recover=recover, log_level=3))
861

    
862
def table_cols(db, table, recover=None):
    '''Returns the column names of table, in table order.'''
    # LIMIT 0 fetches just the cursor metadata, not any rows
    cur = select(db, table, limit=0, order_by=None, recover=recover,
        log_level=4)
    return list(col_names(cur))
865

    
866
def pkey(db, table, recover=None):
    '''Assumed to be first column in table'''
    cols = table_cols(db, table, recover)
    return cols[0]
869

    
870
# Column name that table_not_null_col() looks for in a table
not_null_col = 'not_null_col'
871

    
872
def table_not_null_col(db, table, recover=None):
    '''Name assumed to be the value of not_null_col. If not found, uses pkey.'''
    if not_null_col in table_cols(db, table, recover):
        return not_null_col
    return pkey(db, table, recover)
876

    
877
def index_cols(db, table, index):
    '''Can also use this for UNIQUE constraints, because a UNIQUE index is
    automatically created. When you don't know whether something is a UNIQUE
    constraint or a UNIQUE index, use this function.
    @return [str,...] The index's column names, in attnum order
    '''
    module = util.root_module(db.db)
    if module == 'psycopg2':
        # First branch of the UNION: plain column indexes (via indkey).
        # Second branch: expression indexes — the referenced column numbers
        # are pulled out of indexprs with a regexp on ":varattno N" nodes.
        return list(values(run_query(db, '''\
SELECT attname
FROM
(
        SELECT attnum, attname
        FROM pg_index
        JOIN pg_class index ON index.oid = indexrelid
        JOIN pg_class table_ ON table_.oid = indrelid
        JOIN pg_attribute ON attrelid = indrelid AND attnum = ANY (indkey)
        WHERE
            table_.relname = '''+db.esc_value(table)+'''
            AND index.relname = '''+db.esc_value(index)+'''
    UNION
        SELECT attnum, attname
        FROM
        (
            SELECT
                indrelid
                , (regexp_matches(indexprs, E':varattno (\\\\d+)', 'g'))[1]::int
                    AS indkey
            FROM pg_index
            JOIN pg_class index ON index.oid = indexrelid
            JOIN pg_class table_ ON table_.oid = indrelid
            WHERE
                table_.relname = '''+db.esc_value(table)+'''
                AND index.relname = '''+db.esc_value(index)+'''
        ) s
        JOIN pg_attribute ON attrelid = indrelid AND attnum = indkey
) s
ORDER BY attnum
'''
            , cacheable=True, log_level=4)))
    else: raise NotImplementedError("Can't list index columns for "+module+
        ' database')
917

    
918
def constraint_cols(db, table, constraint):
    '''Returns the column names in a table constraint, in attnum order.
    Only implemented for PostgreSQL (psycopg2).
    '''
    module = util.root_module(db.db)
    if module == 'psycopg2':
        return list(values(run_query(db, '''\
SELECT attname
FROM pg_constraint
JOIN pg_class ON pg_class.oid = conrelid
JOIN pg_attribute ON attrelid = conrelid AND attnum = ANY (conkey)
WHERE
    relname = '''+db.esc_value(table)+'''
    AND conname = '''+db.esc_value(constraint)+'''
ORDER BY attnum
'''
            )))
    else: raise NotImplementedError("Can't list constraint columns for "+module+
        ' database')
934

    
935
# Name of the row number column added by add_row_num()
row_num_col = '_row_num'
936

    
937
def add_index(db, exprs, table=None, unique=False, ensure_not_null=True):
    '''Adds an index on column(s) or expression(s) if it doesn't already exist.
    Currently, only function calls are supported as expressions.
    @param exprs sql_gen.Col|expression or a list of them
    @param table The table to index; inferred from the columns if None
    @param ensure_not_null If set, translates NULL values to sentinel values.
        This allows indexes to be used for comparisons where NULLs are equal.
    '''
    exprs = lists.mk_seq(exprs)
    
    # Parse exprs
    old_exprs = exprs[:]
    exprs = []
    cols = []
    for i, expr in enumerate(old_exprs):
        expr = copy.deepcopy(expr) # don't modify input!
        expr = sql_gen.as_Col(expr, table)
        
        # Handle nullable columns
        if ensure_not_null:
            try: expr = sql_gen.ensure_not_null(db, expr)
            except KeyError: pass # unknown type, so just create plain index
        
        # Extract col: for a function-call expression, the indexed column is
        # its first argument
        if isinstance(expr, sql_gen.FunctionCall):
            col = expr.args[0]
            expr = sql_gen.Expr(expr)
        else: col = expr
        assert isinstance(col, sql_gen.Col)
        
        # Extract table
        if table == None:
            assert sql_gen.is_table_col(col)
            table = col.table
        
        col.table = None # index name is built from bare column names
        
        exprs.append(expr)
        cols.append(col)
    
    table = sql_gen.as_Table(table)
    # Index name combines the table and the comma-joined column names
    index = sql_gen.Table(str(sql_gen.Col(','.join(map(str, cols)), table)))
    
    str_ = 'CREATE'
    if unique: str_ += ' UNIQUE'
    str_ += ' INDEX '+index.to_str(db)+' ON '+table.to_str(db)+' ('+(
        ', '.join((v.to_str(db) for v in exprs)))+')'
    
    try: run_query(db, str_, recover=True, cacheable=True, log_level=3)
    except DuplicateException: pass # index already existed
985

    
986
def add_pkey(db, table, cols=None, recover=None):
    '''Adds a primary key.
    @param cols [sql_gen.Col,...] The columns in the primary key.
        Defaults to the first column in the table.
    @pre The table must not already have a primary key.
    '''
    table = sql_gen.as_Table(table)
    if cols == None: cols = [pkey(db, table, recover)]
    col_strs = [sql_gen.to_name_only_col(c).to_str(db) for c in cols]
    
    query = ('ALTER TABLE '+table.to_str(db)+' ADD PRIMARY KEY ('
        +', '.join(col_strs)+')')
    run_query(db, query, recover=True, cacheable=True, log_level=3,
        log_ignore_excs=(DuplicateException,))
999

    
1000
# Sentinel: pass as has_pkey to tell add_indexes() the pkey has already been
# added, so only the non-pkey indexes are created
already_indexed = object()
1001

    
1002
def add_indexes(db, table, has_pkey=True):
    '''Adds an index on all columns in a table.
    @param has_pkey bool|already_indexed Whether a pkey instead of a regular
        index should be added on the first column.
        * If already_indexed, the pkey is assumed to have already been added
    '''
    remaining = table_cols(db, table)
    if has_pkey:
        # The first column gets (or already has) the pkey instead of an index
        if has_pkey is not already_indexed: add_pkey(db, table)
        remaining = remaining[1:]
    for col_name in remaining:
        add_index(db, col_name, table)
1013

    
1014
def add_col(db, table, col, **kw_args):
    '''Adds a column to a table, doing nothing if it already exists.
    @param col sql_gen.TypedCol The column name and type
    For kw_args, see run_query()
    '''
    assert isinstance(col, sql_gen.TypedCol)
    query = ('ALTER TABLE '+table.to_str(db)+' ADD COLUMN '+col.to_str(db))
    try:
        run_query(db, query, recover=True, cacheable=True, **kw_args)
    except DuplicateException:
        pass # column already existed
1019

    
1020
# Schema of the row number column added by add_row_num(): a serial pkey
row_num_typed_col = sql_gen.TypedCol(row_num_col, 'serial', nullable=False,
    constraints='PRIMARY KEY')
1022

    
1023
def add_row_num(db, table):
    '''Adds a row number column to a table. Its name is in row_num_col. It will
    be the primary key.'''
    # row_num_typed_col is a serial PRIMARY KEY column (defined above)
    add_col(db, table, row_num_typed_col, log_level=3)
1027

    
1028
def cast_temp_col(db, type_, col, errors_table=None):
    '''Like cast(), but creates a new column with the cast values if the input
    is a column.
    @return The new column or cast value
    '''
    def cast_(col): return cast(db, type_, col, errors_table)
    
    try: col = sql_gen.underlying_col(col)
    except sql_gen.NoUnderlyingTableException: return sql_gen.wrap(cast_, col)
        # not a table column: just wrap the value in the cast
    
    table = col.table
    # New column name records the cast, e.g. "<col>::<type>"
    new_col = sql_gen.Col(sql_gen.concat(col.name, '::'+type_), table, col.srcs)
    expr = cast_(col)
    add_col(db, table, sql_gen.TypedCol(new_col.name, type_))
    update(db, table, [(new_col, expr)]) # populate the new column
    add_index(db, new_col)
    
    return new_col
1046

    
1047
def drop_table(db, table):
    '''Drops table if it exists, also dropping dependent objects (CASCADE).
    @return The cursor from run_query()
    '''
    table = sql_gen.as_Table(table)
    query = 'DROP TABLE IF EXISTS '+table.to_str(db)+' CASCADE'
    return run_query(db, query)
1050

    
1051
def create_table(db, table, cols, has_pkey=True, col_indexes=True):
    '''Creates a table.
    @param cols [sql_gen.TypedCol,...] The column names and types.
        NOTE: if has_pkey, cols[0] is replaced (in the caller's list) with a
        copy carrying the PRIMARY KEY constraint; the original TypedCol object
        itself is not modified.
    @param has_pkey If set, the first column becomes the primary key.
    @param col_indexes bool|[ref]
        * If True, indexes will be added on all non-pkey columns.
        * If a list reference, [0] will be set to a function to do this.
          This can be used to delay index creation until the table is populated.
    '''
    table = sql_gen.as_Table(table)
    
    if has_pkey:
        cols[0] = pkey = copy.copy(cols[0]) # don't modify input!
        pkey.constraints = 'PRIMARY KEY'
    
    str_ = 'CREATE TABLE '+table.to_str(db)+' (\n'
    str_ += '\n, '.join(v.to_str(db) for v in cols)
    str_ += '\n);\n'
    run_query(db, str_, cacheable=True, log_level=2)
    
    # Add indexes
    if has_pkey: has_pkey = already_indexed # pkey was created by the DDL above
    def add_indexes_(): add_indexes(db, table, has_pkey)
    if isinstance(col_indexes, list): col_indexes[0] = add_indexes_ # defer
    elif col_indexes: add_indexes_() # add now
1076

    
1077
def vacuum(db, table):
    '''VACUUM ANALYZEs a table.
    Runs in autocommit mode, since VACUUM cannot run inside a transaction
    block.
    '''
    table = sql_gen.as_Table(table)
    def run_vacuum():
        run_query(db, 'VACUUM ANALYZE '+table.to_str(db), log_level=3)
    db.with_autocommit(run_vacuum)
1081

    
1082
def truncate(db, table, schema='public', **kw_args):
    '''Empties a table, cascading to dependent tables.
    For params, see run_query()
    '''
    table = sql_gen.as_Table(table, schema)
    query = 'TRUNCATE '+table.to_str(db)+' CASCADE'
    return run_query(db, query, **kw_args)
1086

    
1087
def empty_temp(db, tables):
    '''Truncates the given temp table(s), unless db.debug_temp is set.'''
    if db.debug_temp: return # leave temp tables there for debugging
    for temp_table in lists.mk_seq(tables):
        truncate(db, temp_table, log_level=3)
1091

    
1092
def tables(db, schema_like='public', table_like='%', exact=False):
    '''Iterates over the names of tables matching the given patterns.
    @param exact If set, matches names exactly instead of with LIKE.
    '''
    compare = '=' if exact else 'LIKE'
    
    module = util.root_module(db.db)
    if module == 'psycopg2':
        conds = [('schemaname', sql_gen.CompareCond(schema_like, compare)),
            ('tablename', sql_gen.CompareCond(table_like, compare))]
        return values(select(db, 'pg_tables', ['tablename'], conds,
            order_by='tablename', log_level=4))
    if module == 'MySQLdb':
        return values(run_query(db, 'SHOW TABLES LIKE '+db.esc_value(table_like)
            , cacheable=True, log_level=4))
    raise NotImplementedError("Can't list tables for "+module+' database')
1106

    
1107
def table_exists(db, table):
    '''Whether the table exists in the database.'''
    table = sql_gen.as_Table(table)
    matches = list(tables(db, table.schema, table.name, exact=True))
    return matches != []
1110

    
1111
def function_exists(db, function):
    '''Whether a callable (non-trigger) function with this name exists,
    according to information_schema.routines.
    '''
    function = sql_gen.as_Function(function)
    
    info_table = sql_gen.Table('routines', 'information_schema')
    conds = [('routine_name', function.name)]
    if function.schema != None:
        conds.append(('routine_schema', function.schema))
    # Exclude trigger functions, since they cannot be called directly
    conds.append(('data_type', sql_gen.CompareCond('trigger', '!=')))
    
    matches = values(select(db, info_table, ['routine_name'], conds,
        order_by='routine_schema', limit=1, log_level=4))
        # TODO: order_by search_path schema order
    return list(matches) != []
1124

    
1125
def errors_table(db, table, if_exists=True):
    '''Returns the errors table associated with a table (named
    "<table>.errors").
    @param if_exists If set, returns None if the errors table doesn't exist
    @return None|sql_gen.Table
    '''
    table = sql_gen.as_Table(table)
    # Use the original source table, if this table was derived from one
    if table.srcs != (): table = table.srcs[0]
    
    errors_table_ = sql_gen.suffixed_table(table, '.errors')
    if if_exists and not table_exists(db, errors_table_): return None
    return errors_table_
1136

    
1137
##### Database management
1138

    
1139
def empty_db(db, schema='public', **kw_args):
    '''Truncates every table in the schema.
    For kw_args, see tables()
    '''
    for table_name in tables(db, schema, **kw_args):
        truncate(db, table_name, schema)
1142

    
1143
##### Heuristic queries
1144

    
1145
def put(db, table, row, pkey_=None, row_ct_ref=None):
    '''Inserts a row, or returns the pkey of the existing matching row.
    Recovers from errors.
    Only works under PostgreSQL (uses INSERT RETURNING).
    @param pkey_ The pkey column name; looked up from the table if None
    @param row_ct_ref [int] If set, [0] is incremented by the insert row count
    @return The pkey value of the inserted (or already-existing) row
    '''
    row = sql_gen.ColDict(db, table, row)
    if pkey_ == None: pkey_ = pkey(db, table, recover=True)
    
    try:
        cur = insert(db, table, row, pkey_, recover=True)
        if row_ct_ref != None and cur.rowcount >= 0:
            row_ct_ref[0] += cur.rowcount
        return value(cur)
    except DuplicateKeyException, e:
        # Row already exists: look it up by the unique columns that collided
        row = sql_gen.ColDict(db, table,
            util.dict_subset_right_join(row, e.cols))
        return value(select(db, table, [pkey_], row, recover=True))
1161

    
1162
def get(db, table, row, pkey, row_ct_ref=None, create=False):
    '''Looks up a row's pkey, optionally inserting the row if not found.
    Recovers from errors.
    @param create If set, inserts the row when no match exists
    @raise StopIteration If no match exists and create is not set
    '''
    try: return value(select(db, table, [pkey], row, limit=1, recover=True))
    except StopIteration:
        if not create: raise
        return put(db, table, row, pkey, row_ct_ref) # insert new row
1168

    
1169
def is_func_result(col):
    '''Whether col is the "result" column of a function-call table
    (i.e. its table name contains a "(").
    '''
    return col.name == 'result' and '(' in col.table.name
1171

    
1172
def into_table_name(out_table, in_tables0, mapping, is_func):
    '''Builds the default name for put_table()'s pkeys table from the output
    table and the input mapping.
    * Non-function: "<out_table>_pkeys", or "<out_table>[rank=<col>]" if the
      mapping has a "rank" column (hierarchical case)
    * Function: "<out_table>(<args>)", abbreviated to the "value" arg if present
    '''
    def in_col_str(in_col):
        in_col = sql_gen.remove_col_rename(in_col)
        if isinstance(in_col, sql_gen.Col):
            table = in_col.table
            if table == in_tables0:
                in_col = sql_gen.to_name_only_col(in_col)
            elif is_func_result(in_col): in_col = table # omit col name
        return str(in_col)
    
    str_ = str(out_table)
    if is_func:
        str_ += '('
        
        try: value_in_col = mapping['value']
        except KeyError:
            # No "value" arg: list all mapping entries as k=v pairs
            str_ += ', '.join((str(k)+'='+in_col_str(v)
                for k, v in mapping.iteritems()))
        else: str_ += in_col_str(value_in_col)
        
        str_ += ')'
    else:
        out_col = 'rank'
        try: in_col = mapping[out_col]
        except KeyError: str_ += '_pkeys'
        else: # has a rank column, so hierarchical
            str_ += '['+str(out_col)+'='+in_col_str(in_col)+']'
    return str_
1200

    
1201
def put_table(db, out_table, in_tables, mapping, row_ct_ref=None, into=None,
    default=None, is_func=False, on_error=exc.raise_):
    '''Recovers from errors.
    Only works under PostgreSQL (uses INSERT RETURNING).
    IMPORTANT: Must be run at the *beginning* of a transaction.
    @param in_tables The main input table to select from, followed by a list of
        tables to join with it using the main input table's pkey
    @param mapping dict(out_table_col=in_table_col, ...)
        * out_table_col: str (*not* sql_gen.Col)
        * in_table_col: sql_gen.Col|literal-value
    @param into The table to contain the output and input pkeys.
        Defaults to `out_table.name+'_pkeys'`.
    @param default The *output* column to use as the pkey for missing rows.
        If this output column does not exist in the mapping, uses None.
    @param is_func Whether out_table is the name of a SQL function, not a table
    @param on_error Called with the exception when an unhandled database error
        occurs (the affected rows are then skipped)
    @return sql_gen.Col Where the output pkeys are made available
    '''
    out_table = sql_gen.as_Table(out_table)
    
    def log_debug(msg): db.log_debug(msg, level=1.5)
    def col_ustr(str_):
        return strings.repr_no_u(sql_gen.remove_col_rename(str_))
    
    log_debug('********** New iteration **********')
    log_debug('Inserting these input columns into '+strings.as_tt(
        out_table.to_str(db))+':\n'+strings.as_table(mapping, ustr=col_ustr))
    
    is_function = function_exists(db, out_table)
    
    if is_function: out_pkey = 'result'
    else: out_pkey = pkey(db, out_table, recover=True)
    out_pkey_col = sql_gen.as_Col(out_pkey, out_table)
    
    if mapping == {}: # need at least one column for INSERT SELECT
        mapping = {out_pkey: None} # ColDict will replace with default value
    
    # Create input joins from list of input tables
    in_tables_ = in_tables[:] # don't modify input!
    in_tables0 = in_tables_.pop(0) # first table is separate
    errors_table_ = errors_table(db, in_tables0)
    in_pkey = pkey(db, in_tables0, recover=True)
    in_pkey_col = sql_gen.as_Col(in_pkey, in_tables0)
    input_joins = [in_tables0]+[sql_gen.Join(v,
        {in_pkey: sql_gen.join_same_not_null}) for v in in_tables_]
    
    if into == None:
        into = into_table_name(out_table, in_tables0, mapping, is_func)
    into = sql_gen.as_Table(into)
    
    # Set column sources
    in_cols = filter(sql_gen.is_table_col, mapping.values())
    for col in in_cols:
        if col.table == in_tables0: col.set_srcs(sql_gen.src_self)
    
    log_debug('Joining together input tables into temp table')
    # Place in new table for speed and so don't modify input if values edited
    in_table = sql_gen.Table('in')
    mapping = dicts.join(mapping, flatten(db, in_table, input_joins, in_cols,
        preserve=[in_pkey_col], start=0))
    input_joins = [in_table]
    db.log_debug('Temp table: '+strings.as_tt(in_table.to_str(db)), level=2)
    
    mapping = sql_gen.ColDict(db, out_table, mapping)
        # after applying dicts.join() because that returns a plain dict
    
    # Resolve default value column
    if default != None:
        try: default = mapping[default]
        except KeyError:
            db.log_debug('Default value column '
                +strings.as_tt(strings.repr_no_u(default))
                +' does not exist in mapping, falling back to None', level=2.1)
            default = None
    
    pkeys_names = [in_pkey, out_pkey]
    pkeys_cols = [in_pkey_col, out_pkey_col]
    
    pkeys_table_exists_ref = [False]
    def insert_into_pkeys(joins, cols, distinct=False):
        # First call creates the pkeys table; later calls append to it
        kw_args = {}
        if distinct: kw_args.update(dict(distinct_on=[in_pkey_col]))
        query = mk_select(db, joins, cols, order_by=None, start=0, **kw_args)
        
        if pkeys_table_exists_ref[0]:
            insert_select(db, into, pkeys_names, query)
        else:
            run_query_into(db, query, into=into)
            pkeys_table_exists_ref[0] = True
    
    limit_ref = [None]
    conds = set() # filter conditions accumulated by the error handlers below
    distinct_on = sql_gen.ColDict(db, out_table)
    def mk_main_select(joins, cols):
        distinct_on_cols = [c.to_Col() for c in distinct_on.values()]
        return mk_select(db, joins, cols, conds, distinct_on_cols,
            limit=limit_ref[0], start=0)
    
    exc_strs = set()
    def log_exc(e):
        e_str = exc.str_(e, first_line_only=True)
        log_debug('Caught exception: '+e_str)
        assert e_str not in exc_strs # avoid infinite loops
        exc_strs.add(e_str)
    
    def remove_all_rows():
        log_debug('Ignoring all rows')
        limit_ref[0] = 0 # just create an empty pkeys table
    
    def ignore(in_col, value, e):
        track_data_error(db, errors_table_, in_col.srcs, value, e.cause.pgcode,
            e.cause.pgerror)
        log_debug('Ignoring rows with '+strings.as_tt(repr(in_col))+' = '
            +strings.as_tt(repr(value)))
    
    def remove_rows(in_col, value, e):
        ignore(in_col, value, e)
        cond = (in_col, sql_gen.CompareCond(value, '!='))
        assert cond not in conds # avoid infinite loops
        conds.add(cond)
    
    def invalid2null(in_col, value, e):
        ignore(in_col, value, e)
        update(db, in_table, [(in_col, None)],
            sql_gen.ColValueCond(in_col, value))
    
    def insert_pkeys_table(which):
        return sql_gen.Table(sql_gen.concat(in_table.name,
            '_insert_'+which+'_pkeys'))
    insert_out_pkeys = insert_pkeys_table('out')
    insert_in_pkeys = insert_pkeys_table('in')
    
    # Do inserts and selects.
    # Each handled exception tightens the constraints (conds/join_cols/
    # distinct_on/mapping) and reruns the loop; the asserts in the handlers
    # guarantee progress so the loop terminates.
    join_cols = sql_gen.ColDict(db, out_table)
    while True:
        if limit_ref[0] == 0: # special case
            log_debug('Creating an empty pkeys table')
            cur = run_query_into(db, mk_select(db, out_table, [out_pkey],
                limit=limit_ref[0]), into=insert_out_pkeys)
            break # don't do main case
        
        has_joins = join_cols != {}
        
        log_debug('Trying to insert new rows')
        
        # Prepare to insert new rows
        insert_joins = input_joins[:] # don't modify original!
        insert_args = dict(recover=True, cacheable=False)
        if has_joins:
            insert_joins.append(sql_gen.Join(out_table, join_cols,
                sql_gen.filter_out))
        else:
            insert_args.update(dict(returning=out_pkey, into=insert_out_pkeys))
        main_select = mk_main_select(insert_joins, mapping.values())
        
        def main_insert():
            if is_function:
                log_debug('Calling function on input rows')
                args = dict(((k.name, v) for k, v in mapping.iteritems()))
                func_call = sql_gen.NamedCol(out_pkey,
                    sql_gen.FunctionCall(out_table, **args))
                insert_into_pkeys(input_joins, [in_pkey_col, func_call])
                return None
            else:
                lock_table(db, out_table, 'EXCLUSIVE')
                return insert_select(db, out_table, mapping.keys(), main_select,
                    **insert_args)
        
        try:
            cur = with_savepoint(db, main_insert)
            break # insert successful
        except MissingCastException, e:
            log_exc(e)
            
            out_col = e.col
            type_ = e.type
            
            log_debug('Casting '+strings.as_tt(out_col)+' input to '
                +strings.as_tt(type_))
            mapping[out_col] = cast_temp_col(db, type_, mapping[out_col],
                errors_table_)
        except DuplicateKeyException, e:
            log_exc(e)
            
            old_join_cols = join_cols.copy()
            distinct_on.update(util.dict_subset(mapping, e.cols))
            join_cols.update(util.dict_subset_right_join(mapping, e.cols))
            log_debug('Ignoring existing rows, comparing on these columns:\n'
                +strings.as_inline_table(join_cols, ustr=col_ustr))
            assert join_cols != old_join_cols # avoid infinite loops
        except NullValueException, e:
            log_exc(e)
            
            out_col, = e.cols
            try: in_col = mapping[out_col]
            except KeyError:
                log_debug('Missing mapping for NOT NULL column '+out_col)
                remove_all_rows()
            else: remove_rows(in_col, None, e)
        except FunctionValueException, e:
            log_exc(e)
            
            func_name = e.name
            value = e.value
            for out_col, in_col in mapping.iteritems():
                in_col = sql_gen.unwrap_func_call(in_col, func_name)
                invalid2null(in_col, value, e)
        except DatabaseErrors, e:
            log_exc(e)
            
            log_debug('No handler for exception')
            on_error(e)
            remove_all_rows()
        # after exception handled, rerun loop with additional constraints
    
    if cur != None and row_ct_ref != None and cur.rowcount >= 0:
        row_ct_ref[0] += cur.rowcount
    
    if is_function: pass # pkeys table already created
    elif has_joins:
        select_joins = input_joins+[sql_gen.Join(out_table, join_cols)]
        log_debug('Getting output table pkeys of existing/inserted rows')
        insert_into_pkeys(select_joins, pkeys_cols, distinct=True)
    else:
        add_row_num(db, insert_out_pkeys) # for joining with input pkeys
        
        log_debug('Getting input table pkeys of inserted rows')
        run_query_into(db, mk_main_select(input_joins, [in_pkey]),
            into=insert_in_pkeys)
        add_row_num(db, insert_in_pkeys) # for joining with output pkeys
        
        assert table_row_count(db, insert_out_pkeys) == table_row_count(db,
            insert_in_pkeys)
        
        log_debug('Combining output and input pkeys in inserted order')
        pkey_joins = [insert_in_pkeys, sql_gen.Join(insert_out_pkeys,
            {row_num_col: sql_gen.join_same_not_null})]
        insert_into_pkeys(pkey_joins, pkeys_names)
        
        empty_temp(db, [insert_out_pkeys, insert_in_pkeys])
    
    db.log_debug('Adding pkey on pkeys table to enable fast joins', level=2.5)
    add_pkey(db, into)
    
    log_debug('Setting pkeys of missing rows to '+strings.as_tt(repr(default)))
    missing_rows_joins = input_joins+[sql_gen.Join(into,
        {in_pkey: sql_gen.join_same_not_null}, sql_gen.filter_out)]
        # must use join_same_not_null or query will take forever
    insert_into_pkeys(missing_rows_joins,
        [in_pkey_col, sql_gen.NamedCol(out_pkey, default)])
    
    assert table_row_count(db, into) == table_row_count(db, in_table)
    
    empty_temp(db, in_table)
    
    srcs = []
    if is_func: srcs = sql_gen.cols_srcs(in_cols)
    return sql_gen.Col(out_pkey, into, srcs)
1458

    
1459
##### Data cleanup
1460

    
1461
def cleanup_table(db, table, cols):
    '''Normalizes empty values in the given columns: trims whitespace and maps
    '' and \\N to NULL.
    '''
    table = sql_gen.as_Table(table)
    col_objs = [sql_gen.as_Col(c) for c in cols]
    
    expr = ('nullif(nullif(trim(both from %s), '+db.esc_value('')+'), '
        +db.esc_value(r'\N')+')')
    changes = [(col, sql_gen.CustomCode(expr % col.to_str(db)))
        for col in col_objs]
    
    update(db, table, changes)
(24-24/36)