# Database access

import copy
import operator
import re
import warnings

import exc
import dicts
import iters
import lists
from Proxy import Proxy
import rand
import sql_gen
import strings
import util

##### Exceptions

def get_cur_query(cur, input_query=None):
    raw_query = None
    if hasattr(cur, 'query'): raw_query = cur.query
    elif hasattr(cur, '_last_executed'): raw_query = cur._last_executed
    
    if raw_query != None: return raw_query
    else: return '[input] '+strings.ustr(input_query)

def _add_cursor_info(e, *args, **kw_args):
    '''For params, see get_cur_query()'''
    exc.add_msg(e, 'query: '+strings.ustr(get_cur_query(*args, **kw_args)))

class DbException(exc.ExceptionWithCause):
    def __init__(self, msg, cause=None, cur=None):
        exc.ExceptionWithCause.__init__(self, msg, cause, cause_newline=True)
        if cur != None: _add_cursor_info(self, cur)

class ExceptionWithName(DbException):
    def __init__(self, name, cause=None):
        DbException.__init__(self, 'for name: '+strings.as_tt(str(name)), cause)
        self.name = name

class ExceptionWithNameValue(DbException):
    def __init__(self, name, value, cause=None):
        DbException.__init__(self, 'for name: '+strings.as_tt(str(name))
            +'; value: '+strings.as_tt(repr(value)), cause)
        self.name = name
        self.value = value

class ExceptionWithNameType(DbException):
    def __init__(self, type_, name, cause=None):
        DbException.__init__(self, 'for type: '+strings.as_tt(str(type_))
            +'; name: '+strings.as_tt(name), cause)
        self.type = type_
        self.name = name

class ConstraintException(DbException):
    def __init__(self, name, cols, cause=None):
        DbException.__init__(self, 'Violated '+strings.as_tt(name)
            +' constraint on columns: '+strings.as_tt(', '.join(cols)), cause)
        self.name = name
        self.cols = cols

class MissingCastException(DbException):
    def __init__(self, type_, col, cause=None):
        DbException.__init__(self, 'Missing cast to type '+strings.as_tt(type_)
            +' on column: '+strings.as_tt(col), cause)
        self.type = type_
        self.col = col

class NameException(DbException): pass

class DuplicateKeyException(ConstraintException): pass

class NullValueException(ConstraintException): pass

class FunctionValueException(ExceptionWithNameValue): pass

class DuplicateException(ExceptionWithNameType): pass

class EmptyRowException(DbException): pass

##### Warnings

class DbWarning(UserWarning): pass

##### Result retrieval

def col_names(cur): return (col[0] for col in cur.description)

def rows(cur): return iter(lambda: cur.fetchone(), None)

def consume_rows(cur):
    '''Used to fetch all rows so result will be cached'''
    iters.consume_iter(rows(cur))

def next_row(cur): return rows(cur).next()

def row(cur):
    row_ = next_row(cur)
    consume_rows(cur)
    return row_

def next_value(cur): return next_row(cur)[0]

def value(cur): return row(cur)[0]

def values(cur): return iters.func_iter(lambda: next_value(cur))

def value_or_none(cur):
    try: return value(cur)
    except StopIteration: return None
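
# Example (illustrative only; assumes a connected DbConn `db` and a placeholder
# table): value() returns the first column of the first row and consumes the
# rest so the result can be cached; value_or_none() returns None instead of
# raising StopIteration on an empty result:
#     count = value(db.run_query('SELECT count(*) FROM plantname'))
#     maybe = value_or_none(db.run_query('SELECT 1 WHERE false'))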

##### Escaping

def esc_name_by_module(module, name):
    if module == 'psycopg2' or module == None: quote = '"'
    elif module == 'MySQLdb': quote = '`'
    else: raise NotImplementedError("Can't escape name for "+module+' database')
    return sql_gen.esc_name(name, quote)

def esc_name_by_engine(engine, name, **kw_args):
    return esc_name_by_module(db_engines[engine][0], name, **kw_args)

def esc_name(db, name, **kw_args):
    return esc_name_by_module(util.root_module(db.db), name, **kw_args)

def qual_name(db, schema, table):
    def esc_name_(name): return esc_name(db, name)
    table = esc_name_(table)
    if schema != None: return esc_name_(schema)+'.'+table
    else: return table
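
# Example (illustrative only): under psycopg2 names are double-quoted, so
# qual_name(db, 'public', 'plant name') produces something like
# '"public"."plant name"' (the actual escaping is delegated to
# sql_gen.esc_name()).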

##### Database connections

db_config_names = ['engine', 'host', 'user', 'password', 'database', 'schemas']

db_engines = {
    'MySQL': ('MySQLdb', {'password': 'passwd', 'database': 'db'}),
    'PostgreSQL': ('psycopg2', {}),
}

DatabaseErrors_set = set([DbException])
DatabaseErrors = tuple(DatabaseErrors_set)

def _add_module(module):
    DatabaseErrors_set.add(module.DatabaseError)
    global DatabaseErrors
    DatabaseErrors = tuple(DatabaseErrors_set)

def db_config_str(db_config):
    return db_config['engine']+' database '+db_config['database']

log_debug_none = lambda msg, level=2: None
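
# Example (illustrative only; values are placeholders): a db_config is a plain
# dict keyed by the names in db_config_names:
#     db_config = dict(engine='PostgreSQL', host='localhost', user='bien',
#         password='...', database='vegbien', schemas='public')
# Pass it to connect() (defined below) to get a DbConn.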

class DbConn:
    def __init__(self, db_config, autocommit=True, caching=True,
        log_debug=log_debug_none, debug_temp=False):
        '''
        @param debug_temp Whether temporary objects should instead be permanent.
            This assists in debugging the internal objects used by the program.
        '''
        self.db_config = db_config
        self.autocommit = autocommit
        self.caching = caching
        self.log_debug = log_debug
        self.debug = log_debug != log_debug_none
        self.debug_temp = debug_temp
        self.autoanalyze = False
        
        self.__db = None
        self.query_results = {}
        self._savepoint = 0
        self._notices_seen = set()
    
    def __getattr__(self, name):
        if name == '__dict__': raise Exception('getting __dict__')
        if name == 'db': return self._db()
        else: raise AttributeError()
    
    def __getstate__(self):
        state = copy.copy(self.__dict__) # shallow copy
        state['log_debug'] = None # don't pickle the debug callback
        state['_DbConn__db'] = None # don't pickle the connection
        return state
    
    def connected(self): return self.__db != None
    
    def _db(self):
        if self.__db == None:
            # Process db_config
            db_config = self.db_config.copy() # don't modify input!
            schemas = db_config.pop('schemas', None)
            module_name, mappings = db_engines[db_config.pop('engine')]
            module = __import__(module_name)
            _add_module(module)
            for orig, new in mappings.iteritems():
                try: util.rename_key(db_config, orig, new)
                except KeyError: pass
            
            # Connect
            self.__db = module.connect(**db_config)
            
            # Configure connection
            if hasattr(self.db, 'set_isolation_level'):
                import psycopg2.extensions
                self.db.set_isolation_level(
                    psycopg2.extensions.ISOLATION_LEVEL_READ_COMMITTED)
            if schemas != None:
                search_path = [self.esc_name(s) for s in schemas.split(',')]
                search_path.append(value(run_query(self, 'SHOW search_path',
                    log_level=4)))
                run_query(self, 'SET search_path TO '+(','.join(search_path)),
                    log_level=3)
        
        return self.__db
    
    class DbCursor(Proxy):
        def __init__(self, outer):
            Proxy.__init__(self, outer.db.cursor())
            self.outer = outer
            self.query_results = outer.query_results
            self.query_lookup = None
            self.result = []
        
        def execute(self, query):
            self._is_insert = query.startswith('INSERT')
            self.query_lookup = query
            try:
                try:
                    cur = self.inner.execute(query)
                    self.outer.do_autocommit()
                finally: self.query = get_cur_query(self.inner, query)
            except Exception, e:
                _add_cursor_info(e, self, query)
                self.result = e # cache the exception as the result
                self._cache_result()
                raise
            
            # Always cache certain queries
            if query.startswith('CREATE') or query.startswith('ALTER'):
                # structural changes
                # Rest of query must be unique in the face of name collisions,
                # so don't cache ADD COLUMN unless it has a distinguishing
                # comment
                if query.find('ADD COLUMN') < 0 or query.endswith('*/'):
                    self._cache_result()
            elif self.rowcount == 0 and query.startswith('SELECT'): # empty
                consume_rows(self) # fetch all rows so result will be cached
            
            return cur
        
        def fetchone(self):
            row = self.inner.fetchone()
            if row != None: self.result.append(row)
            # otherwise, fetched all rows
            else: self._cache_result()
            return row
        
        def _cache_result(self):
            # For inserts that return a result set, don't cache the result set
            # since inserts are not idempotent. Other non-SELECT queries don't
            # have their result set read, so only exceptions will be cached (an
            # invalid query will always be invalid).
            if self.query_results != None and (not self._is_insert
                or isinstance(self.result, Exception)):
                
                assert self.query_lookup != None
                self.query_results[self.query_lookup] = self.CacheCursor(
                    util.dict_subset(dicts.AttrsDictView(self),
                    ['query', 'result', 'rowcount', 'description']))
        
        class CacheCursor:
            def __init__(self, cached_result): self.__dict__ = cached_result
            
            def execute(self, *args, **kw_args):
                if isinstance(self.result, Exception): raise self.result
                # otherwise, result is a rows list
                self.iter = iter(self.result)
            
            def fetchone(self):
                try: return self.iter.next()
                except StopIteration: return None
    
    def esc_value(self, value):
        try: str_ = self.mogrify('%s', [value])
        except NotImplementedError, e:
            module = util.root_module(self.db)
            if module == 'MySQLdb':
                import _mysql
                str_ = _mysql.escape_string(value)
            else: raise e
        return strings.to_unicode(str_)
    
    def esc_name(self, name): return esc_name(self, name) # calls global func
    
    def std_code(self, str_):
        '''Standardizes SQL code.
        * Ensures that string literals are prefixed by `E`
        '''
        if str_.startswith("'"): str_ = 'E'+str_
        return str_
    
    def can_mogrify(self):
        module = util.root_module(self.db)
        return module == 'psycopg2'
    
    def mogrify(self, query, params=None):
        if self.can_mogrify(): return self.db.cursor().mogrify(query, params)
        else: raise NotImplementedError("Can't mogrify query")
    
    def print_notices(self):
        if hasattr(self.db, 'notices'):
            for msg in self.db.notices:
                if msg not in self._notices_seen:
                    self._notices_seen.add(msg)
                    self.log_debug(msg, level=2)
    
    def run_query(self, query, cacheable=False, log_level=2,
        debug_msg_ref=None):
        '''
        @param log_ignore_excs (accepted by the module-level run_query()
            wrapper) The log_level will be increased by 2 if the query throws
            one of these exceptions.
        @param debug_msg_ref If specified, the log message will be returned in
            this instead of being output. This allows you to filter log messages
            depending on the result of the query.
        '''
        assert query != None
        
        if not self.caching: cacheable = False
        used_cache = False
        
        def log_msg(query):
            if used_cache: cache_status = 'cache hit'
            elif cacheable: cache_status = 'cache miss'
            else: cache_status = 'non-cacheable'
            return 'DB query: '+cache_status+':\n'+strings.as_code(query, 'SQL')
        
        try:
            # Get cursor
            if cacheable:
                try:
                    cur = self.query_results[query]
                    used_cache = True
                except KeyError: cur = self.DbCursor(self)
            else: cur = self.db.cursor()
            
            # Log query
            if self.debug and debug_msg_ref == None: # log before running
                self.log_debug(log_msg(query), log_level)
            
            # Run query
            cur.execute(query)
        finally:
            self.print_notices()
            if self.debug and debug_msg_ref != None: # return after running
                debug_msg_ref[0] = log_msg(str(get_cur_query(cur, query)))
        
        return cur
    
    def is_cached(self, query): return query in self.query_results
    
    def with_autocommit(self, func):
        import psycopg2.extensions
        
        prev_isolation_level = self.db.isolation_level
        self.db.set_isolation_level(
            psycopg2.extensions.ISOLATION_LEVEL_AUTOCOMMIT)
        try: return func()
        finally: self.db.set_isolation_level(prev_isolation_level)
    
    def with_savepoint(self, func):
        savepoint = 'level_'+str(self._savepoint)
        self.run_query('SAVEPOINT '+savepoint, log_level=4)
        self._savepoint += 1
        try: return func()
        except:
            self.run_query('ROLLBACK TO SAVEPOINT '+savepoint, log_level=4)
            raise
        finally:
            # Always release the savepoint, because after ROLLBACK TO SAVEPOINT,
            # "The savepoint remains valid and can be rolled back to again"
            # (http://www.postgresql.org/docs/8.3/static/sql-rollback-to.html).
            self.run_query('RELEASE SAVEPOINT '+savepoint, log_level=4)
            
            self._savepoint -= 1
            assert self._savepoint >= 0
            
            self.do_autocommit() # OK to do this after ROLLBACK TO SAVEPOINT
    
    def do_autocommit(self):
        '''Autocommits if outside savepoint'''
        assert self._savepoint >= 0
        if self.autocommit and self._savepoint == 0:
            self.log_debug('Autocommitting', level=4)
            self.db.commit()
    
    def col_info(self, col):
        table = sql_gen.Table('columns', 'information_schema')
        type_ = sql_gen.Coalesce(sql_gen.Nullif(sql_gen.Col('data_type'),
            'USER-DEFINED'), sql_gen.Col('udt_name'))
        cols = [type_, 'column_default', cast(self, 'boolean', 'is_nullable')]
        
        conds = [('table_name', col.table.name), ('column_name', col.name)]
        schema = col.table.schema
        if schema != None: conds.append(('table_schema', schema))
        
        type_, default, nullable = row(select(self, table, cols, conds,
            order_by='table_schema', limit=1, cacheable=False, log_level=4))
            # TODO: order_by search_path schema order
        default = sql_gen.as_Code(default, self)
        
        return sql_gen.TypedCol(col.name, type_, default, nullable)
    
    def TempFunction(self, name):
        if self.debug_temp: schema = None
        else: schema = 'pg_temp'
        return sql_gen.Function(name, schema)

connect = DbConn
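
# Example (illustrative only; placeholder config): connect() returns a lazy
# DbConn; the actual DB-API connection is opened on first use of db.db:
#     db = connect(dict(engine='PostgreSQL', host='localhost', user='bien',
#         password='...', database='vegbien'))
#     count = value(db.run_query('SELECT count(*) FROM plantname'))
#     db.with_savepoint(lambda: db.run_query('SELECT 1')) # recoverable unit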

##### Recoverable querying

def with_savepoint(db, func): return db.with_savepoint(func)

def run_query(db, query, recover=None, cacheable=False, log_level=2,
    log_ignore_excs=None, **kw_args):
    '''For params, see DbConn.run_query()'''
    if recover == None: recover = False
    if log_ignore_excs == None: log_ignore_excs = ()
    log_ignore_excs = tuple(log_ignore_excs)
    
    debug_msg_ref = None # usually, db.run_query() logs query before running it
    # But if filtering with log_ignore_excs, wait until after exception parsing
    if log_ignore_excs != () or not db.can_mogrify(): debug_msg_ref = [None]
    
    try:
        try:
            def run(): return db.run_query(query, cacheable, log_level,
                debug_msg_ref, **kw_args)
            if recover and not db.is_cached(query):
                return with_savepoint(db, run)
            else: return run() # don't need savepoint if cached
        except Exception, e:
            msg = exc.str_(e)
            
            match = re.search(r'duplicate key value violates unique constraint '
                r'"((_?[^\W_]+)_.+?)"', msg)
            if match:
                constraint, table = match.groups()
                cols = []
                if recover: # need auto-rollback to run index_cols()
                    try: cols = index_cols(db, table, constraint)
                    except NotImplementedError: pass
                raise DuplicateKeyException(constraint, cols, e)
            
            match = re.search(r'null value in column "(.+?)" violates not-null'
                r' constraint', msg)
            if match: raise NullValueException('NOT NULL', [match.group(1)], e)
            
            match = re.search(r'\b(?:invalid input (?:syntax|value)\b.*?'
                r'|date/time field value out of range): "(.+?)"\n'
                r'(?:(?s).*?)\bfunction "(.+?)"', msg)
            if match:
                value, name = match.groups()
                raise FunctionValueException(name, strings.to_unicode(value), e)
            
            match = re.search(r'column "(.+?)" is of type (.+?) but expression '
                r'is of type', msg)
            if match:
                col, type_ = match.groups()
                raise MissingCastException(type_, col, e)
            
            match = re.search(r'\b(\S+) "(.+?)".*? already exists', msg)
            if match:
                type_, name = match.groups()
                raise DuplicateException(type_, name, e)
            
            raise # no specific exception raised
    except log_ignore_excs:
        log_level += 2
        raise
    finally:
        if debug_msg_ref != None and debug_msg_ref[0] != None:
            db.log_debug(debug_msg_ref[0], log_level)
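
# Example (illustrative only; placeholder table): recover=True runs the query
# inside a savepoint, so a parsed error such as DuplicateKeyException can be
# caught and handled without aborting the enclosing transaction:
#     try: run_query(db, 'INSERT INTO plantname (name) VALUES ('
#         +db.esc_value('Quercus')+')', recover=True)
#     except DuplicateKeyException, e: pass # e.cols names the key's columns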
##### Basic queries
486

    
487
def next_version(name):
488
    version = 1 # first existing name was version 0
489
    match = re.match(r'^(.*)#(\d+)$', name)
490
    if match:
491
        name, version = match.groups()
492
        version = int(version)+1
493
    return sql_gen.concat(name, '#'+str(version))
494

    
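
# Example (illustrative only): next_version() appends or increments a '#N'
# suffix (via sql_gen.concat()), so 'in' -> 'in#1' -> 'in#2'. The CREATE retry
# loops below use it to pick a fresh name after a DuplicateException.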

def lock_table(db, table, mode):
    table = sql_gen.as_Table(table)
    run_query(db, 'LOCK TABLE '+table.to_str(db)+' IN '+mode+' MODE')

def run_query_into(db, query, into=None, add_indexes_=False, **kw_args):
    '''Outputs a query to a temp table.
    For params, see run_query().
    '''
    if into == None: return run_query(db, query, **kw_args)
    
    assert isinstance(into, sql_gen.Table)
    
    into.is_temp = True
    # "temporary tables cannot specify a schema name", so remove schema
    into.schema = None
    
    kw_args['recover'] = True
    kw_args.setdefault('log_ignore_excs', (DuplicateException,))
    
    temp = not db.debug_temp # tables are permanent in debug_temp mode
    
    # Create table
    while True:
        create_query = 'CREATE'
        if temp: create_query += ' TEMP'
        create_query += ' TABLE '+into.to_str(db)+' AS\n'+query
        
        try:
            cur = run_query(db, create_query, **kw_args)
                # CREATE TABLE AS sets rowcount to # rows in query
            break
        except DuplicateException, e:
            into.name = next_version(into.name)
            # try again with next version of name
    
    if add_indexes_: add_indexes(db, into)
    
    # According to the PostgreSQL doc, "The autovacuum daemon cannot access and
    # therefore cannot vacuum or analyze temporary tables. [...] if a temporary
    # table is going to be used in complex queries, it is wise to run ANALYZE on
    # the temporary table after it is populated."
    # (http://www.postgresql.org/docs/9.1/static/sql-createtable.html)
    # If into is not a temp table, ANALYZE is useful but not required.
    analyze(db, into)
    
    return cur

order_by_pkey = object() # tells mk_select() to order by the pkey

distinct_on_all = object() # tells mk_select() to SELECT DISTINCT ON all columns

def mk_select(db, tables, fields=None, conds=None, distinct_on=[], limit=None,
    start=None, order_by=order_by_pkey, default_table=None):
    '''
    @param tables The single table to select from, or a list of tables to join
        together, with tables after the first being sql_gen.Join objects
    @param fields Use None to select all fields in the table
    @param conds WHERE conditions: [(compare_left_side, compare_right_side),...]
        * container can be any iterable type
        * compare_left_side: sql_gen.Code|str (for col name)
        * compare_right_side: sql_gen.ValueCond|literal value
    @param distinct_on The columns to SELECT DISTINCT ON, or distinct_on_all to
        use all columns
    @return query
    '''
    # Parse tables param
    tables = lists.mk_seq(tables)
    tables = list(tables) # don't modify input! (list() copies input)
    table0 = sql_gen.as_Table(tables.pop(0)) # first table is separate
    
    # Parse other params
    if conds == None: conds = []
    elif dicts.is_dict(conds): conds = conds.items()
    conds = list(conds) # don't modify input! (list() copies input)
    assert limit == None or type(limit) == int
    assert start == None or type(start) == int
    if order_by is order_by_pkey:
        if distinct_on != []: order_by = None
        else: order_by = pkey(db, table0, recover=True)
    
    query = 'SELECT'
    
    def parse_col(col): return sql_gen.as_Col(col, default_table).to_str(db)
    
    # DISTINCT ON columns
    if distinct_on != []:
        query += '\nDISTINCT'
        if distinct_on is not distinct_on_all:
            query += ' ON ('+(', '.join(map(parse_col, distinct_on)))+')'
    
    # Columns
    if fields == None:
        if query.find('\n') >= 0: whitespace = '\n'
        else: whitespace = ' '
        query += whitespace+'*'
    else:
        assert fields != []
        query += '\n'+('\n, '.join(map(parse_col, fields)))
    
    # Main table
    query += '\nFROM '+table0.to_str(db)
    
    # Add joins
    left_table = table0
    for join_ in tables:
        table = join_.table
        
        # Parse special values
        if join_.type_ is sql_gen.filter_out: # filter no match
            conds.append((sql_gen.Col(table_not_null_col(db, table), table),
                sql_gen.CompareCond(None, '~=')))
        
        query += '\n'+join_.to_str(db, left_table)
        
        left_table = table
    
    missing = True
    if conds != []:
        if len(conds) == 1: whitespace = ' '
        else: whitespace = '\n'
        query += '\n'+sql_gen.combine_conds([sql_gen.ColValueCond(l, r)
            .to_str(db) for l, r in conds], 'WHERE')
        missing = False
    if order_by != None:
        query += '\nORDER BY '+sql_gen.as_Col(order_by, table0).to_str(db)
    if limit != None: query += '\nLIMIT '+str(limit); missing = False
    if start != None:
        if start != 0: query += '\nOFFSET '+str(start)
        missing = False
    if missing: warnings.warn(DbWarning(
        'SELECT statement missing a WHERE, LIMIT, or OFFSET clause: '+query))
    
    return query
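
# Example (illustrative only; table/column names are placeholders):
#     query = mk_select(db, 'plantname', ['name'], {'rank': 'species'},
#         limit=10, order_by=None)
#     cur = run_query(db, query)
# The select() wrapper below combines both steps.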

def select(db, *args, **kw_args):
    '''For params, see mk_select() and run_query()'''
    recover = kw_args.pop('recover', None)
    cacheable = kw_args.pop('cacheable', True)
    log_level = kw_args.pop('log_level', 2)
    
    return run_query(db, mk_select(db, *args, **kw_args), recover, cacheable,
        log_level=log_level)

def mk_insert_select(db, table, cols=None, select_query=None, returning=None,
    embeddable=False, ignore=False):
    '''
    @param returning str|None An inserted column (such as pkey) to return
    @param embeddable Whether the query should be embeddable as a nested SELECT.
        Warning: If you set this and cacheable=True when the query is run, the
        query will be fully cached, not just if it raises an exception.
    @param ignore Whether to ignore duplicate keys.
    '''
    table = sql_gen.remove_table_rename(sql_gen.as_Table(table))
    if cols == []: cols = None # no cols (all defaults) = unknown col names
    if cols != None: cols = [sql_gen.to_name_only_col(c, table) for c in cols]
    if select_query == None: select_query = 'DEFAULT VALUES'
    if returning != None: returning = sql_gen.as_Col(returning, table)
    
    first_line = 'INSERT INTO '+table.to_str(db)
    
    def mk_insert(select_query):
        query = first_line
        if cols != None:
            query += '\n('+(', '.join((c.to_str(db) for c in cols)))+')'
        query += '\n'+select_query
        
        if returning != None:
            returning_name_col = sql_gen.to_name_only_col(returning)
            query += '\nRETURNING '+returning_name_col.to_str(db)
        
        return query
    
    return_type = 'unknown'
    if returning != None: return_type = returning.to_str(db)+'%TYPE'
    
    lang = 'sql'
    if ignore:
        assert cols != None
        # Always return something to set the correct rowcount
        if returning == None: returning = sql_gen.NamedCol('NULL', None)
        
        embeddable = True # must use function
        lang = 'plpgsql'
        row = [sql_gen.Col(c.name, 'row') for c in cols]
        
        query = '''\
DECLARE
    row '''+table.to_str(db)+'''%ROWTYPE;
BEGIN
    /* Need an EXCEPTION block for each individual row because "When an error is
    caught by an EXCEPTION clause, [...] all changes to persistent database
    state within the block are rolled back."
    This is unfortunate because "A block containing an EXCEPTION clause is
    significantly more expensive to enter and exit than a block without one."
    (http://www.postgresql.org/docs/8.3/static/plpgsql-control-structures.html\
#PLPGSQL-ERROR-TRAPPING)
    */
    FOR '''+(', '.join((c.to_str(db) for c in row)))+''' IN
'''+select_query+'''
    LOOP
        BEGIN
            RETURN QUERY
'''+mk_insert(sql_gen.Values(row).to_str(db))+'''
;
        EXCEPTION
            WHEN unique_violation THEN NULL; -- continue to next row
        END;
    END LOOP;
END;\
'''
    else: query = mk_insert(select_query)
    
    if embeddable:
        # Create function
        function_name = sql_gen.clean_name(first_line)
        while True:
            try:
                function = db.TempFunction(function_name)
                
                function_query = '''\
CREATE FUNCTION '''+function.to_str(db)+'''()
RETURNS SETOF '''+return_type+'''
LANGUAGE '''+lang+'''
AS $$
'''+query+'''
$$;
'''
                run_query(db, function_query, recover=True, cacheable=True,
                    log_ignore_excs=(DuplicateException,))
                break # this version was successful
            except DuplicateException, e:
                function_name = next_version(function_name)
                # try again with next version of name
        
        # Return query that uses function
        cols = None
        if returning != None: cols = [returning]
        func_table = sql_gen.NamedTable('f', sql_gen.FunctionCall(function),
            cols) # AS clause requires function alias
        return mk_select(db, func_table, start=0, order_by=None)
    
    return query
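
# Example (illustrative only; names are placeholders): with ignore=True the
# INSERT is wrapped in a plpgsql function that traps unique_violation per row,
# so duplicates are skipped instead of aborting the whole statement:
#     query = mk_insert_select(db, 'plantname', ['name'],
#         mk_select(db, 'in', ['name'], order_by=None, start=0),
#         returning='plantname_id', ignore=True)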

def insert_select(db, table, *args, **kw_args):
    '''For params, see mk_insert_select() and run_query_into()
    @param into sql_gen.Table with suggested name of temp table to put RETURNING
        values in
    '''
    into = kw_args.pop('into', None)
    if into != None: kw_args['embeddable'] = True
    recover = kw_args.pop('recover', None)
    if kw_args.get('ignore', False): recover = True
    cacheable = kw_args.pop('cacheable', True)
    log_level = kw_args.pop('log_level', 2)
    
    cur = run_query_into(db, mk_insert_select(db, table, *args, **kw_args),
        into, recover=recover, cacheable=cacheable, log_level=log_level)
    autoanalyze(db, table)
    return cur

default = sql_gen.default # tells insert() to use the default value for a column

def insert(db, table, row, *args, **kw_args):
    '''For params, see insert_select()'''
    if lists.is_seq(row): cols = None
    else:
        cols = row.keys()
        row = row.values()
    row = list(row) # ensure that "== []" works
    
    if row == []: query = None
    else: query = sql_gen.Values(row).to_str(db)
    
    return insert_select(db, table, cols, query, *args, **kw_args)
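
# Example (illustrative only; placeholder table): a row may be a dict (cols
# taken from its keys) or a sequence (cols default to the table's column
# order):
#     insert(db, 'plantname', dict(name='Quercus'), recover=True)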

def mk_update(db, table, changes=None, cond=None, in_place=False):
    '''
    @param changes [(col, new_value),...]
        * container can be any iterable type
        * col: sql_gen.Code|str (for col name)
        * new_value: sql_gen.Code|literal value
    @param cond sql_gen.Code WHERE condition. e.g. use sql_gen.*Cond objects.
    @param in_place If set, locks the table and updates rows in place.
        This avoids creating dead rows in PostgreSQL.
        * cond must be None
    @return str query
    '''
    table = sql_gen.as_Table(table)
    changes = [(sql_gen.to_name_only_col(c, table), sql_gen.as_Value(v))
        for c, v in changes]
    
    if in_place:
        assert cond == None
        
        query = 'ALTER TABLE '+table.to_str(db)+'\n'
        query += ',\n'.join(('ALTER COLUMN '+c.to_str(db)+' TYPE '
            +db.col_info(sql_gen.with_default_table(c, table)).type
            +'\nUSING '+v.to_str(db) for c, v in changes))
    else:
        query = 'UPDATE '+table.to_str(db)+'\nSET\n'
        query += ',\n'.join((c.to_str(db)+' = '+v.to_str(db)
            for c, v in changes))
        if cond != None: query += '\nWHERE\n'+cond.to_str(db)
    
    return query

def update(db, table, *args, **kw_args):
    '''For params, see mk_update() and run_query()'''
    recover = kw_args.pop('recover', None)
    cacheable = kw_args.pop('cacheable', False)
    log_level = kw_args.pop('log_level', 2)
    
    cur = run_query(db, mk_update(db, table, *args, **kw_args), recover,
        cacheable, log_level=log_level)
    autoanalyze(db, table)
    return cur
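
# Example (illustrative only; assumes a one-argument sql_gen.FunctionCall):
# in_place=True rewrites the column via ALTER COLUMN ... TYPE ... USING,
# avoiding the dead rows a regular UPDATE creates:
#     update(db, 'in', [(sql_gen.Col('name', 'in'),
#         sql_gen.FunctionCall('lower', sql_gen.Col('name', 'in')))],
#         in_place=True)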

def last_insert_id(db):
    module = util.root_module(db.db)
    if module == 'psycopg2': return value(run_query(db, 'SELECT lastval()'))
    elif module == 'MySQLdb': return db.insert_id()
    else: return None

def mk_flatten_mapping(db, into, cols, preserve=[], as_items=False):
    '''Creates a mapping from original column names (which may have collisions)
    to names that will be distinct among the columns' tables.
    This is meant to be used for several tables that are being joined together.
    @param cols The columns to combine. Duplicates will be removed.
    @param into The table for the new columns.
    @param preserve [sql_gen.Col...] Columns not to rename. Note that these
        columns will be included in the mapping even if they are not in cols.
        The tables of the provided Col objects will be changed to into, so make
        copies of them if you want to keep the original tables.
    @param as_items Whether to return a list of dict items instead of a dict
    @return dict(orig_col=new_col, ...)
        * orig_col: sql_gen.Col(orig_col_name, orig_table)
        * new_col: sql_gen.Col(orig_col_name, into)
        * All mappings use the into table so its name can easily be
          changed for all columns at once
    '''
    cols = lists.uniqify(cols)
    
    items = []
    for col in preserve:
        orig_col = copy.copy(col)
        col.table = into
        items.append((orig_col, col))
    preserve = set(preserve)
    for col in cols:
        if col not in preserve:
            items.append((col, sql_gen.Col(str(col), into, col.srcs)))
    
    if not as_items: items = dict(items)
    return items

def flatten(db, into, joins, cols, limit=None, start=None, **kw_args):
    '''For params, see mk_flatten_mapping()
    @return See return value of mk_flatten_mapping()
    '''
    items = mk_flatten_mapping(db, into, cols, as_items=True, **kw_args)
    cols = [sql_gen.NamedCol(new.name, old) for old, new in items]
    run_query_into(db, mk_select(db, joins, cols, limit=limit, start=start),
        into=into, add_indexes_=True)
    return dict(items)
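
# Example (illustrative only): flatten() copies joined input tables into a
# single temp table and returns the column mapping, e.g.
#     mapping = flatten(db, sql_gen.Table('in'), input_joins, in_cols, start=0)
# where each original column maps to a column of 'in' named str(orig_col).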

def track_data_error(db, errors_table, cols, value, error_code, error):
    '''
    @param errors_table If None, does nothing.
    '''
    if errors_table == None or cols == (): return
    
    for col in cols:
        try:
            insert(db, errors_table, dict(column=col.name, value=value,
                error_code=error_code, error=error), recover=True,
                cacheable=True, log_level=4)
        except DuplicateKeyException: pass

def cast(db, type_, col, errors_table=None):
    '''Casts an (unrenamed) column or value.
    If errors_table set and col has srcs, saves errors in errors_table (using
    col's srcs attr as the source columns) and converts errors to warnings.
    @param col str|sql_gen.Col|sql_gen.Literal
    @param errors_table None|sql_gen.Table|str
    '''
    col = sql_gen.as_Col(col)
    save_errors = (errors_table != None and isinstance(col, sql_gen.Col)
        and col.srcs != ())
    if not save_errors: return sql_gen.Cast(type_, col) # can't save errors
    
    assert not isinstance(col, sql_gen.NamedCol)
    
    errors_table = sql_gen.as_Table(errors_table)
    srcs = map(sql_gen.to_name_only_col, col.srcs)
    function_name = str(sql_gen.FunctionCall(type_, *srcs))
    function = db.TempFunction(function_name)
    
    while True:
        # Create function definition
        errors_table_cols = map(sql_gen.Col,
            ['column', 'value', 'error_code', 'error'])
        query = '''\
CREATE FUNCTION '''+function.to_str(db)+'''(value text)
RETURNS '''+type_+'''
LANGUAGE plpgsql
STRICT
AS $$
BEGIN
    /* The explicit cast to the return type is needed to make the cast happen
    inside the try block. (Implicit casts to the return type happen at the end
    of the function, outside any block.) */
    RETURN value::'''+type_+''';
EXCEPTION
    WHEN data_exception THEN
        -- Save error in errors table.
        DECLARE
            error_code text := SQLSTATE;
            error text := SQLERRM;
            "column" text;
        BEGIN
            -- Insert the value and error for *each* source column.
            FOR "column" IN
'''+mk_select(db, sql_gen.NamedValues('c', None, [[c.name] for c in srcs]),
    order_by=None, start=0)+'''
            LOOP
                BEGIN
'''+mk_insert_select(db, errors_table, errors_table_cols,
    sql_gen.Values(errors_table_cols).to_str(db))+''';
                EXCEPTION
                    WHEN unique_violation THEN NULL; -- continue to next row
                END;
            END LOOP;
        END;
        
        RAISE WARNING '%', SQLERRM;
        RETURN NULL;
END;
$$;
'''
        
        # Create function
        try:
            run_query(db, query, recover=True, cacheable=True,
                log_ignore_excs=(DuplicateException,))
            break # successful
        except DuplicateException:
            function.name = next_version(function.name)
            # try again with next version of name
    
    return sql_gen.FunctionCall(function, col)
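
# Example (illustrative only; placeholder names): cast a staging column to
# integer, diverting uncastable values to the errors table as warnings instead
# of query errors:
#     expr = cast(db, 'integer', sql_gen.Col('height', 'in'),
#         errors_table=sql_gen.Table('in.errors'))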

##### Database structure queries

def table_row_count(db, table, recover=None):
    return value(run_query(db, mk_select(db, table, [sql_gen.row_count],
        order_by=None, start=0), recover=recover, log_level=3))

def table_cols(db, table, recover=None):
    return list(col_names(select(db, table, limit=0, order_by=None,
        recover=recover, log_level=4)))

def pkey(db, table, recover=None):
    '''Assumed to be first column in table'''
    return table_cols(db, table, recover)[0]

not_null_col = 'not_null_col'

def table_not_null_col(db, table, recover=None):
    '''Name assumed to be the value of not_null_col. If not found, uses pkey.'''
    if not_null_col in table_cols(db, table, recover): return not_null_col
    else: return pkey(db, table, recover)

def index_cols(db, table, index):
    '''Can also use this for UNIQUE constraints, because a UNIQUE index is
    automatically created. When you don't know whether something is a UNIQUE
    constraint or a UNIQUE index, use this function.'''
    module = util.root_module(db.db)
    if module == 'psycopg2':
        return list(values(run_query(db, '''\
SELECT attname
FROM
(
        SELECT attnum, attname
        FROM pg_index
        JOIN pg_class index ON index.oid = indexrelid
        JOIN pg_class table_ ON table_.oid = indrelid
        JOIN pg_attribute ON attrelid = indrelid AND attnum = ANY (indkey)
        WHERE
            table_.relname = '''+db.esc_value(table)+'''
            AND index.relname = '''+db.esc_value(index)+'''
    UNION
        SELECT attnum, attname
        FROM
        (
            SELECT
                indrelid
                , (regexp_matches(indexprs, E':varattno (\\\\d+)', 'g'))[1]::int
                    AS indkey
            FROM pg_index
            JOIN pg_class index ON index.oid = indexrelid
            JOIN pg_class table_ ON table_.oid = indrelid
            WHERE
                table_.relname = '''+db.esc_value(table)+'''
                AND index.relname = '''+db.esc_value(index)+'''
        ) s
        JOIN pg_attribute ON attrelid = indrelid AND attnum = indkey
) s
ORDER BY attnum
'''
            , cacheable=True, log_level=4)))
    else: raise NotImplementedError("Can't list index columns for "+module+
        ' database')

def constraint_cols(db, table, constraint):
    module = util.root_module(db.db)
    if module == 'psycopg2':
        return list(values(run_query(db, '''\
SELECT attname
FROM pg_constraint
JOIN pg_class ON pg_class.oid = conrelid
JOIN pg_attribute ON attrelid = conrelid AND attnum = ANY (conkey)
WHERE
    relname = '''+db.esc_value(table)+'''
    AND conname = '''+db.esc_value(constraint)+'''
ORDER BY attnum
'''
            )))
    else: raise NotImplementedError("Can't list constraint columns for "+module+
        ' database')

row_num_col = '_row_num'

def add_index(db, exprs, table=None, unique=False, ensure_not_null_=True):
    '''Adds an index on column(s) or expression(s) if it doesn't already exist.
    Currently, only function calls are supported as expressions.
    @param ensure_not_null_ If set, translates NULL values to sentinel values.
        This allows indexes to be used for comparisons where NULLs are equal.
    '''
    exprs = lists.mk_seq(exprs)
    
    # Parse exprs
    old_exprs = exprs[:]
    exprs = []
    cols = []
    for i, expr in enumerate(old_exprs):
        expr = sql_gen.as_Col(expr, table)
        
        # Handle nullable columns
        if ensure_not_null_:
            try: expr = ensure_not_null(db, expr)
            except KeyError: pass # unknown type, so just create plain index
        
        # Extract col
        expr = copy.deepcopy(expr) # don't modify input!
        if isinstance(expr, sql_gen.FunctionCall):
            col = expr.args[0]
            expr = sql_gen.Expr(expr)
        else: col = expr
        assert isinstance(col, sql_gen.Col)
        
        # Extract table
        if table == None:
            assert sql_gen.is_table_col(col)
            table = col.table
        
        col.table = None
        
        exprs.append(expr)
        cols.append(col)
    
    table = sql_gen.as_Table(table)
    index = sql_gen.Table(str(sql_gen.Col(','.join(map(str, cols)), table)))
    
    # Add index
    while True:
        str_ = 'CREATE'
        if unique: str_ += ' UNIQUE'
        str_ += ' INDEX '+index.to_str(db)+' ON '+table.to_str(db)+' ('+(
            ', '.join((v.to_str(db) for v in exprs)))+')'
        
        try:
            run_query(db, str_, recover=True, cacheable=True, log_level=3,
                log_ignore_excs=(DuplicateException,))
            break
        except DuplicateException:
            index.name = next_version(index.name)
            # try again with next version of name
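
# Example (illustrative only): add_index(db, sql_gen.Col('name', 'in')) creates
# an index on a sentinel-wrapped copy of the column (via ensure_not_null()),
# so comparisons that treat NULLs as equal can still use the index.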
def add_pkey(db, table, cols=None, recover=None):
1084
    '''Adds a primary key.
1085
    @param cols [sql_gen.Col,...] The columns in the primary key.
1086
        Defaults to the first column in the table.
1087
    @pre The table must not already have a primary key.
1088
    '''
1089
    table = sql_gen.as_Table(table)
1090
    if cols == None: cols = [pkey(db, table, recover)]
1091
    col_strs = [sql_gen.to_name_only_col(v).to_str(db) for v in cols]
1092
    
1093
    run_query(db, 'ALTER TABLE '+table.to_str(db)+' ADD PRIMARY KEY ('
1094
        +(', '.join(col_strs))+')', recover=True, cacheable=True, log_level=3,
1095
        log_ignore_excs=(DuplicateException,))
1096

    
1097
def add_not_null(db, col):
1098
    table = col.table
1099
    col = sql_gen.to_name_only_col(col)
1100
    run_query(db, 'ALTER TABLE '+table.to_str(db)+' ALTER COLUMN '
1101
        +col.to_str(db)+' SET NOT NULL', cacheable=True, log_level=3)
1102

    
1103
def add_index_col(db, col, suffix, expr, nullable=True):
1104
    if sql_gen.index_col(col) != None: return # already has index col
1105
    
1106
    new_col = sql_gen.suffixed_col(col, suffix)
1107
    
1108
    # Add column
1109
    new_typed_col = sql_gen.TypedCol(new_col.name, db.col_info(col).type)
1110
    add_col(db, col.table, new_typed_col, comment='src: '+repr(col),
1111
        log_level=3)
1112
    new_col.name = new_typed_col.name # propagate any renaming
1113
    
1114
    update(db, col.table, [(new_col, expr)], in_place=True, cacheable=True,
1115
        log_level=3)
1116
    if not nullable: add_not_null(db, new_col)
1117
    add_index(db, new_col)
1118
    
1119
    col.table.index_cols[col.name] = new_col
1120

    
1121
# Controls when ensure_not_null() will use index columns
1122
not_null_index_cols_min_rows = 0 # rows; initially always use index columns
1123

    
1124
def ensure_not_null(db, col):
1125
    '''For params, see sql_gen.ensure_not_null()'''
1126
    expr = sql_gen.ensure_not_null(db, col)
1127
    
1128
    # If a nullable column in a temp table, add separate index column instead.
1129
    # Note that for small datasources, this adds 6-25% to the total import time.
1130
    if (sql_gen.is_temp_col(col) and isinstance(expr, sql_gen.EnsureNotNull)
1131
        and table_row_count(db, col.table) >= not_null_index_cols_min_rows):
1132
        add_index_col(db, col, '::NOT NULL', expr, nullable=False)
1133
        expr = sql_gen.index_col(col)
1134
    
1135
    return expr
1136

    

already_indexed = object() # tells add_indexes() the pkey has already been added

def add_indexes(db, table, has_pkey=True):
    '''Adds an index on all columns in a table.
    @param has_pkey bool|already_indexed Whether a pkey instead of a regular
        index should be added on the first column.
        * If already_indexed, the pkey is assumed to have already been added
    '''
    cols = table_cols(db, table)
    if has_pkey:
        if has_pkey is not already_indexed: add_pkey(db, table)
        cols = cols[1:]
    for col in cols: add_index(db, col, table)

def add_col(db, table, col, comment=None, **kw_args):
    '''
    @param col TypedCol Name may be versioned, so be sure to propagate any
        renaming back to any source column for the TypedCol.
    @param comment None|str SQL comment used to distinguish columns of the same
        name from each other when they contain different data, to allow the
        ADD COLUMN query to be cached. If not set, query will not be cached.
    '''
    assert isinstance(col, sql_gen.TypedCol)
    
    while True:
        str_ = 'ALTER TABLE '+table.to_str(db)+' ADD COLUMN '+col.to_str(db)
        if comment != None: str_ += ' '+sql_gen.esc_comment(comment)
        
        try:
            run_query(db, str_, recover=True, cacheable=True, **kw_args)
            break
        except DuplicateException:
            col.name = next_version(col.name)
            # try again with next version of name

row_num_typed_col = sql_gen.TypedCol(row_num_col, 'serial', nullable=False,
    constraints='PRIMARY KEY')

def add_row_num(db, table):
    '''Adds a row number column to a table. Its name is in row_num_col. It will
    be the primary key.'''
    add_col(db, table, row_num_typed_col, log_level=3)

def cast_temp_col(db, type_, col, errors_table=None):
    '''Like cast(), but creates a new column with the cast values if the input
    is a column.
    @return The new column or cast value
    '''
    def cast_(col): return cast(db, type_, col, errors_table)
    
    try: col = sql_gen.underlying_col(col)
    except sql_gen.NoUnderlyingTableException: return sql_gen.wrap(cast_, col)
    
    table = col.table
    new_col = sql_gen.Col(sql_gen.concat(col.name, '::'+type_), table, col.srcs)
    expr = cast_(col)
    
    # Add column
    new_typed_col = sql_gen.TypedCol(new_col.name, type_)
    add_col(db, table, new_typed_col, comment='src: '+repr(col))
    new_col.name = new_typed_col.name # propagate any renaming
    
    update(db, table, [(new_col, expr)], in_place=True, cacheable=True)
    add_index(db, new_col)
    
    return new_col

def drop_table(db, table):
    table = sql_gen.as_Table(table)
    return run_query(db, 'DROP TABLE IF EXISTS '+table.to_str(db)+' CASCADE')

def create_table(db, table, cols, has_pkey=True, col_indexes=True):
    '''Creates a table.
    @param cols [sql_gen.TypedCol,...] The column names and types
    @param has_pkey If set, the first column becomes the primary key.
    @param col_indexes bool|[ref]
        * If True, indexes will be added on all non-pkey columns.
        * If a list reference, [0] will be set to a function to do this.
          This can be used to delay index creation until the table is populated.
    '''
    table = sql_gen.as_Table(table)
    
    if has_pkey:
        cols[0] = pkey = copy.copy(cols[0]) # don't modify input!
        pkey.constraints = 'PRIMARY KEY'
    
    str_ = 'CREATE TABLE '+table.to_str(db)+' (\n'
    str_ += '\n, '.join(v.to_str(db) for v in cols)
    str_ += '\n);\n'
    run_query(db, str_, cacheable=True, log_level=2)
    
    # Add indexes
    if has_pkey: has_pkey = already_indexed
    def add_indexes_(): add_indexes(db, table, has_pkey)
    if isinstance(col_indexes, list): col_indexes[0] = add_indexes_ # defer
    elif col_indexes: add_indexes_() # add now

def analyze(db, table):
    table = sql_gen.as_Table(table)
    run_query(db, 'ANALYZE '+table.to_str(db), log_level=3)

def autoanalyze(db, table):
    if db.autoanalyze: analyze(db, table)

def vacuum(db, table):
    table = sql_gen.as_Table(table)
    db.with_autocommit(lambda: run_query(db, 'VACUUM ANALYZE '+table.to_str(db),
        log_level=3))

def truncate(db, table, schema='public', **kw_args):
    '''For params, see run_query()'''
    table = sql_gen.as_Table(table, schema)
    return run_query(db, 'TRUNCATE '+table.to_str(db)+' CASCADE', **kw_args)

def empty_temp(db, tables):
    if db.debug_temp: return # leave temp tables there for debugging
    tables = lists.mk_seq(tables)
    for table in tables: truncate(db, table, log_level=3)

def tables(db, schema_like='public', table_like='%', exact=False):
    if exact: compare = '='
    else: compare = 'LIKE'
    
    module = util.root_module(db.db)
    if module == 'psycopg2':
        conds = [('schemaname', sql_gen.CompareCond(schema_like, compare)),
            ('tablename', sql_gen.CompareCond(table_like, compare))]
        return values(select(db, 'pg_tables', ['tablename'], conds,
            order_by='tablename', log_level=4))
    elif module == 'MySQLdb':
        return values(run_query(db, 'SHOW TABLES LIKE '+db.esc_value(table_like)
            , cacheable=True, log_level=4))
    else: raise NotImplementedError("Can't list tables for "+module+' database')

def table_exists(db, table):
    table = sql_gen.as_Table(table)
    return list(tables(db, table.schema, table.name, exact=True)) != []

def function_exists(db, function):
    function = sql_gen.as_Function(function)
    
    info_table = sql_gen.Table('routines', 'information_schema')
    conds = [('routine_name', function.name)]
    schema = function.schema
    if schema != None: conds.append(('routine_schema', schema))
    # Exclude trigger functions, since they cannot be called directly
    conds.append(('data_type', sql_gen.CompareCond('trigger', '!=')))
    
    return list(values(select(db, info_table, ['routine_name'], conds,
        order_by='routine_schema', limit=1, log_level=4))) != []
        # TODO: order_by search_path schema order

def errors_table(db, table, if_exists=True):
    '''
    @param if_exists If set, returns None if the errors table doesn't exist
    @return None|sql_gen.Table
    '''
    table = sql_gen.as_Table(table)
    if table.srcs != (): table = table.srcs[0]
    
    errors_table = sql_gen.suffixed_table(table, '.errors')
    if if_exists and not table_exists(db, errors_table): return None
    return errors_table

##### Database management

def empty_db(db, schema='public', **kw_args):
    '''For kw_args, see tables()'''
    for table in tables(db, schema, **kw_args): truncate(db, table, schema)

##### Heuristic queries
1309
def put(db, table, row, pkey_=None, row_ct_ref=None):
1310
    '''Recovers from errors.
1311
    Only works under PostgreSQL (uses INSERT RETURNING).
1312
    '''
1313
    row = sql_gen.ColDict(db, table, row)
1314
    if pkey_ == None: pkey_ = pkey(db, table, recover=True)
1315
    
1316
    try:
1317
        cur = insert(db, table, row, pkey_, recover=True)
1318
        if row_ct_ref != None and cur.rowcount >= 0:
1319
            row_ct_ref[0] += cur.rowcount
1320
        return value(cur)
1321
    except DuplicateKeyException, e:
1322
        row = sql_gen.ColDict(db, table,
1323
            util.dict_subset_right_join(row, e.cols))
1324
        return value(select(db, table, [pkey_], row, recover=True))
1325

    
1326
def get(db, table, row, pkey, row_ct_ref=None, create=False):
1327
    '''Recovers from errors'''
1328
    try: return value(select(db, table, [pkey], row, limit=1, recover=True))
1329
    except StopIteration:
1330
        if not create: raise
1331
        return put(db, table, row, pkey, row_ct_ref) # insert new row
1332

    
1333
def is_func_result(col):
1334
    return col.table.name.find('(') >= 0 and col.name == 'result'
1335

    
1336
def into_table_name(out_table, in_tables0, mapping, is_func):
1337
    def in_col_str(in_col):
1338
        in_col = sql_gen.remove_col_rename(in_col)
1339
        if isinstance(in_col, sql_gen.Col):
1340
            table = in_col.table
1341
            if table == in_tables0:
1342
                in_col = sql_gen.to_name_only_col(in_col)
1343
            elif is_func_result(in_col): in_col = table # omit col name
1344
        return str(in_col)
1345
    
1346
    str_ = str(out_table)
1347
    if is_func:
1348
        str_ += '('
1349
        
1350
        try: value_in_col = mapping['value']
1351
        except KeyError:
1352
            str_ += ', '.join((str(k)+'='+in_col_str(v)
1353
                for k, v in mapping.iteritems()))
1354
        else: str_ += in_col_str(value_in_col)
1355
        
1356
        str_ += ')'
1357
    else:
1358
        out_col = 'rank'
1359
        try: in_col = mapping[out_col]
1360
        except KeyError: str_ += '_pkeys'
1361
        else: # has a rank column, so hierarchical
1362
            str_ += '['+str(out_col)+'='+in_col_str(in_col)+']'
1363
    return str_

def put_table(db, out_table, in_tables, mapping, row_ct_ref=None, into=None,
    default=None, is_func=False, on_error=exc.raise_):
    '''Recovers from errors.
    Only works under PostgreSQL (uses INSERT RETURNING).
    IMPORTANT: Must be run at the *beginning* of a transaction.
    @param in_tables The main input table to select from, followed by a list of
        tables to join with it using the main input table's pkey
    @param mapping dict(out_table_col=in_table_col, ...)
        * out_table_col: str (*not* sql_gen.Col)
        * in_table_col: sql_gen.Col|literal value
    @param into The table to contain the output and input pkeys.
        Defaults to a name derived from out_table and the mapping
        (see into_table_name()).
    @param default The *output* column to use as the pkey for missing rows.
        If this output column does not exist in the mapping, uses None.
    @param is_func Whether out_table is the name of a SQL function, not a table
    @return sql_gen.Col Where the output pkeys are made available
    '''
    out_table = sql_gen.as_Table(out_table)
    
    def log_debug(msg): db.log_debug(msg, level=1.5)
    def col_ustr(str_):
        return strings.repr_no_u(sql_gen.remove_col_rename(str_))
    
    log_debug('********** New iteration **********')
    log_debug('Inserting these input columns into '+strings.as_tt(
        out_table.to_str(db))+':\n'+strings.as_table(mapping, ustr=col_ustr))
    
    is_function = function_exists(db, out_table)
    
    if is_function: out_pkey = 'result'
    else: out_pkey = pkey(db, out_table, recover=True)
    out_pkey_col = sql_gen.as_Col(out_pkey, out_table)
    
    if mapping == {}: # need at least one column for INSERT SELECT
        mapping = {out_pkey: None} # ColDict will replace with default value
    
    # Create input joins from list of input tables
    in_tables_ = in_tables[:] # don't modify input!
    in_tables0 = in_tables_.pop(0) # first table is separate
    errors_table_ = errors_table(db, in_tables0)
    in_pkey = pkey(db, in_tables0, recover=True)
    in_pkey_col = sql_gen.as_Col(in_pkey, in_tables0)
    input_joins = [in_tables0]+[sql_gen.Join(v,
        {in_pkey: sql_gen.join_same_not_null}) for v in in_tables_]
    
    if into == None:
        into = into_table_name(out_table, in_tables0, mapping, is_func)
    into = sql_gen.as_Table(into)
    
    # Set column sources
    in_cols = filter(sql_gen.is_table_col, mapping.values())
    for col in in_cols:
        if col.table == in_tables0: col.set_srcs(sql_gen.src_self)
    
    log_debug('Joining together input tables into temp table')
    # Place in new table for speed and so don't modify input if values edited
    in_table = sql_gen.Table('in')
    mapping = dicts.join(mapping, flatten(db, in_table, input_joins, in_cols,
        preserve=[in_pkey_col], start=0))
    input_joins = [in_table]
    db.log_debug('Temp table: '+strings.as_tt(in_table.to_str(db)), level=2)
    
    mapping = sql_gen.ColDict(db, out_table, mapping)
        # after applying dicts.join() because that returns a plain dict
    
    # Resolve default value column
    if default != None:
        try: default = mapping[default]
        except KeyError:
            db.log_debug('Default value column '
                +strings.as_tt(strings.repr_no_u(default))
                +' does not exist in mapping, falling back to None', level=2.1)
            default = None
    
    pkeys_names = [in_pkey, out_pkey]
    pkeys_cols = [in_pkey_col, out_pkey_col]
    
    pkeys_table_exists_ref = [False]
    def insert_into_pkeys(joins, cols, distinct=False):
        kw_args = {}
        if distinct: kw_args.update(dict(distinct_on=[in_pkey_col]))
        query = mk_select(db, joins, cols, order_by=None, start=0, **kw_args)
        
        if pkeys_table_exists_ref[0]:
            insert_select(db, into, pkeys_names, query)
        else:
            run_query_into(db, query, into=into)
            pkeys_table_exists_ref[0] = True
    
    limit_ref = [None]
    conds = set()
    distinct_on = sql_gen.ColDict(db, out_table)
    def mk_main_select(joins, cols):
        distinct_on_cols = [c.to_Col() for c in distinct_on.values()]
        return mk_select(db, joins, cols, conds, distinct_on_cols,
            limit=limit_ref[0], start=0)
    
    exc_strs = set()
    def log_exc(e):
        e_str = exc.str_(e, first_line_only=True)
        log_debug('Caught exception: '+e_str)
        assert e_str not in exc_strs # avoid infinite loops
        exc_strs.add(e_str)
    
    def remove_all_rows():
        log_debug('Ignoring all rows')
        limit_ref[0] = 0 # just create an empty pkeys table
    
    def ignore(in_col, value, e):
        track_data_error(db, errors_table_, in_col.srcs, value, e.cause.pgcode,
            e.cause.pgerror)
        log_debug('Ignoring rows with '+strings.as_tt(repr(in_col))+' = '
            +strings.as_tt(repr(value)))
    
    def remove_rows(in_col, value, e):
        ignore(in_col, value, e)
        cond = (in_col, sql_gen.CompareCond(value, '!='))
        assert cond not in conds # avoid infinite loops
        conds.add(cond)
    
    def invalid2null(in_col, value, e):
        ignore(in_col, value, e)
        update(db, in_table, [(in_col, None)],
            sql_gen.ColValueCond(in_col, value))
    
    def insert_pkeys_table(which):
        return sql_gen.Table(sql_gen.concat(in_table.name,
            '_insert_'+which+'_pkeys'))
    insert_out_pkeys = insert_pkeys_table('out')
    insert_in_pkeys = insert_pkeys_table('in')
    
    # Do inserts and selects
    join_cols = sql_gen.ColDict(db, out_table)
    while True:
        if limit_ref[0] == 0: # special case
            log_debug('Creating an empty pkeys table')
            cur = run_query_into(db, mk_select(db, out_table, [out_pkey],
                limit=limit_ref[0]), into=insert_out_pkeys)
            break # don't do main case
        
        has_joins = join_cols != {}
        
        log_debug('Trying to insert new rows')
        
        # Prepare to insert new rows
        insert_joins = input_joins[:] # don't modify original!
        insert_args = dict(recover=True, cacheable=False)
        if has_joins:
            insert_args.update(dict(ignore=True))
        else:
            insert_args.update(dict(returning=out_pkey, into=insert_out_pkeys))
        main_select = mk_main_select(insert_joins, mapping.values())
        
        def main_insert():
            if is_function:
                log_debug('Calling function on input rows')
                args = dict(((k.name, v) for k, v in mapping.iteritems()))
                func_call = sql_gen.NamedCol(out_pkey,
                    sql_gen.FunctionCall(out_table, **args))
                insert_into_pkeys(input_joins, [in_pkey_col, func_call])
                return None
            else:
                return insert_select(db, out_table, mapping.keys(), main_select,
                    **insert_args)
        
        try:
            cur = with_savepoint(db, main_insert)
            break # insert successful
        except MissingCastException, e:
            log_exc(e)
            
            out_col = e.col
            type_ = e.type
            
            log_debug('Casting '+strings.as_tt(out_col)+' input to '
                +strings.as_tt(type_))
            mapping[out_col] = cast_temp_col(db, type_, mapping[out_col],
                errors_table_)
        except DuplicateKeyException, e:
            log_exc(e)
            
            old_join_cols = join_cols.copy()
            distinct_on.update(util.dict_subset(mapping, e.cols))
            join_cols.update(util.dict_subset_right_join(mapping, e.cols))
            log_debug('Ignoring existing rows, comparing on these columns:\n'
                +strings.as_inline_table(join_cols, ustr=col_ustr))
            assert join_cols != old_join_cols # avoid infinite loops
        except NullValueException, e:
            log_exc(e)
            
            out_col, = e.cols
            try: in_col = mapping[out_col]
            except KeyError:
                log_debug('Missing mapping for NOT NULL column '+out_col)
                remove_all_rows()
            else: remove_rows(in_col, None, e)
        except FunctionValueException, e:
            log_exc(e)
            
            func_name = e.name
            value = e.value
            for out_col, in_col in mapping.iteritems():
                in_col = sql_gen.unwrap_func_call(in_col, func_name)
                invalid2null(in_col, value, e)
        except DatabaseErrors, e:
            log_exc(e)
            
            log_debug('No handler for exception')
            on_error(e)
            remove_all_rows()
        # after exception handled, rerun loop with additional constraints
    
    if cur != None and row_ct_ref != None and cur.rowcount >= 0:
        row_ct_ref[0] += cur.rowcount
    
    if is_function: pass # pkeys table already created
    elif has_joins:
        select_joins = input_joins+[sql_gen.Join(out_table, join_cols)]
        log_debug('Getting output table pkeys of existing/inserted rows')
        insert_into_pkeys(select_joins, pkeys_cols, distinct=True)
    else:
        add_row_num(db, insert_out_pkeys) # for joining with input pkeys
        
        log_debug('Getting input table pkeys of inserted rows')
        run_query_into(db, mk_main_select(input_joins, [in_pkey]),
            into=insert_in_pkeys)
        add_row_num(db, insert_in_pkeys) # for joining with output pkeys
        
        assert table_row_count(db, insert_out_pkeys) == table_row_count(db,
            insert_in_pkeys)
        
        log_debug('Combining output and input pkeys in inserted order')
        pkey_joins = [insert_in_pkeys, sql_gen.Join(insert_out_pkeys,
            {row_num_col: sql_gen.join_same_not_null})]
        insert_into_pkeys(pkey_joins, pkeys_names)
        
        empty_temp(db, [insert_out_pkeys, insert_in_pkeys])
    
    db.log_debug('Adding pkey on pkeys table to enable fast joins', level=2.5)
    add_pkey(db, into)
    
    log_debug('Setting pkeys of missing rows to '+strings.as_tt(repr(default)))
    missing_rows_joins = input_joins+[sql_gen.Join(into,
        {in_pkey: sql_gen.join_same_not_null}, sql_gen.filter_out)]
        # must use join_same_not_null or query will take forever
    insert_into_pkeys(missing_rows_joins,
        [in_pkey_col, sql_gen.NamedCol(out_pkey, default)])
    
    assert table_row_count(db, into) == table_row_count(db, in_table)
    
    empty_temp(db, in_table)
    
    srcs = []
    if is_func: srcs = sql_gen.cols_srcs(in_cols)
    return sql_gen.Col(out_pkey, into, srcs)
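
# Hedged usage sketch (all identifiers hypothetical): maps one staging column
# onto plants.name. Per the docstring, call this at the *beginning* of a
# transaction; the returned sql_gen.Col points into the pkeys table that
# links each input pkey to its output pkey.
def _put_table_example(db, staging):
    row_ct = [0]
    pkeys_col = put_table(db, 'plants', [staging],
        {'name': sql_gen.Col('name', staging)}, row_ct_ref=row_ct)
    return pkeys_col, row_ct[0]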

##### Data cleanup

def cleanup_table(db, table, cols):
    table = sql_gen.as_Table(table)
    cols = map(sql_gen.as_Col, cols)
    
    expr = ('nullif(nullif(trim(both from %s), '+db.esc_value('')+'), '
        +db.esc_value(r'\N')+')')
    changes = [(v, sql_gen.CustomCode(expr % v.to_str(db)))
        for v in cols]
    
    update(db, table, changes, in_place=True)
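
# Hedged sketch: for each column, the generated assignment has roughly the
# form
#     "col" = nullif(nullif(trim(both from "col"), ''), E'\\N')
# i.e. values are whitespace-trimmed, then empty strings and literal \N
# placeholders (the COPY NULL marker) become NULL. Identifiers below are
# hypothetical.
def _cleanup_table_example(db):
    cleanup_table(db, 'plants_src', ['name', 'author'])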