# Database access

import copy
import re
import time
import warnings

import exc
import dicts
import iters
import lists
import profiling
from Proxy import Proxy
import rand
import sql_gen
import strings
import util

##### Exceptions

def get_cur_query(cur, input_query=None):
    raw_query = None
    if hasattr(cur, 'query'): raw_query = cur.query
    elif hasattr(cur, '_last_executed'): raw_query = cur._last_executed
    
    if raw_query != None: return raw_query
    else: return '[input] '+strings.ustr(input_query)

def _add_cursor_info(e, *args, **kw_args):
    '''For params, see get_cur_query()'''
    exc.add_msg(e, 'query: '+strings.ustr(get_cur_query(*args, **kw_args)))

class DbException(exc.ExceptionWithCause):
    def __init__(self, msg, cause=None, cur=None):
        exc.ExceptionWithCause.__init__(self, msg, cause, cause_newline=True)
        if cur != None: _add_cursor_info(self, cur)

class ExceptionWithName(DbException):
    def __init__(self, name, cause=None):
        DbException.__init__(self, 'for name: '
            +strings.as_tt(strings.ustr(name)), cause)
        self.name = name

class ExceptionWithValue(DbException):
    def __init__(self, value, cause=None):
        DbException.__init__(self, 'for value: '
            +strings.as_tt(strings.urepr(value)), cause)
        self.value = value

class ExceptionWithNameType(DbException):
    def __init__(self, type_, name, cause=None):
        DbException.__init__(self, 'for type: '+strings.as_tt(strings.ustr(
            type_))+'; name: '+strings.as_tt(name), cause)
        self.type = type_
        self.name = name

class ConstraintException(DbException):
    def __init__(self, name, cond, cols, cause=None):
        msg = 'Violated '+strings.as_tt(name)+' constraint'
        if cond != None: msg += ' with condition '+strings.as_tt(cond)
        if cols != []: msg += ' on columns: '+strings.as_tt(', '.join(cols))
        DbException.__init__(self, msg, cause)
        self.name = name
        self.cond = cond
        self.cols = cols

class MissingCastException(DbException):
    def __init__(self, type_, col=None, cause=None):
        msg = 'Missing cast to type '+strings.as_tt(type_)
        if col != None: msg += ' on column: '+strings.as_tt(col)
        DbException.__init__(self, msg, cause)
        self.type = type_
        self.col = col

class EncodingException(ExceptionWithName): pass

class DuplicateKeyException(ConstraintException): pass

class NullValueException(ConstraintException): pass

class CheckException(ConstraintException): pass

class InvalidValueException(ExceptionWithValue): pass

class DuplicateException(ExceptionWithNameType): pass

class DoesNotExistException(ExceptionWithNameType): pass

class EmptyRowException(DbException): pass

##### Warnings

class DbWarning(UserWarning): pass

##### Result retrieval

def col_names(cur): return (col[0] for col in cur.description)

def rows(cur): return iter(lambda: cur.fetchone(), None)

def consume_rows(cur):
    '''Used to fetch all rows so result will be cached'''
    iters.consume_iter(rows(cur))

def next_row(cur): return rows(cur).next()

def row(cur):
    row_ = next_row(cur)
    consume_rows(cur)
    return row_

def next_value(cur): return next_row(cur)[0]

def value(cur): return row(cur)[0]

def values(cur): return iters.func_iter(lambda: next_value(cur))

def value_or_none(cur):
    try: return value(cur)
    except StopIteration: return None
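
# Usage sketch (editor's illustration, not part of the original API surface):
# these helpers work on any DB-API cursor returned by run_query() below.
# Assuming a connected DbConn `db` and an existing table named `plot`:
#
#     cur = run_query(db, 'SELECT count(*) FROM plot')
#     n = value(cur)                   # first column of the first row
#     ids = list(values(run_query(db, 'SELECT id FROM plot')))
#                                      # first column of every row
#     maybe = value_or_none(run_query(db, 'SELECT id FROM plot WHERE false'))
#                                      # None instead of StopIteration when empty
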
##### Escaping

def esc_name_by_module(module, name):
    if module == 'psycopg2' or module == None: quote = '"'
    elif module == 'MySQLdb': quote = '`'
    else: raise NotImplementedError("Can't escape name for "+module+' database')
    return sql_gen.esc_name(name, quote)

def esc_name_by_engine(engine, name, **kw_args):
    return esc_name_by_module(db_engines[engine][0], name, **kw_args)

def esc_name(db, name, **kw_args):
    return esc_name_by_module(util.root_module(db.db), name, **kw_args)

def qual_name(db, schema, table):
    def esc_name_(name): return esc_name(db, name)
    table = esc_name_(table)
    if schema != None: return esc_name_(schema)+'.'+table
    else: return table
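
# Illustration (hedged): esc_name() picks the quote character for the connected
# driver, so on a psycopg2 connection `db` the calls below would produce
# double-quoted identifiers (backquotes on a MySQLdb connection):
#
#     esc_name(db, 'my table')           # roughly '"my table"'
#     qual_name(db, 'public', 'plot')    # roughly '"public"."plot"'
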
##### Database connections

db_config_names = ['engine', 'host', 'user', 'password', 'database', 'schemas']

db_engines = {
    'MySQL': ('MySQLdb', {'password': 'passwd', 'database': 'db'}),
    'PostgreSQL': ('psycopg2', {}),
}

DatabaseErrors_set = set([DbException])
DatabaseErrors = tuple(DatabaseErrors_set)

def _add_module(module):
    DatabaseErrors_set.add(module.DatabaseError)
    global DatabaseErrors
    DatabaseErrors = tuple(DatabaseErrors_set)

def db_config_str(db_config):
    return db_config['engine']+' database '+db_config['database']

log_debug_none = lambda msg, level=2: None

class DbConn:
    def __init__(self, db_config, autocommit=True, caching=True,
        log_debug=log_debug_none, debug_temp=False, src=None):
        '''
        @param debug_temp Whether temporary objects should instead be permanent.
            This assists in debugging the internal objects used by the program.
        @param src In autocommit mode, will be included in a comment in every
            query, to help identify the data source in pg_stat_activity.
        '''
        self.db_config = db_config
        self.autocommit = autocommit
        self.caching = caching
        self.log_debug = log_debug
        self.debug = log_debug != log_debug_none
        self.debug_temp = debug_temp
        self.src = src
        self.autoanalyze = False
        self.autoexplain = False
        self.profile_row_ct = None
        
        self._savepoint = 0
        self._reset()
    
    def __getattr__(self, name):
        if name == '__dict__': raise Exception('getting __dict__')
        if name == 'db': return self._db()
        else: raise AttributeError()
    
    def __getstate__(self):
        state = copy.copy(self.__dict__) # shallow copy
        state['log_debug'] = None # don't pickle the debug callback
        state['_DbConn__db'] = None # don't pickle the connection
        return state
    
    def clear_cache(self): self.query_results = {}
    
    def _reset(self):
        self.clear_cache()
        assert self._savepoint == 0
        self._notices_seen = set()
        self.__db = None
    
    def connected(self): return self.__db != None
    
    def close(self):
        if not self.connected(): return
        
        # Record that the automatic transaction is now closed
        self._savepoint -= 1
        
        self.db.close()
        self._reset()
    
    def reconnect(self):
        # Do not do this in test mode as it would roll back everything
        if self.autocommit: self.close()
        # Connection will be reopened automatically on first query
    
    def _db(self):
        if self.__db == None:
            # Process db_config
            db_config = self.db_config.copy() # don't modify input!
            schemas = db_config.pop('schemas', None)
            module_name, mappings = db_engines[db_config.pop('engine')]
            module = __import__(module_name)
            _add_module(module)
            for orig, new in mappings.iteritems():
                try: util.rename_key(db_config, orig, new)
                except KeyError: pass
            
            # Connect
            self.__db = module.connect(**db_config)
            
            # Record that a transaction is already open
            self._savepoint += 1
            
            # Configure connection
            if hasattr(self.db, 'set_isolation_level'):
                import psycopg2.extensions
                self.db.set_isolation_level(
                    psycopg2.extensions.ISOLATION_LEVEL_READ_COMMITTED)
            if schemas != None:
                search_path = [self.esc_name(s) for s in schemas.split(',')]
                search_path.append(value(run_query(self, 'SHOW search_path',
                    log_level=4)))
                run_query(self, 'SET search_path TO '+(','.join(search_path)),
                    log_level=3)
        
        return self.__db
    
    class DbCursor(Proxy):
        def __init__(self, outer):
            Proxy.__init__(self, outer.db.cursor())
            self.outer = outer
            self.query_results = outer.query_results
            self.query_lookup = None
            self.result = []
        
        def execute(self, query):
            self._is_insert = query.startswith('INSERT')
            self.query_lookup = query
            try:
                try: cur = self.inner.execute(query)
                finally: self.query = get_cur_query(self.inner, query)
            except Exception, e:
                self.result = e # cache the exception as the result
                self._cache_result()
                raise
            
            # Always cache certain queries
            query = sql_gen.lstrip(query)
            if query.startswith('CREATE') or query.startswith('ALTER'):
                # structural changes
                # Rest of query must be unique in the face of name collisions,
                # so don't cache ADD COLUMN unless it has distinguishing comment
                if query.find('ADD COLUMN') < 0 or query.endswith('*/'):
                    self._cache_result()
            elif self.rowcount == 0 and query.startswith('SELECT'): # empty
                consume_rows(self) # fetch all rows so result will be cached
            
            return cur
        
        def fetchone(self):
            row = self.inner.fetchone()
            if row != None: self.result.append(row)
            # otherwise, fetched all rows
            else: self._cache_result()
            return row
        
        def _cache_result(self):
            # For inserts that return a result set, don't cache result set since
            # inserts are not idempotent. Other non-SELECT queries don't have
            # their result set read, so only exceptions will be cached (an
            # invalid query will always be invalid).
            if self.query_results != None and (not self._is_insert
                or isinstance(self.result, Exception)):
                
                assert self.query_lookup != None
                self.query_results[self.query_lookup] = self.CacheCursor(
                    util.dict_subset(dicts.AttrsDictView(self),
                    ['query', 'result', 'rowcount', 'description']))
        
        class CacheCursor:
            def __init__(self, cached_result): self.__dict__ = cached_result
            
            def execute(self, *args, **kw_args):
                if isinstance(self.result, Exception): raise self.result
                # otherwise, result is a rows list
                self.iter = iter(self.result)
            
            def fetchone(self):
                try: return self.iter.next()
                except StopIteration: return None
    
    def esc_value(self, value):
        try: str_ = self.mogrify('%s', [value])
        except NotImplementedError, e:
            module = util.root_module(self.db)
            if module == 'MySQLdb':
                import _mysql
                str_ = _mysql.escape_string(value)
            else: raise e
        return strings.to_unicode(str_)
    
    def esc_name(self, name): return esc_name(self, name) # calls global func
    
    def std_code(self, str_):
        '''Standardizes SQL code.
        * Ensures that string literals are prefixed by `E`
        '''
        if str_.startswith("'"): str_ = 'E'+str_
        return str_
    
    def can_mogrify(self):
        module = util.root_module(self.db)
        return module == 'psycopg2'
    
    def mogrify(self, query, params=None):
        if self.can_mogrify(): return self.db.cursor().mogrify(query, params)
        else: raise NotImplementedError("Can't mogrify query")
    
    def set_encoding(self, encoding):
        encoding_str = sql_gen.Literal(encoding)
        run_query(self, 'SET NAMES '+encoding_str.to_str(self))
    
    def print_notices(self):
        if hasattr(self.db, 'notices'):
            for msg in self.db.notices:
                if msg not in self._notices_seen:
                    self._notices_seen.add(msg)
                    self.log_debug(msg, level=2)
    
    def run_query(self, query, cacheable=False, log_level=2,
        debug_msg_ref=None):
        '''
        @param log_ignore_excs The log_level will be increased by 2 if the query
            throws one of these exceptions.
        @param debug_msg_ref If specified, the log message will be returned in
            this instead of being output. This allows you to filter log messages
            depending on the result of the query.
        '''
        assert query != None
        
        if self.autocommit and self.src != None:
            query = sql_gen.esc_comment(self.src)+'\t'+query
        
        if not self.caching: cacheable = False
        used_cache = False
        
        if self.debug:
            profiler = profiling.ItersProfiler(start_now=True, iter_text='row')
        try:
            # Get cursor
            if cacheable:
                try: cur = self.query_results[query]
                except KeyError: cur = self.DbCursor(self)
                else: used_cache = True
            else: cur = self.db.cursor()
            
            # Run query
            try: cur.execute(query)
            except Exception, e:
                _add_cursor_info(e, self, query)
                raise
            else: self.do_autocommit()
        finally:
            if self.debug:
                profiler.stop(self.profile_row_ct)
                
                ## Log or return query
                
                query = strings.ustr(get_cur_query(cur, query))
                # Put the src comment on a separate line in the log file
                query = query.replace('\t', '\n', 1)
                
                msg = 'DB query: '
                
                if used_cache: msg += 'cache hit'
                elif cacheable: msg += 'cache miss'
                else: msg += 'non-cacheable'
                
                msg += ':\n'+profiler.msg()+'\n'+strings.as_code(query, 'SQL')
                
                if debug_msg_ref != None: debug_msg_ref[0] = msg
                else: self.log_debug(msg, log_level)
                
                self.print_notices()
        
        return cur
    
    def is_cached(self, query): return query in self.query_results
    
    def with_autocommit(self, func):
        import psycopg2.extensions
        
        prev_isolation_level = self.db.isolation_level
        self.db.set_isolation_level(
            psycopg2.extensions.ISOLATION_LEVEL_AUTOCOMMIT)
        try: return func()
        finally: self.db.set_isolation_level(prev_isolation_level)
    
    def with_savepoint(self, func):
        top = self._savepoint == 0
        savepoint = 'level_'+str(self._savepoint)
        
        if self.debug:
            self.log_debug('Begin transaction', level=4)
            profiler = profiling.ItersProfiler(start_now=True, iter_text='row')
        
        # Must happen before running queries so they don't get autocommitted
        self._savepoint += 1
        
        if top: query = 'START TRANSACTION ISOLATION LEVEL READ COMMITTED'
        else: query = 'SAVEPOINT '+savepoint
        self.run_query(query, log_level=4)
        try:
            # Store the return value so the COMMIT below is actually reached
            # (returning directly would skip it)
            return_val = func()
            if top: self.run_query('COMMIT', log_level=4)
        except:
            if top: query = 'ROLLBACK'
            else: query = 'ROLLBACK TO SAVEPOINT '+savepoint
            self.run_query(query, log_level=4)
            
            raise
        finally:
            # Always release savepoint, because after ROLLBACK TO SAVEPOINT,
            # "The savepoint remains valid and can be rolled back to again"
            # (http://www.postgresql.org/docs/8.3/static/sql-rollback-to.html).
            if not top:
                self.run_query('RELEASE SAVEPOINT '+savepoint, log_level=4)
            
            self._savepoint -= 1
            assert self._savepoint >= 0
            
            if self.debug:
                profiler.stop(self.profile_row_ct)
                self.log_debug('End transaction\n'+profiler.msg(), level=4)
            
            self.do_autocommit() # OK to do this after ROLLBACK TO SAVEPOINT
        
        return return_val
    
    def do_autocommit(self):
        '''Autocommits if outside savepoint'''
        assert self._savepoint >= 1
        if self.autocommit and self._savepoint == 1:
            self.log_debug('Autocommitting', level=4)
            self.db.commit()
    
    def col_info(self, col, cacheable=True):
        table = sql_gen.Table('columns', 'information_schema')
        cols = [sql_gen.Col('data_type'), sql_gen.Col('udt_name'),
            'column_default', sql_gen.Cast('boolean',
            sql_gen.Col('is_nullable'))]
        
        conds = [('table_name', col.table.name),
            ('column_name', strings.ustr(col.name))]
        schema = col.table.schema
        if schema != None: conds.append(('table_schema', schema))
        
        cur = select(self, table, cols, conds, order_by='table_schema', limit=1,
            cacheable=cacheable, log_level=4) # TODO: order by search_path order
        try: type_, extra_type, default, nullable = row(cur)
        except StopIteration: raise sql_gen.NoUnderlyingTableException(col)
        default = sql_gen.as_Code(default, self)
        if type_ == 'USER-DEFINED': type_ = extra_type
        elif type_ == 'ARRAY':
            type_ = sql_gen.ArrayType(strings.remove_prefix('_', extra_type,
                require=True))
        
        return sql_gen.TypedCol(col.name, type_, default, nullable)
    
    def TempFunction(self, name):
        if self.debug_temp: schema = None
        else: schema = 'pg_temp'
        return sql_gen.Function(name, schema)

connect = DbConn
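
# Connection sketch (illustrative values only; the expected keys are the ones
# in db_config_names above). The DB-API connection is opened lazily on the
# first query (see DbConn._db()), so constructing a DbConn does not connect:
#
#     db = connect(dict(engine='PostgreSQL', host='localhost', user='myuser',
#         password='mypassword', database='mydb', schemas='public'))
#     print value(run_query(db, 'SELECT 1'))
#     db.close()
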
##### Recoverable querying

def parse_exception(db, e, recover=False):
    msg = strings.ustr(e.args[0])
    msg = re.sub(r'^(?:PL/Python: )?ValueError: ', r'', msg)
    
    match = re.match(r'^invalid byte sequence for encoding "(.+?)":', msg)
    if match:
        encoding, = match.groups()
        raise EncodingException(encoding, e)
    
    def make_DuplicateKeyException(constraint, e):
        cols = []
        cond = None
        if recover: # need auto-rollback to run index_cols()
            try:
                cols = index_cols(db, constraint)
                cond = index_cond(db, constraint)
            except NotImplementedError: pass
        return DuplicateKeyException(constraint, cond, cols, e)
    
    match = re.match(r'^duplicate key value violates unique constraint "(.+?)"',
        msg)
    if match:
        constraint, = match.groups()
        raise make_DuplicateKeyException(constraint, e)
    
    match = re.match(r'^could not create unique index "(.+?)"\n'
        r'DETAIL:  Key .+? is duplicated', msg)
    if match:
        constraint, = match.groups()
        raise DuplicateKeyException(constraint, None, [], e)
    
    match = re.match(r'^null value in column "(.+?)" violates not-null'
        r' constraint', msg)
    if match:
        col, = match.groups()
        raise NullValueException('NOT NULL', None, [col], e)
    
    match = re.match(r'^new row for relation "(.+?)" violates check '
        r'constraint "(.+?)"', msg)
    if match:
        table, constraint = match.groups()
        constraint = sql_gen.Col(constraint, table)
        cond = None
        if recover: # need auto-rollback to run constraint_cond()
            try: cond = constraint_cond(db, constraint)
            except NotImplementedError: pass
        raise CheckException(constraint.to_str(db), cond, [], e)
    
    match = re.match(r'^(?:invalid input (?:syntax|value)\b[^:]*'
        r'|.+? out of range)(?:: "(.+?)")?', msg)
    if match:
        value, = match.groups()
        value = util.do_ignore_none(strings.to_unicode, value)
        raise InvalidValueException(value, e)
    
    match = re.match(r'^column "(.+?)" is of type (.+?) but expression '
        r'is of type', msg)
    if match:
        col, type_ = match.groups()
        raise MissingCastException(type_, col, e)
    
    match = re.match(r'^could not determine polymorphic type because '
        r'input has type "unknown"', msg)
    if match: raise MissingCastException('text', None, e)
    
    match = re.match(r'^.+? types (.+?) and (.+?) cannot be matched', msg)
    if match:
        type0, type1 = match.groups()
        raise MissingCastException(type0, None, e)
    
    typed_name_re = r'^(\S+) "?(.+?)"?(?: of relation ".+?")?'
    
    match = re.match(typed_name_re+r'.*? already exists', msg)
    if match:
        type_, name = match.groups()
        raise DuplicateException(type_, name, e)
    
    match = re.match(r'more than one (\S+) named ""(.+?)""', msg)
    if match:
        type_, name = match.groups()
        raise DuplicateException(type_, name, e)
    
    match = re.match(typed_name_re+r' does not exist', msg)
    if match:
        type_, name = match.groups()
        if type_ == 'function':
            match = re.match(r'^(.+?)\(.*\)$', name)
            if match: # includes params, so is call rather than cast to regproc
                function_name, = match.groups()
                func = sql_gen.Function(function_name)
                if function_exists(db, func) and msg.find('CAST') < 0:
                    # not found only because of a missing cast
                    type_ = function_param0_type(db, func)
                    col = None
                    if type_ == 'anyelement': type_ = 'text'
                    elif type_ == 'hstore': # cast just the value param
                        type_ = 'text'
                        col = 'value'
                    raise MissingCastException(type_, col, e)
        raise DoesNotExistException(type_, name, e)
    
    raise # no specific exception raised

def with_savepoint(db, func): return db.with_savepoint(func)
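
# Savepoint sketch (editor's illustration; table name is hypothetical):
# with_savepoint() runs func inside a transaction (at top level) or a SAVEPOINT
# named by nesting depth (level_0, level_1, ...), so a failure only rolls back
# that unit of work:
#
#     def work(): insert(db, 'plot', {'id': 1}); return True
#     with_savepoint(db, work) # rolled back automatically if work() raises
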
def run_query(db, query, recover=None, cacheable=False, log_level=2,
    log_ignore_excs=None, **kw_args):
    '''For params, see DbConn.run_query()'''
    if recover == None: recover = False
    if log_ignore_excs == None: log_ignore_excs = ()
    log_ignore_excs = tuple(log_ignore_excs)
    debug_msg_ref = [None]
    
    query = with_explain_comment(db, query)
    
    try:
        try:
            def run(): return db.run_query(query, cacheable, log_level,
                debug_msg_ref, **kw_args)
            if recover and not db.is_cached(query):
                return with_savepoint(db, run)
            else: return run() # don't need savepoint if cached
        except Exception, e:
            # Give failed EXPLAIN approximately the log_level of its query
            if query.startswith('EXPLAIN'): log_level -= 1
            
            parse_exception(db, e, recover)
    except log_ignore_excs:
        log_level += 2
        raise
    finally:
        if debug_msg_ref[0] != None: db.log_debug(debug_msg_ref[0], log_level)
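
# Recovery sketch (hypothetical table): recover=True wraps the query in a
# savepoint (unless cached) so parse_exception() can translate driver errors
# into the typed exceptions above without aborting the enclosing transaction:
#
#     try:
#         run_query(db, 'INSERT INTO plot (id) VALUES (1)', recover=True,
#             log_ignore_excs=(DuplicateKeyException,))
#     except DuplicateKeyException, e:
#         print e.name, e.cols # the violated constraint and its columns
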
##### Basic queries

def is_explainable(query):
    # See <http://www.postgresql.org/docs/8.3/static/sql-explain.html#AEN57749>
    return re.match(r'^(?:SELECT|INSERT|UPDATE|DELETE|VALUES|EXECUTE|DECLARE)\b'
        , query)

def explain(db, query, **kw_args):
    '''
    For params, see run_query().
    '''
    kw_args.setdefault('log_level', 4)
    
    return strings.ustr(strings.join_lines(values(run_query(db,
        'EXPLAIN '+query, recover=True, cacheable=True, **kw_args))))
        # not a higher log_level because it's useful to see what query is being
        # run before it's executed, which EXPLAIN effectively provides

def has_comment(query): return query.endswith('*/')

def with_explain_comment(db, query, **kw_args):
    if db.autoexplain and not has_comment(query) and is_explainable(query):
        query += '\n'+sql_gen.esc_comment(' EXPLAIN:\n'
            +explain(db, query, **kw_args))
    return query

def next_version(name):
    version = 1 # first existing name was version 0
    match = re.match(r'^(.*)#(\d+)$', name)
    if match:
        name, version = match.groups()
        version = int(version)+1
    return sql_gen.concat(name, '#'+str(version))
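
# next_version() appends or increments a '#<n>' suffix; the CREATE helpers in
# this module use it to retry under a fresh name after a DuplicateException.
# Roughly:
#
#     next_version('widths')    # -> 'widths#1'
#     next_version('widths#1')  # -> 'widths#2'
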
def lock_table(db, table, mode):
    table = sql_gen.as_Table(table)
    run_query(db, 'LOCK TABLE '+table.to_str(db)+' IN '+mode+' MODE')

def run_query_into(db, query, into=None, add_pkey_=False, **kw_args):
    '''Outputs a query to a temp table.
    For params, see run_query().
    '''
    if into == None: return run_query(db, query, **kw_args)
    
    assert isinstance(into, sql_gen.Table)
    
    into.is_temp = True
    # "temporary tables cannot specify a schema name", so remove schema
    into.schema = None
    
    kw_args['recover'] = True
    kw_args.setdefault('log_ignore_excs', (DuplicateException,))
    
    temp = not db.debug_temp # tables are permanent in debug_temp mode
    
    # Create table
    while True:
        create_query = 'CREATE'
        if temp: create_query += ' TEMP'
        create_query += ' TABLE '+into.to_str(db)+' AS\n'+query
        
        try:
            cur = run_query(db, create_query, **kw_args)
                # CREATE TABLE AS sets rowcount to # rows in query
            break
        except DuplicateException, e:
            into.name = next_version(into.name)
            # try again with next version of name
    
    if add_pkey_: add_pkey_or_index(db, into, warn=True)
    
    # According to the PostgreSQL doc, "The autovacuum daemon cannot access and
    # therefore cannot vacuum or analyze temporary tables. [...] if a temporary
    # table is going to be used in complex queries, it is wise to run ANALYZE on
    # the temporary table after it is populated."
    # (http://www.postgresql.org/docs/9.1/static/sql-createtable.html)
    # If into is not a temp table, ANALYZE is useful but not required.
    analyze(db, into)
    
    return cur
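
# Materialization sketch (hypothetical table/column names): run_query_into()
# wraps any SELECT in CREATE TEMP TABLE ... AS, retrying under versioned names
# on collision, then ANALYZEs the result:
#
#     out = sql_gen.Table('recent_plots')
#     run_query_into(db, mk_select(db, 'plot', conds={'year': 2012}),
#         into=out, add_pkey_=True)
#     # out.name may now be e.g. 'recent_plots#1' if the original name was taken
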
order_by_pkey = object() # tells mk_select() to order by the pkey

distinct_on_all = object() # tells mk_select() to SELECT DISTINCT ON all columns

def mk_select(db, tables=None, fields=None, conds=None, distinct_on=[],
    limit=None, start=None, order_by=order_by_pkey, default_table=None,
    explain=True):
    '''
    @param tables The single table to select from, or a list of tables to join
        together, with tables after the first being sql_gen.Join objects
    @param fields Use None to select all fields in the table
    @param conds WHERE conditions: [(compare_left_side, compare_right_side),...]
        * container can be any iterable type
        * compare_left_side: sql_gen.Code|str (for col name)
        * compare_right_side: sql_gen.ValueCond|literal value
    @param distinct_on The columns to SELECT DISTINCT ON, or distinct_on_all to
        use all columns
    @return query
    '''
    # Parse tables param
    tables = lists.mk_seq(tables)
    tables = list(tables) # don't modify input! (list() copies input)
    table0 = sql_gen.as_Table(tables.pop(0)) # first table is separate
    
    # Parse other params
    if conds == None: conds = []
    elif dicts.is_dict(conds): conds = conds.items()
    conds = list(conds) # don't modify input! (list() copies input)
    assert limit == None or isinstance(limit, (int, long))
    assert start == None or isinstance(start, (int, long))
    if limit == 0: order_by = None
    if order_by is order_by_pkey:
        if lists.is_seq(distinct_on) and distinct_on: order_by = distinct_on[0]
        elif table0 != None: order_by = table_order_by(db, table0, recover=True)
        else: order_by = None
    
    query = 'SELECT'
    
    def parse_col(col): return sql_gen.as_Col(col, default_table).to_str(db)
    
    # DISTINCT ON columns
    if distinct_on != []:
        query += '\nDISTINCT'
        if distinct_on is not distinct_on_all:
            query += ' ON ('+(', '.join(map(parse_col, distinct_on)))+')'
    
    # Columns
    if query.find('\n') >= 0: whitespace = '\n'
    else: whitespace = ' '
    if fields == None: query += whitespace+'*'
    else:
        assert fields != []
        if len(fields) > 1: whitespace = '\n'
        query += whitespace+('\n, '.join(map(parse_col, fields)))
    
    # Main table
    if query.find('\n') >= 0 or len(tables) > 0: whitespace = '\n'
    else: whitespace = ' '
    if table0 != None: query += whitespace+'FROM '+table0.to_str(db)
    
    # Add joins
    left_table = table0
    for join_ in tables:
        table = join_.table
        
        # Parse special values
        if join_.type_ is sql_gen.filter_out: # filter no match
            conds.append((sql_gen.Col(table_not_null_col(db, table), table),
                sql_gen.CompareCond(None, '~=')))
        
        query += '\n'+join_.to_str(db, left_table)
        
        left_table = table
    
    missing = True
    if conds != []:
        if len(conds) == 1: whitespace = ' '
        else: whitespace = '\n'
        query += '\n'+sql_gen.combine_conds([sql_gen.ColValueCond(l, r)
            .to_str(db) for l, r in conds], 'WHERE')
    if order_by != None:
        query += '\nORDER BY '+sql_gen.as_Col(order_by).to_str(db)
    if limit != None: query += '\nLIMIT '+str(limit)
    if start != None:
        if start != 0: query += '\nOFFSET '+str(start)
    
    if explain: query = with_explain_comment(db, query)
    
    return query

def select(db, *args, **kw_args):
    '''For params, see mk_select() and run_query()'''
    recover = kw_args.pop('recover', None)
    cacheable = kw_args.pop('cacheable', True)
    log_level = kw_args.pop('log_level', 2)
    
    return run_query(db, mk_select(db, *args, **kw_args), recover, cacheable,
        log_level=log_level)
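
# Query-building sketch: mk_select() only builds the SQL string; select() also
# runs it. The join syntax below mirrors the information_schema join used in
# table_pkey_col() and assumes hypothetical plot/stem tables sharing plot_id:
#
#     joins = ['plot',
#         sql_gen.Join('stem', {'plot_id': sql_gen.join_same_not_null})]
#     cur = select(db, joins, fields=['plot_id', 'height'],
#         conds={'height': sql_gen.CompareCond(10, '>=')}, limit=100)
#     for r in rows(cur): print r
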
def mk_insert_select(db, table, cols=None, select_query=None, returning=None,
    embeddable=False, ignore=False, src=None):
    '''
    @param returning str|None An inserted column (such as pkey) to return
    @param embeddable Whether the query should be embeddable as a nested SELECT.
        Warning: If you set this and cacheable=True when the query is run, the
        query will be fully cached, not just if it raises an exception.
    @param ignore Whether to ignore duplicate keys.
    @param src Will be included in the name of any created function, to help
        identify the data source in pg_stat_activity.
    '''
    table = sql_gen.remove_table_rename(sql_gen.as_Table(table))
    if cols == []: cols = None # no cols (all defaults) = unknown col names
    if cols != None: cols = [sql_gen.to_name_only_col(c, table) for c in cols]
    if select_query == None: select_query = 'DEFAULT VALUES'
    if returning != None: returning = sql_gen.as_Col(returning, table)
    
    first_line = 'INSERT INTO '+table.to_str(db)
    
    def mk_insert(select_query):
        query = first_line
        if cols != None:
            query += '\n('+(', '.join((c.to_str(db) for c in cols)))+')'
        query += '\n'+select_query
        
        if returning != None:
            returning_name_col = sql_gen.to_name_only_col(returning)
            query += '\nRETURNING '+returning_name_col.to_str(db)
        
        return query
    
    return_type = sql_gen.CustomCode('unknown')
    if returning != None: return_type = sql_gen.ColType(returning)
    
    if ignore:
        # Always return something to set the correct rowcount
        if returning == None: returning = sql_gen.NamedCol('NULL', None)
        
        embeddable = True # must use function
        
        if cols == None: row = [sql_gen.Col(sql_gen.all_cols, 'row')]
        else: row = [sql_gen.Col(c.name, 'row') for c in cols]
        
        query = sql_gen.RowExcIgnore(sql_gen.RowType(table), select_query,
            sql_gen.ReturnQuery(mk_insert(sql_gen.Values(row).to_str(db))),
            cols)
    else: query = mk_insert(select_query)
    
    if embeddable:
        # Create function
        function_name = sql_gen.clean_name(first_line)
        if src != None: function_name = src+': '+function_name
        while True:
            try:
                func = db.TempFunction(function_name)
                def_ = sql_gen.FunctionDef(func, sql_gen.SetOf(return_type),
                    query)
                
                run_query(db, def_.to_str(db), recover=True, cacheable=True,
                    log_ignore_excs=(DuplicateException,))
                break # this version was successful
            except DuplicateException, e:
                function_name = next_version(function_name)
                # try again with next version of name
        
        # Return query that uses function
        cols = None
        if returning != None: cols = [returning]
        func_table = sql_gen.NamedTable('f', sql_gen.FunctionCall(func), cols)
            # AS clause requires function alias
        return mk_select(db, func_table, order_by=None)
    
    return query

def insert_select(db, table, *args, **kw_args):
    '''For params, see mk_insert_select() and run_query_into()
    @param into sql_gen.Table with suggested name of temp table to put RETURNING
        values in
    '''
    returning = kw_args.get('returning', None)
    ignore = kw_args.get('ignore', False)
    
    into = kw_args.pop('into', None)
    if into != None: kw_args['embeddable'] = True
    recover = kw_args.pop('recover', None)
    if ignore: recover = True
    cacheable = kw_args.pop('cacheable', True)
    log_level = kw_args.pop('log_level', 2)
    
    rowcount_only = ignore and returning == None # keep NULL rows on server
    if rowcount_only: into = sql_gen.Table('rowcount')
    
    cur = run_query_into(db, mk_insert_select(db, table, *args, **kw_args),
        into, recover=recover, cacheable=cacheable, log_level=log_level)
    if rowcount_only: empty_temp(db, into)
    autoanalyze(db, table)
    return cur

default = sql_gen.default # tells insert() to use the default value for a column

def insert(db, table, row, *args, **kw_args):
    '''For params, see insert_select()'''
    ignore = kw_args.pop('ignore', False)
    if ignore: kw_args.setdefault('recover', True)
    
    if lists.is_seq(row): cols = None
    else:
        cols = row.keys()
        row = row.values()
    row = list(row) # ensure that "== []" works
    
    if row == []: query = None
    else: query = sql_gen.Values(row).to_str(db)
    
    try: return insert_select(db, table, cols, query, *args, **kw_args)
    except (DuplicateKeyException, NullValueException):
        if not ignore: raise
        return None
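
# Insert sketch (hypothetical table/columns): insert() accepts a sequence of
# values or a dict of col=value, and ignore=True swallows duplicate-key and
# NOT NULL violations by returning None:
#
#     insert(db, 'plot', {'id': 1, 'name': 'A'})               # dict row
#     insert(db, 'plot', [2, 'B'])                              # positional row
#     insert(db, 'plot', {'id': 1, 'name': 'A'}, ignore=True)   # quietly skipped
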
def mk_update(db, table, changes=None, cond=None, in_place=False,
    cacheable_=True):
    '''
    @param changes [(col, new_value),...]
        * container can be any iterable type
        * col: sql_gen.Code|str (for col name)
        * new_value: sql_gen.Code|literal value
    @param cond sql_gen.Code WHERE condition. e.g. use sql_gen.*Cond objects.
    @param in_place If set, locks the table and updates rows in place.
        This avoids creating dead rows in PostgreSQL.
        * cond must be None
    @param cacheable_ Whether column structure information used to generate the
        query can be cached
    @return str query
    '''
    table = sql_gen.as_Table(table)
    changes = [(sql_gen.to_name_only_col(c, table), sql_gen.as_Value(v))
        for c, v in changes]
    
    if in_place:
        assert cond == None
        
        def col_type(col):
            # use the col param rather than relying on the enclosing loop var
            return sql_gen.canon_type(db.col_info(
                sql_gen.with_default_table(col, table), cacheable_).type)
        changes = [(c, v, col_type(c)) for c, v in changes]
        query = 'ALTER TABLE '+table.to_str(db)+'\n'
        query += ',\n'.join(('ALTER COLUMN '+c.to_str(db)+' TYPE '+t+'\nUSING '
            +v.to_str(db) for c, v, t in changes))
    else:
        query = 'UPDATE '+table.to_str(db)+'\nSET\n'
        query += ',\n'.join((c.to_str(db)+' = '+v.to_str(db)
            for c, v in changes))
        if cond != None: query += '\nWHERE\n'+cond.to_str(db)
    
    query = with_explain_comment(db, query)
    
    return query

def update(db, table, *args, **kw_args):
    '''For params, see mk_update() and run_query()'''
    recover = kw_args.pop('recover', None)
    cacheable = kw_args.pop('cacheable', False)
    log_level = kw_args.pop('log_level', 2)
    
    cur = run_query(db, mk_update(db, table, *args, **kw_args), recover,
        cacheable, log_level=log_level)
    autoanalyze(db, table)
    return cur
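
# Update sketch (hypothetical table/columns): mk_update() renders a plain
# UPDATE; with in_place=True it instead emits ALTER TABLE ... TYPE ... USING
# so PostgreSQL rewrites rows without leaving dead tuples:
#
#     update(db, 'plot', [('name', 'unknown')],
#         cond=sql_gen.ColValueCond('name', None))
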
def mk_delete(db, table, cond=None):
    '''
    @param cond sql_gen.Code WHERE condition. e.g. use sql_gen.*Cond objects.
    @return str query
    '''
    query = 'DELETE FROM '+table.to_str(db)
    if cond != None: query += '\nWHERE '+cond.to_str(db)
    
    query = with_explain_comment(db, query)
    
    return query

def delete(db, table, *args, **kw_args):
    '''For params, see mk_delete() and run_query()'''
    recover = kw_args.pop('recover', None)
    cacheable = kw_args.pop('cacheable', True)
    log_level = kw_args.pop('log_level', 2)
    
    cur = run_query(db, mk_delete(db, table, *args, **kw_args), recover,
        cacheable, log_level=log_level)
    autoanalyze(db, table)
    return cur

def last_insert_id(db):
    module = util.root_module(db.db)
    if module == 'psycopg2': return value(run_query(db, 'SELECT lastval()'))
    elif module == 'MySQLdb': return db.insert_id()
    else: return None

def define_func(db, def_):
    func = def_.function
    while True:
        try:
            run_query(db, def_.to_str(db), recover=True, cacheable=True,
                log_ignore_excs=(DuplicateException,))
            break # successful
        except DuplicateException:
            func.name = next_version(func.name)
            # try again with next version of name

def mk_flatten_mapping(db, into, cols, preserve=[], as_items=False):
    '''Creates a mapping from original column names (which may have collisions)
    to names that will be distinct among the columns' tables.
    This is meant to be used for several tables that are being joined together.
    @param cols The columns to combine. Duplicates will be removed.
    @param into The table for the new columns.
    @param preserve [sql_gen.Col...] Columns not to rename. Note that these
        columns will be included in the mapping even if they are not in cols.
        The tables of the provided Col objects will be changed to into, so make
        copies of them if you want to keep the original tables.
    @param as_items Whether to return a list of dict items instead of a dict
    @return dict(orig_col=new_col, ...)
        * orig_col: sql_gen.Col(orig_col_name, orig_table)
        * new_col: sql_gen.Col(orig_col_name, into)
        * All mappings use the into table so its name can easily be
          changed for all columns at once
    '''
    cols = lists.uniqify(cols)
    
    items = []
    for col in preserve:
        orig_col = copy.copy(col)
        col.table = into
        items.append((orig_col, col))
    preserve = set(preserve)
    for col in cols:
        if col not in preserve:
            items.append((col, sql_gen.Col(strings.ustr(col), into, col.srcs)))
    
    if not as_items: items = dict(items)
    return items

def flatten(db, into, joins, cols, limit=None, start=None, **kw_args):
    '''For params, see mk_flatten_mapping()
    @return See return value of mk_flatten_mapping()
    '''
    items = mk_flatten_mapping(db, into, cols, as_items=True, **kw_args)
    cols = [sql_gen.NamedCol(new.name, old) for old, new in items]
    run_query_into(db, mk_select(db, joins, cols, limit=limit, start=start),
        into=into, add_pkey_=True)
        # don't cache because the temp table will usually be truncated after use
    return dict(items)
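
# Flatten sketch (hypothetical tables; joins as you would pass to mk_select()):
# when joining tables whose column names collide, flatten() materializes the
# join into one temp table and returns a mapping from each original column to
# its renamed, collision-free column on that table:
#
#     cols = [sql_gen.Col('id', 'plot'), sql_gen.Col('id', 'stem')]
#     mapping = flatten(db, sql_gen.Table('joined'), joins, cols)
#     # mapping[sql_gen.Col('id', 'plot')] is a Col on 'joined' whose name is
#     # derived from the qualified source column, so the two ids no longer clash
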
##### Database structure introspection

#### Tables

def tables(db, schema_like='public', table_like='%', exact=False,
    cacheable=True):
    if exact: compare = '='
    else: compare = 'LIKE'
    
    module = util.root_module(db.db)
    if module == 'psycopg2':
        conds = [('schemaname', sql_gen.CompareCond(schema_like, compare)),
            ('tablename', sql_gen.CompareCond(table_like, compare))]
        return values(select(db, 'pg_tables', ['tablename'], conds,
            order_by='tablename', cacheable=cacheable, log_level=4))
    elif module == 'MySQLdb':
        return values(run_query(db, 'SHOW TABLES LIKE '+db.esc_value(table_like)
            , cacheable=True, log_level=4))
    else: raise NotImplementedError("Can't list tables for "+module+' database')

def table_exists(db, table, cacheable=True):
    table = sql_gen.as_Table(table)
    return list(tables(db, table.schema, table.name, True, cacheable)) != []

def table_row_count(db, table, recover=None):
    return value(run_query(db, mk_select(db, table, [sql_gen.row_count],
        order_by=None), recover=recover, log_level=3))

def table_col_names(db, table, recover=None):
    return list(col_names(select(db, table, limit=0, recover=recover,
        log_level=4)))

def table_cols(db, table, *args, **kw_args):
    return [sql_gen.as_Col(strings.ustr(c), table)
        for c in table_col_names(db, table, *args, **kw_args)]

def table_pkey_index(db, table, recover=None):
    table_str = sql_gen.Literal(table.to_str(db))
    try:
        return sql_gen.Table(value(run_query(db, '''\
SELECT relname
FROM pg_index
JOIN pg_class index ON index.oid = indexrelid
WHERE
indrelid = '''+table_str.to_str(db)+'''::regclass
AND indisprimary
'''
            , recover, cacheable=True, log_level=4)), table.schema)
    except StopIteration: raise DoesNotExistException('primary key', '')

def table_pkey_col(db, table, recover=None):
    table = sql_gen.as_Table(table)
    
    module = util.root_module(db.db)
    if module == 'psycopg2':
        return sql_gen.Col(index_cols(db, table_pkey_index(db, table,
            recover))[0], table)
    else:
        join_cols = ['table_schema', 'table_name', 'constraint_schema',
            'constraint_name']
        tables = [sql_gen.Table('key_column_usage', 'information_schema'),
            sql_gen.Join(
                sql_gen.Table('table_constraints', 'information_schema'),
                dict(((c, sql_gen.join_same_not_null) for c in join_cols)))]
        cols = [sql_gen.Col('column_name')]
        
        conds = [('constraint_type', 'PRIMARY KEY'), ('table_name', table.name)]
        schema = table.schema
        if schema != None: conds.append(('table_schema', schema))
        order_by = 'position_in_unique_constraint'
        
        try: return sql_gen.Col(value(select(db, tables, cols, conds,
            order_by=order_by, limit=1, log_level=4)), table)
        except StopIteration: raise DoesNotExistException('primary key', '')

def table_has_pkey(db, table, recover=None):
    try: table_pkey_col(db, table, recover)
    except DoesNotExistException: return False
    else: return True

def pkey_name(db, table, recover=None):
    '''If no pkey, returns the first column in the table.'''
    return pkey_col(db, table, recover).name

def pkey_col(db, table, recover=None):
    '''If no pkey, returns the first column in the table.'''
    try: return table_pkey_col(db, table, recover)
    except DoesNotExistException: return table_cols(db, table, recover)[0]

not_null_col = 'not_null_col'

def table_not_null_col(db, table, recover=None):
    '''Name assumed to be the value of not_null_col. If not found, uses pkey.'''
    if not_null_col in table_col_names(db, table, recover): return not_null_col
    else: return pkey_name(db, table, recover)

def constraint_cond(db, constraint):
    module = util.root_module(db.db)
    if module == 'psycopg2':
        table_str = sql_gen.Literal(constraint.table.to_str(db))
        name_str = sql_gen.Literal(constraint.name)
        return value(run_query(db, '''\
SELECT consrc
FROM pg_constraint
WHERE
conrelid = '''+table_str.to_str(db)+'''::regclass
AND conname = '''+name_str.to_str(db)+'''
'''
            , cacheable=True, log_level=4))
    else: raise NotImplementedError("Can't get constraint condition for "
        +module+' database')

def index_exprs(db, index):
    index = sql_gen.as_Table(index)
    module = util.root_module(db.db)
    if module == 'psycopg2':
        qual_index = sql_gen.Literal(index.to_str(db))
        return list(values(run_query(db, '''\
SELECT pg_get_indexdef(indexrelid, generate_series(1, indnatts), true)
FROM pg_index
WHERE indexrelid = '''+qual_index.to_str(db)+'''::regclass
'''
            , cacheable=True, log_level=4)))
    else: raise NotImplementedError()

def index_cols(db, index):
    '''Can also use this for UNIQUE constraints, because a UNIQUE index is
    automatically created. When you don't know whether something is a UNIQUE
    constraint or a UNIQUE index, use this function.'''
    return map(sql_gen.parse_expr_col, index_exprs(db, index))

def index_cond(db, index):
    index = sql_gen.as_Table(index)
    module = util.root_module(db.db)
    if module == 'psycopg2':
        qual_index = sql_gen.Literal(index.to_str(db))
        return value(run_query(db, '''\
SELECT pg_get_expr(indpred, indrelid, true)
FROM pg_index
WHERE indexrelid = '''+qual_index.to_str(db)+'''::regclass
'''
            , cacheable=True, log_level=4))
    else: raise NotImplementedError()

def index_order_by(db, index):
    return sql_gen.CustomCode(', '.join(index_exprs(db, index)))

def table_cluster_on(db, table, recover=None):
    '''
    @return The table's cluster index, or its pkey if none is set
    '''
    table_str = sql_gen.Literal(table.to_str(db))
    try:
        return sql_gen.Table(value(run_query(db, '''\
SELECT relname
FROM pg_index
JOIN pg_class index ON index.oid = indexrelid
WHERE
indrelid = '''+table_str.to_str(db)+'''::regclass
AND indisclustered
'''
            , recover, cacheable=True, log_level=4)), table.schema)
    except StopIteration: return table_pkey_index(db, table, recover)

def table_order_by(db, table, recover=None):
    if table.order_by == None:
        try: table.order_by = index_order_by(db, table_cluster_on(db, table,
            recover))
        except DoesNotExistException: pass
    return table.order_by

#### Functions

def function_exists(db, function):
    qual_function = sql_gen.Literal(function.to_str(db))
    try:
        select(db, fields=[sql_gen.Cast('regproc', qual_function)],
            recover=True, cacheable=True, log_level=4)
    except DoesNotExistException: return False
    except DuplicateException: return True # overloaded function
    else: return True

def function_param0_type(db, function):
    qual_function = sql_gen.Literal(function.to_str(db))
    return value(run_query(db, '''\
SELECT proargtypes[0]::regtype
FROM pg_proc
WHERE oid = '''+qual_function.to_str(db)+'''::regproc
'''
        , cacheable=True, log_level=4))

##### Structural changes

#### Columns

def add_col(db, table, col, comment=None, if_not_exists=False, **kw_args):
    '''
    @param col TypedCol Name may be versioned, so be sure to propagate any
        renaming back to any source column for the TypedCol.
    @param comment None|str SQL comment used to distinguish columns of the same
        name from each other when they contain different data, to allow the
        ADD COLUMN query to be cached. If not set, query will not be cached.
    '''
    assert isinstance(col, sql_gen.TypedCol)
    
    while True:
        str_ = 'ALTER TABLE '+table.to_str(db)+' ADD COLUMN '+col.to_str(db)
        if comment != None: str_ += ' '+sql_gen.esc_comment(comment)
        
        try:
            run_query(db, str_, recover=True, cacheable=True, **kw_args)
            break
        except DuplicateException:
            if if_not_exists: raise
            col.name = next_version(col.name)
            # try again with next version of name

def add_not_null(db, col):
    table = col.table
    col = sql_gen.to_name_only_col(col)
    run_query(db, 'ALTER TABLE '+table.to_str(db)+' ALTER COLUMN '
        +col.to_str(db)+' SET NOT NULL', cacheable=True, log_level=3)

def drop_not_null(db, col):
    table = col.table
    col = sql_gen.to_name_only_col(col)
    run_query(db, 'ALTER TABLE '+table.to_str(db)+' ALTER COLUMN '
        +col.to_str(db)+' DROP NOT NULL', cacheable=True, log_level=3)

row_num_col = '_row_num'

row_num_col_def = sql_gen.TypedCol('', 'serial', nullable=False,
    constraints='PRIMARY KEY')

def add_row_num(db, table, name=row_num_col):
    '''Adds a row number column to a table. Its definition is in
    row_num_col_def. It will be the primary key.'''
    col_def = copy.copy(row_num_col_def)
    col_def.name = name
    add_col(db, table, col_def, comment='', if_not_exists=True, log_level=3)

#### Indexes

def add_pkey(db, table, cols=None, recover=None):
    '''Adds a primary key.
    @param cols [sql_gen.Col,...] The columns in the primary key.
        Defaults to the first column in the table.
    @pre The table must not already have a primary key.
    '''
    table = sql_gen.as_Table(table)
    if cols == None: cols = [pkey_name(db, table, recover)]
    col_strs = [sql_gen.to_name_only_col(v).to_str(db) for v in cols]
    
    run_query(db, 'ALTER TABLE '+table.to_str(db)+' ADD PRIMARY KEY ('
        +(', '.join(col_strs))+')', recover=True, cacheable=True, log_level=3,
        log_ignore_excs=(DuplicateException,))

def add_index(db, exprs, table=None, unique=False, ensure_not_null_=True):
    '''Adds an index on column(s) or expression(s) if it doesn't already exist.
    Currently, only function calls and literal values are supported expressions.
    @param ensure_not_null_ If set, translates NULL values to sentinel values.
        This allows indexes to be used for comparisons where NULLs are equal.
    '''
    exprs = lists.mk_seq(exprs)
    
    # Parse exprs
    old_exprs = exprs[:]
    exprs = []
    cols = []
    for i, expr in enumerate(old_exprs):
        expr = sql_gen.as_Col(expr, table)
        
        # Handle nullable columns
        if ensure_not_null_:
            try: expr = sql_gen.ensure_not_null(db, expr)
            except KeyError: pass # unknown type, so just create plain index
        
        # Extract col
        expr = copy.deepcopy(expr) # don't modify input!
        col = expr
        if isinstance(expr, sql_gen.FunctionCall): col = expr.args[0]
        expr = sql_gen.cast_literal(expr)
        if not isinstance(expr, (sql_gen.Expr, sql_gen.Col)):
            expr = sql_gen.Expr(expr)
        
        # Extract table
        if table == None:
            assert sql_gen.is_table_col(col)
            table = col.table
        
        if isinstance(col, sql_gen.Col): col.table = None
        
        exprs.append(expr)
        cols.append(col)
    
    table = sql_gen.as_Table(table)
    
    # Add index
    str_ = 'CREATE'
    if unique: str_ += ' UNIQUE'
    str_ += ' INDEX ON '+table.to_str(db)+' ('+(
        ', '.join((v.to_str(db) for v in exprs)))+')'
    run_query(db, str_, recover=True, cacheable=True, log_level=3)
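
# Index sketch (hypothetical table/columns): add_index() accepts one column,
# several columns, or expressions; with the default ensure_not_null_=True it
# may wrap nullable columns so NULLs compare equal (used by distinct_table()
# below for join optimization):
#
#     add_index(db, 'plot_id', 'stem')              # single-column index
#     add_index(db, ['plot_id', 'height'], 'stem')  # multi-column index
#     add_pkey_or_index(db, 'stem')                 # pkey, or index on failure
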
def add_pkey_index(db, table): add_index(db, pkey_col(db, table), table)

def add_pkey_or_index(db, table, cols=None, recover=None, warn=False):
    try: add_pkey(db, table, cols, recover)
    except DuplicateKeyException, e:
        if warn: warnings.warn(UserWarning(exc.str_(e)))
        add_pkey_index(db, table)

already_indexed = object() # tells add_indexes() the pkey has already been added

def add_indexes(db, table, has_pkey=True):
    '''Adds an index on all columns in a table.
    @param has_pkey bool|already_indexed Whether a pkey instead of a regular
        index should be added on the first column.
        * If already_indexed, the pkey is assumed to have already been added
    '''
    cols = table_col_names(db, table)
    if has_pkey:
        if has_pkey is not already_indexed: add_pkey(db, table)
        cols = cols[1:]
    for col in cols: add_index(db, col, table)

#### Tables

### Maintenance

def analyze(db, table):
    table = sql_gen.as_Table(table)
    run_query(db, 'ANALYZE '+table.to_str(db), log_level=3)

def autoanalyze(db, table):
    if db.autoanalyze: analyze(db, table)

def vacuum(db, table):
    table = sql_gen.as_Table(table)
    db.with_autocommit(lambda: run_query(db, 'VACUUM ANALYZE '+table.to_str(db),
        log_level=3))

### Lifecycle

def drop(db, type_, name):
    name = sql_gen.as_Name(name)
    run_query(db, 'DROP '+type_+' IF EXISTS '+name.to_str(db)+' CASCADE')

def drop_table(db, table): drop(db, 'TABLE', table)

def create_table(db, table, cols=[], has_pkey=True, col_indexes=True,
    like=None):
    '''Creates a table.
    @param cols [sql_gen.TypedCol,...] The column names and types
    @param has_pkey If set, the first column becomes the primary key.
    @param col_indexes bool|[ref]
        * If True, indexes will be added on all non-pkey columns.
        * If a list reference, [0] will be set to a function to do this.
          This can be used to delay index creation until the table is populated.
    '''
    table = sql_gen.as_Table(table)
    
    if like != None:
        cols = [sql_gen.CustomCode('LIKE '+like.to_str(db)+' INCLUDING ALL')
            ]+cols
        table.order_by = like.order_by
    if has_pkey:
        cols[0] = pkey = copy.copy(cols[0]) # don't modify input!
        pkey.constraints = 'PRIMARY KEY'
    
    temp = table.is_temp and not db.debug_temp
        # temp tables permanent in debug_temp mode
    
    # Create table
    def create():
        str_ = 'CREATE'
        if temp: str_ += ' TEMP'
        str_ += ' TABLE '+table.to_str(db)+' (\n'
        str_ += '\n, '.join(c.to_str(db) for c in cols)
        str_ += '\n);'
        
        run_query(db, str_, recover=True, cacheable=True, log_level=2,
            log_ignore_excs=(DuplicateException,))
    if table.is_temp:
        while True:
            try:
                create()
                break
            except DuplicateException:
                table.name = next_version(table.name)
                # try again with next version of name
    else: create()
    
    # Add indexes
    if has_pkey: has_pkey = already_indexed
    def add_indexes_(): add_indexes(db, table, has_pkey)
    if isinstance(col_indexes, list): col_indexes[0] = add_indexes_ # defer
    elif col_indexes: add_indexes_() # add now
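
# Table-creation sketch (hypothetical table/columns): create_table() takes
# sql_gen.TypedCol definitions; passing a one-element list for col_indexes
# defers per-column index creation until after a bulk load:
#
#     col_indexes = [None]
#     create_table(db, 'stem', [sql_gen.TypedCol('id', 'serial'),
#         sql_gen.TypedCol('height_m', 'double precision')],
#         col_indexes=col_indexes)
#     # ... bulk-load rows ...
#     col_indexes[0]() # now add the deferred indexes
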
def copy_table_struct(db, src, dest):
    '''Creates a structure-only copy of a table. (Does not copy data.)'''
    create_table(db, dest, has_pkey=False, col_indexes=False, like=src)

def copy_table(db, src, dest):
    '''Creates a copy of a table, including data'''
    copy_table_struct(db, src, dest)
    insert_select(db, dest, None, mk_select(db, src))

### Data

def truncate(db, table, schema='public', **kw_args):
    '''For params, see run_query()'''
    table = sql_gen.as_Table(table, schema)
    return run_query(db, 'TRUNCATE '+table.to_str(db)+' CASCADE', **kw_args)

def empty_temp(db, tables):
    tables = lists.mk_seq(tables)
    for table in tables: truncate(db, table, log_level=3)

def empty_db(db, schema='public', **kw_args):
    '''For kw_args, see tables()'''
    for table in tables(db, schema, **kw_args): truncate(db, table, schema)

def distinct_table(db, table, distinct_on):
    '''Creates a copy of a temp table which is distinct on the given columns.
    Adds an index on table's distinct_on columns, to facilitate merge joins.
    @param distinct_on If empty, creates a table with one row. This is useful if
        your distinct_on columns are all literal values.
    @return The new table.
    '''
    new_table = sql_gen.suffixed_table(table, '_distinct')
    distinct_on = filter(sql_gen.is_table_col, distinct_on)
    
    copy_table_struct(db, table, new_table)
    
    limit = None
    if distinct_on == []: limit = 1 # one sample row
    else: add_index(db, distinct_on, table) # for join optimization
    
    insert_select(db, new_table, None, mk_select(db, table,
        distinct_on=distinct_on, order_by=None, limit=limit))
    analyze(db, new_table)
    
    return new_table