1 11 aaronmk
# Database access
2
3 1869 aaronmk
import copy
4 11 aaronmk
import re
5 865 aaronmk
import warnings
6 11 aaronmk
7 300 aaronmk
import exc
8 1909 aaronmk
import dicts
9 1893 aaronmk
import iters
10 1960 aaronmk
import lists
11 1889 aaronmk
from Proxy import Proxy
12 1872 aaronmk
import rand
13 2217 aaronmk
import sql_gen
14 862 aaronmk
import strings
15 131 aaronmk
import util
16 11 aaronmk
17 832 aaronmk
##### Exceptions
18
19 2804 aaronmk
def get_cur_query(cur, input_query=None):
20 2168 aaronmk
    raw_query = None
21
    if hasattr(cur, 'query'): raw_query = cur.query
22
    elif hasattr(cur, '_last_executed'): raw_query = cur._last_executed
23 2170 aaronmk
24
    if raw_query != None: return raw_query
25 2804 aaronmk
    else: return '[input] '+strings.ustr(input_query)
26 14 aaronmk
27 2170 aaronmk
def _add_cursor_info(e, *args, **kw_args):
28
    '''For params, see get_cur_query()'''
29 2771 aaronmk
    exc.add_msg(e, 'query: '+strings.ustr(get_cur_query(*args, **kw_args)))
30 135 aaronmk
31 300 aaronmk
class DbException(exc.ExceptionWithCause):
32 14 aaronmk
    def __init__(self, msg, cause=None, cur=None):
33 2145 aaronmk
        exc.ExceptionWithCause.__init__(self, msg, cause, cause_newline=True)
34 14 aaronmk
        if cur != None: _add_cursor_info(self, cur)
35
36 2143 aaronmk
class ExceptionWithName(DbException):
37
    def __init__(self, name, cause=None):
38 2484 aaronmk
        DbException.__init__(self, 'for name: '+strings.as_tt(str(name)), cause)
39 2143 aaronmk
        self.name = name
40 360 aaronmk
41 3109 aaronmk
class ExceptionWithValue(DbException):
42
    def __init__(self, value, cause=None):
43
        DbException.__init__(self, 'for value: '+strings.as_tt(repr(value)),
44
            cause)
45 2240 aaronmk
        self.value = value
46
47 2945 aaronmk
class ExceptionWithNameType(DbException):
48
    def __init__(self, type_, name, cause=None):
49
        DbException.__init__(self, 'for type: '+strings.as_tt(str(type_))
50
            +'; name: '+strings.as_tt(name), cause)
51
        self.type = type_
52
        self.name = name
53
54 2306 aaronmk
class ConstraintException(DbException):
55
    def __init__(self, name, cols, cause=None):
56 2484 aaronmk
        DbException.__init__(self, 'Violated '+strings.as_tt(name)
57
            +' constraint on columns: '+strings.as_tt(', '.join(cols)), cause)
58 2306 aaronmk
        self.name = name
59 468 aaronmk
        self.cols = cols
60 11 aaronmk
61 2523 aaronmk
class MissingCastException(DbException):
62
    def __init__(self, type_, col, cause=None):
63
        DbException.__init__(self, 'Missing cast to type '+strings.as_tt(type_)
64
            +' on column: '+strings.as_tt(col), cause)
65
        self.type = type_
66
        self.col = col
67
68 2143 aaronmk
class NameException(DbException): pass
69
70 2306 aaronmk
class DuplicateKeyException(ConstraintException): pass
71 13 aaronmk
72 2306 aaronmk
class NullValueException(ConstraintException): pass
73 13 aaronmk
74 3109 aaronmk
class InvalidValueException(ExceptionWithValue): pass
75 2239 aaronmk
76 2945 aaronmk
class DuplicateException(ExceptionWithNameType): pass
77 2143 aaronmk
78 89 aaronmk
class EmptyRowException(DbException): pass
79
80 865 aaronmk
##### Warnings
81
82
class DbWarning(UserWarning): pass
83
84 1930 aaronmk
##### Result retrieval
85
86
def col_names(cur): return (col[0] for col in cur.description)
87
88
def rows(cur): return iter(lambda: cur.fetchone(), None)
89
90
def consume_rows(cur):
91
    '''Fetches all remaining rows so that the result will be cached'''
92
    iters.consume_iter(rows(cur))
93
94
def next_row(cur): return rows(cur).next()
95
96
def row(cur):
97
    row_ = next_row(cur)
98
    consume_rows(cur)
99
    return row_
100
101
def next_value(cur): return next_row(cur)[0]
102
103
def value(cur): return row(cur)[0]
104
105
def values(cur): return iters.func_iter(lambda: next_value(cur))
106
107
def value_or_none(cur):
108
    try: return value(cur)
109
    except StopIteration: return None
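
# Usage sketch for the helpers above (not part of the original module; the
# table and column names are illustrative):
def _result_example(db):
    cur = run_query(db, 'SELECT genus, species FROM plants')
    names = list(col_names(cur)) # e.g. ['genus', 'species']
    first = next_row(cur) # first row, as a tuple
    rest = list(rows(cur)) # remaining rows
    count = value(run_query(db, 'SELECT count(*) FROM plants'))
    return names, first, rest, count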
110
111 2762 aaronmk
##### Escaping
112 2101 aaronmk
113 2573 aaronmk
def esc_name_by_module(module, name):
114
    if module == 'psycopg2' or module == None: quote = '"'
115 2101 aaronmk
    elif module == 'MySQLdb': quote = '`'
116
    else: raise NotImplementedError("Can't escape name for "+module+' database')
117 2500 aaronmk
    return sql_gen.esc_name(name, quote)
118 2101 aaronmk
119
def esc_name_by_engine(engine, name, **kw_args):
120
    return esc_name_by_module(db_engines[engine][0], name, **kw_args)
121
122
def esc_name(db, name, **kw_args):
123
    return esc_name_by_module(util.root_module(db.db), name, **kw_args)
124
125
def qual_name(db, schema, table):
126
    def esc_name_(name): return esc_name(db, name)
127
    table = esc_name_(table)
128
    if schema != None: return esc_name_(schema)+'.'+table
129
    else: return table
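
# Escaping sketch (illustrative; exact handling of embedded quote characters
# is delegated to sql_gen.esc_name()):
#
#     esc_name_by_module('psycopg2', 'order')  # -> '"order"'
#     esc_name_by_module('MySQLdb', 'order')   # -> '`order`'
#     qual_name(db, 'public', 'plants')        # -> '"public"."plants"'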
130
131 1869 aaronmk
##### Database connections
132 1849 aaronmk
133 2097 aaronmk
db_config_names = ['engine', 'host', 'user', 'password', 'database', 'schemas']
134 1926 aaronmk
135 1869 aaronmk
db_engines = {
136
    'MySQL': ('MySQLdb', {'password': 'passwd', 'database': 'db'}),
137
    'PostgreSQL': ('psycopg2', {}),
138
}
139
140
DatabaseErrors_set = set([DbException])
141
DatabaseErrors = tuple(DatabaseErrors_set)
142
143
def _add_module(module):
144
    DatabaseErrors_set.add(module.DatabaseError)
145
    global DatabaseErrors
146
    DatabaseErrors = tuple(DatabaseErrors_set)
147
148
def db_config_str(db_config):
149
    return db_config['engine']+' database '+db_config['database']
150
151 2448 aaronmk
log_debug_none = lambda msg, level=2: None
152 1901 aaronmk
153 1849 aaronmk
class DbConn:
154 2923 aaronmk
    def __init__(self, db_config, autocommit=True, caching=True,
155 2915 aaronmk
        log_debug=log_debug_none, debug_temp=False):
156
        '''
157
        @param debug_temp Whether to make temporary objects permanent instead.
158
            This assists in debugging the internal objects used by the program.
159
        '''
160 1869 aaronmk
        self.db_config = db_config
161 2190 aaronmk
        self.autocommit = autocommit
162
        self.caching = caching
163 1901 aaronmk
        self.log_debug = log_debug
164 2193 aaronmk
        self.debug = log_debug != log_debug_none
165 2915 aaronmk
        self.debug_temp = debug_temp
166 3074 aaronmk
        self.autoanalyze = False
167 1869 aaronmk
168
        self.__db = None
169 1889 aaronmk
        self.query_results = {}
170 2139 aaronmk
        self._savepoint = 0
171 2671 aaronmk
        self._notices_seen = set()
172 1869 aaronmk
173
    def __getattr__(self, name):
174
        if name == '__dict__': raise Exception('getting __dict__')
175
        if name == 'db': return self._db()
176
        else: raise AttributeError()
177
178
    def __getstate__(self):
179
        state = copy.copy(self.__dict__) # shallow copy
180 1915 aaronmk
        state['log_debug'] = None # don't pickle the debug callback
181 1869 aaronmk
        state['_DbConn__db'] = None # don't pickle the connection
182
        return state
183
184 2165 aaronmk
    def connected(self): return self.__db != None
185
186 3116 aaronmk
    def close(self):
187
        if self.connected(): self.db.close()
188
        self.__db = None
189
190 1869 aaronmk
    def _db(self):
191
        if self.__db == None:
192
            # Process db_config
193
            db_config = self.db_config.copy() # don't modify input!
194 2097 aaronmk
            schemas = db_config.pop('schemas', None)
195 1869 aaronmk
            module_name, mappings = db_engines[db_config.pop('engine')]
196
            module = __import__(module_name)
197
            _add_module(module)
198
            for orig, new in mappings.iteritems():
199
                try: util.rename_key(db_config, orig, new)
200
                except KeyError: pass
201
202
            # Connect
203
            self.__db = module.connect(**db_config)
204
205
            # Configure connection
206 2906 aaronmk
            if hasattr(self.db, 'set_isolation_level'):
207
                import psycopg2.extensions
208
                self.db.set_isolation_level(
209
                    psycopg2.extensions.ISOLATION_LEVEL_READ_COMMITTED)
210 2101 aaronmk
            if schemas != None:
211 2893 aaronmk
                search_path = [self.esc_name(s) for s in schemas.split(',')]
212
                search_path.append(value(run_query(self, 'SHOW search_path',
213
                    log_level=4)))
214
                run_query(self, 'SET search_path TO '+(','.join(search_path)),
215
                    log_level=3)
216 1869 aaronmk
217
        return self.__db
218 1889 aaronmk
219 1891 aaronmk
    class DbCursor(Proxy):
220 1927 aaronmk
        def __init__(self, outer):
221 1891 aaronmk
            Proxy.__init__(self, outer.db.cursor())
222 2191 aaronmk
            self.outer = outer
223 1927 aaronmk
            self.query_results = outer.query_results
224 1894 aaronmk
            self.query_lookup = None
225 1891 aaronmk
            self.result = []
226 1889 aaronmk
227 2802 aaronmk
        def execute(self, query):
228 2764 aaronmk
            self._is_insert = query.startswith('INSERT')
229 2797 aaronmk
            self.query_lookup = query
230 2148 aaronmk
            try:
231 2191 aaronmk
                try:
232 2802 aaronmk
                    cur = self.inner.execute(query)
233 2191 aaronmk
                    self.outer.do_autocommit()
234 2802 aaronmk
                finally: self.query = get_cur_query(self.inner, query)
235 1904 aaronmk
            except Exception, e:
236 2802 aaronmk
                _add_cursor_info(e, self, query)
237 1904 aaronmk
                self.result = e # cache the exception as the result
238
                self._cache_result()
239
                raise
240 3004 aaronmk
241
            # Always cache certain queries
242
            if query.startswith('CREATE') or query.startswith('ALTER'):
243 3007 aaronmk
                # structural changes
244 3040 aaronmk
                # Rest of query must be unique in the face of name collisions,
245
                # so don't cache ADD COLUMN unless it has a distinguishing comment
246
                if query.find('ADD COLUMN') < 0 or query.endswith('*/'):
247 3007 aaronmk
                    self._cache_result()
248 3004 aaronmk
            elif self.rowcount == 0 and query.startswith('SELECT'): # empty
249 2800 aaronmk
                consume_rows(self) # fetch all rows so result will be cached
250 3004 aaronmk
251 2762 aaronmk
            return cur
252 1894 aaronmk
253 1891 aaronmk
        def fetchone(self):
254
            row = self.inner.fetchone()
255 1899 aaronmk
            if row != None: self.result.append(row)
256
            # otherwise, fetched all rows
257 1904 aaronmk
            else: self._cache_result()
258
            return row
259
260
        def _cache_result(self):
261 2948 aaronmk
            # For inserts that return a result set, don't cache it, since
262
            # inserts are not idempotent. Other non-SELECT queries don't have
263
            # their result set read, so only exceptions will be cached (an
264
            # invalid query will always be invalid).
265 1930 aaronmk
            if self.query_results != None and (not self._is_insert
266 1906 aaronmk
                or isinstance(self.result, Exception)):
267
268 1894 aaronmk
                assert self.query_lookup != None
269 1916 aaronmk
                self.query_results[self.query_lookup] = self.CacheCursor(
270
                    util.dict_subset(dicts.AttrsDictView(self),
271
                    ['query', 'result', 'rowcount', 'description']))
272 1906 aaronmk
273 1916 aaronmk
        class CacheCursor:
274
            def __init__(self, cached_result): self.__dict__ = cached_result
275
276 1927 aaronmk
            def execute(self, *args, **kw_args):
277 1916 aaronmk
                if isinstance(self.result, Exception): raise self.result
278
                # otherwise, result is a rows list
279
                self.iter = iter(self.result)
280
281
            def fetchone(self):
282
                try: return self.iter.next()
283
                except StopIteration: return None
284 1891 aaronmk
285 2212 aaronmk
    def esc_value(self, value):
286 2663 aaronmk
        try: str_ = self.mogrify('%s', [value])
287
        except NotImplementedError, e:
288
            module = util.root_module(self.db)
289
            if module == 'MySQLdb':
290
                import _mysql
291
                str_ = _mysql.escape_string(value)
292
            else: raise e
293 2374 aaronmk
        return strings.to_unicode(str_)
294 2212 aaronmk
295 2347 aaronmk
    def esc_name(self, name): return esc_name(self, name) # calls global func
296
297 2814 aaronmk
    def std_code(self, str_):
298
        '''Standardizes SQL code.
299
        * Ensures that string literals are prefixed by `E`
300
        '''
301
        if str_.startswith("'"): str_ = 'E'+str_
302
        return str_
303
304 2665 aaronmk
    def can_mogrify(self):
305 2663 aaronmk
        module = util.root_module(self.db)
306 2665 aaronmk
        return module == 'psycopg2'
307 2663 aaronmk
308 2665 aaronmk
    def mogrify(self, query, params=None):
309
        if self.can_mogrify(): return self.db.cursor().mogrify(query, params)
310
        else: raise NotImplementedError("Can't mogrify query")
311
312 2671 aaronmk
    def print_notices(self):
313 2725 aaronmk
        if hasattr(self.db, 'notices'):
314
            for msg in self.db.notices:
315
                if msg not in self._notices_seen:
316
                    self._notices_seen.add(msg)
317
                    self.log_debug(msg, level=2)
318 2671 aaronmk
319 2793 aaronmk
    def run_query(self, query, cacheable=False, log_level=2,
320 2464 aaronmk
        debug_msg_ref=None):
321 2445 aaronmk
        '''
322 2464 aaronmk
        @param log_ignore_excs The log_level will be increased by 2 if the query
323
            throws one of these exceptions. (This param is handled by the
            module-level run_query() wrapper, not by this method.)
324 2664 aaronmk
        @param debug_msg_ref If specified, the log message will be returned in
325
            this instead of being output. This allows you to filter log messages
326
            depending on the result of the query.
327 2445 aaronmk
        '''
328 2167 aaronmk
        assert query != None
329
330 2047 aaronmk
        if not self.caching: cacheable = False
331 1903 aaronmk
        used_cache = False
332 2664 aaronmk
333
        def log_msg(query):
334
            if used_cache: cache_status = 'cache hit'
335
            elif cacheable: cache_status = 'cache miss'
336
            else: cache_status = 'non-cacheable'
337
            return 'DB query: '+cache_status+':\n'+strings.as_code(query, 'SQL')
338
339 1903 aaronmk
        try:
340 1927 aaronmk
            # Get cursor
341
            if cacheable:
342
                try:
343 2797 aaronmk
                    cur = self.query_results[query]
344 1927 aaronmk
                    used_cache = True
345
                except KeyError: cur = self.DbCursor(self)
346
            else: cur = self.db.cursor()
347
348 2664 aaronmk
            # Log query
349
            if self.debug and debug_msg_ref == None: # log before running
350
                self.log_debug(log_msg(query), log_level)
351
352 1927 aaronmk
            # Run query
353 2793 aaronmk
            cur.execute(query)
354 1903 aaronmk
        finally:
355 2671 aaronmk
            self.print_notices()
356 2664 aaronmk
            if self.debug and debug_msg_ref != None: # return after running
357 2793 aaronmk
                debug_msg_ref[0] = log_msg(str(get_cur_query(cur, query)))
358 1903 aaronmk
359
        return cur
360 1914 aaronmk
361 2797 aaronmk
    def is_cached(self, query): return query in self.query_results
362 2139 aaronmk
363 2907 aaronmk
    def with_autocommit(self, func):
364 2801 aaronmk
        import psycopg2.extensions
365
366
        prev_isolation_level = self.db.isolation_level
367 2907 aaronmk
        self.db.set_isolation_level(
368
            psycopg2.extensions.ISOLATION_LEVEL_AUTOCOMMIT)
369 2683 aaronmk
        try: return func()
370 2801 aaronmk
        finally: self.db.set_isolation_level(prev_isolation_level)
371 2683 aaronmk
372 2139 aaronmk
    def with_savepoint(self, func):
373 2171 aaronmk
        savepoint = 'level_'+str(self._savepoint)
374 2443 aaronmk
        self.run_query('SAVEPOINT '+savepoint, log_level=4)
375 2139 aaronmk
        self._savepoint += 1
376 2930 aaronmk
        try: return func()
377 2139 aaronmk
        except:
378 2443 aaronmk
            self.run_query('ROLLBACK TO SAVEPOINT '+savepoint, log_level=4)
379 2139 aaronmk
            raise
380 2930 aaronmk
        finally:
381
            # Always release savepoint, because after ROLLBACK TO SAVEPOINT,
382
            # "The savepoint remains valid and can be rolled back to again"
383
            # (http://www.postgresql.org/docs/8.3/static/sql-rollback-to.html).
384 2443 aaronmk
            self.run_query('RELEASE SAVEPOINT '+savepoint, log_level=4)
385 2930 aaronmk
386
            self._savepoint -= 1
387
            assert self._savepoint >= 0
388
389
            self.do_autocommit() # OK to do this after ROLLBACK TO SAVEPOINT
390 2191 aaronmk
391
    def do_autocommit(self):
392
        '''Autocommits if not inside a savepoint'''
393
        assert self._savepoint >= 0
394
        if self.autocommit and self._savepoint == 0:
395 2924 aaronmk
            self.log_debug('Autocommitting', level=4)
396 2191 aaronmk
            self.db.commit()
397 2643 aaronmk
398 2819 aaronmk
    def col_info(self, col):
399 2643 aaronmk
        table = sql_gen.Table('columns', 'information_schema')
400 3063 aaronmk
        type_ = sql_gen.Coalesce(sql_gen.Nullif(sql_gen.Col('data_type'),
401
            'USER-DEFINED'), sql_gen.Col('udt_name'))
402 3078 aaronmk
        cols = [type_, 'column_default',
403
            sql_gen.Cast('boolean', sql_gen.Col('is_nullable'))]
404 2643 aaronmk
405
        conds = [('table_name', col.table.name), ('column_name', col.name)]
406
        schema = col.table.schema
407
        if schema != None: conds.append(('table_schema', schema))
408
409 2819 aaronmk
        type_, default, nullable = row(select(self, table, cols, conds,
410 3059 aaronmk
            order_by='table_schema', limit=1, cacheable=False, log_level=4))
411 2643 aaronmk
            # TODO: order_by search_path schema order
412 2819 aaronmk
        default = sql_gen.as_Code(default, self)
413
414
        return sql_gen.TypedCol(col.name, type_, default, nullable)
415 2917 aaronmk
416
    def TempFunction(self, name):
417
        if self.debug_temp: schema = None
418
        else: schema = 'pg_temp'
419
        return sql_gen.Function(name, schema)
420 1849 aaronmk
421 1869 aaronmk
connect = DbConn
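
# Connection sketch (not part of the original module; the credentials and the
# test query are placeholders for illustration only):
def _connect_example():
    db_config = dict(engine='PostgreSQL', host='localhost', user='bien',
        password='secret', database='bien', schemas='public')
    db = connect(db_config, autocommit=False)
    return value(run_query(db, 'SELECT 1')) # 1 if the connection works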
422
423 832 aaronmk
##### Recoverable querying
424 15 aaronmk
425 2139 aaronmk
def with_savepoint(db, func): return db.with_savepoint(func)
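
# Savepoint sketch (not part of the original module; `plants` and its columns
# are illustrative). If the body raises, the savepoint is rolled back and the
# connection remains usable.
def _with_savepoint_example(db):
    def body():
        insert(db, 'plants', dict(genus='Quercus', species='alba'))
        return True
    try: return with_savepoint(db, body)
    except DatabaseErrors: return False # the insert was rolled back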
426 11 aaronmk
427 2791 aaronmk
def run_query(db, query, recover=None, cacheable=False, log_level=2,
428
    log_ignore_excs=None, **kw_args):
429 2794 aaronmk
    '''For params, see DbConn.run_query()'''
430 830 aaronmk
    if recover == None: recover = False
431 2464 aaronmk
    if log_ignore_excs == None: log_ignore_excs = ()
432
    log_ignore_excs = tuple(log_ignore_excs)
433 830 aaronmk
434 2666 aaronmk
    debug_msg_ref = None # usually, db.run_query() logs query before running it
435
    # But if filtering with log_ignore_excs, wait until after exception parsing
436 2984 aaronmk
    if log_ignore_excs != () or not db.can_mogrify(): debug_msg_ref = [None]
437 2666 aaronmk
438 2148 aaronmk
    try:
439 2464 aaronmk
        try:
440 2794 aaronmk
            def run(): return db.run_query(query, cacheable, log_level,
441 2793 aaronmk
                debug_msg_ref, **kw_args)
442 2796 aaronmk
            if recover and not db.is_cached(query):
443 2464 aaronmk
                return with_savepoint(db, run)
444
            else: return run() # don't need savepoint if cached
445
        except Exception, e:
446 3095 aaronmk
            msg = strings.ustr(e.args[0])
447 2464 aaronmk
448 3095 aaronmk
            match = re.match(r'^duplicate key value violates unique constraint '
449 3096 aaronmk
                r'"((_?[^\W_]+(?=[._]))?.+?)"', msg)
450 2464 aaronmk
            if match:
451
                constraint, table = match.groups()
452 3025 aaronmk
                cols = []
453
                if recover: # need auto-rollback to run index_cols()
454
                    try: cols = index_cols(db, table, constraint)
455
                    except NotImplementedError: pass
456
                raise DuplicateKeyException(constraint, cols, e)
457 2464 aaronmk
458 3095 aaronmk
            match = re.match(r'^null value in column "(.+?)" violates not-null'
459 2464 aaronmk
                r' constraint', msg)
460
            if match: raise NullValueException('NOT NULL', [match.group(1)], e)
461
462 3095 aaronmk
            match = re.match(r'^(?:invalid input (?:syntax|value)\b.*?'
463 3109 aaronmk
                r'|.+? field value out of range): "(.+?)"', msg)
464 2464 aaronmk
            if match:
465 3109 aaronmk
                value, = match.groups()
466
                raise InvalidValueException(strings.to_unicode(value), e)
467 2464 aaronmk
468 3095 aaronmk
            match = re.match(r'^column "(.+?)" is of type (.+?) but expression '
469 2523 aaronmk
                r'is of type', msg)
470
            if match:
471
                col, type_ = match.groups()
472
                raise MissingCastException(type_, col, e)
473
474 3095 aaronmk
            match = re.match(r'^(\S+) "(.+?)".*? already exists', msg)
475 2945 aaronmk
            if match:
476
                type_, name = match.groups()
477
                raise DuplicateException(type_, name, e)
478 2464 aaronmk
479
            raise # no specific exception raised
480
    except log_ignore_excs:
481
        log_level += 2
482
        raise
483
    finally:
484 2666 aaronmk
        if debug_msg_ref != None and debug_msg_ref[0] != None:
485
            db.log_debug(debug_msg_ref[0], log_level)
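
# Error-parsing sketch (not part of the original module; the query is
# illustrative). With recover=True the query runs inside a savepoint, so the
# connection stays usable after a constraint violation, and the raw database
# error is re-raised as one of the typed exceptions defined above.
def _run_query_example(db):
    query = "INSERT INTO plants (genus) VALUES ('Quercus')"
    try: return run_query(db, query, recover=True)
    except DuplicateKeyException, e:
        return None # e.name = the violated constraint, e.cols = its columns
    except NullValueException, e:
        return None # e.cols[0] = the NOT NULL column that was left NULL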
486 830 aaronmk
487 832 aaronmk
##### Basic queries
488
489 2153 aaronmk
def next_version(name):
490 2163 aaronmk
    version = 1 # first existing name was version 0
491 2586 aaronmk
    match = re.match(r'^(.*)#(\d+)$', name)
492 2153 aaronmk
    if match:
493 2586 aaronmk
        name, version = match.groups()
494
        version = int(version)+1
495 2932 aaronmk
    return sql_gen.concat(name, '#'+str(version))
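
# e.g. next_version('plants') -> 'plants#1'
#      next_version('plants#1') -> 'plants#2'
# (assuming sql_gen.concat() simply appends here; it may also truncate names
# that would exceed the identifier length limit)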
496 2153 aaronmk
497 2899 aaronmk
def lock_table(db, table, mode):
498
    table = sql_gen.as_Table(table)
499
    run_query(db, 'LOCK TABLE '+table.to_str(db)+' IN '+mode+' MODE')
500
501 2789 aaronmk
def run_query_into(db, query, into=None, add_indexes_=False, **kw_args):
502 2085 aaronmk
    '''Outputs a query to a temp table.
503
    For params, see run_query().
504
    '''
505 2789 aaronmk
    if into == None: return run_query(db, query, **kw_args)
506 2790 aaronmk
507
    assert isinstance(into, sql_gen.Table)
508
509 2992 aaronmk
    into.is_temp = True
510 3008 aaronmk
    # "temporary tables cannot specify a schema name", so remove schema
511
    into.schema = None
512 2992 aaronmk
513 2790 aaronmk
    kw_args['recover'] = True
514 2945 aaronmk
    kw_args.setdefault('log_ignore_excs', (DuplicateException,))
515 2790 aaronmk
516 2916 aaronmk
    temp = not db.debug_temp # tables are permanent in debug_temp mode
517 2790 aaronmk
518
    # Create table
519
    while True:
520
        create_query = 'CREATE'
521
        if temp: create_query += ' TEMP'
522
        create_query += ' TABLE '+into.to_str(db)+' AS\n'+query
523 2385 aaronmk
524 2790 aaronmk
        try:
525
            cur = run_query(db, create_query, **kw_args)
526
                # CREATE TABLE AS sets rowcount to # rows in query
527
            break
528 2945 aaronmk
        except DuplicateException, e:
529 2790 aaronmk
            into.name = next_version(into.name)
530
            # try again with next version of name
531
532
    if add_indexes_: add_indexes(db, into)
533 3075 aaronmk
534
    # According to the PostgreSQL doc, "The autovacuum daemon cannot access and
535
    # therefore cannot vacuum or analyze temporary tables. [...] if a temporary
536
    # table is going to be used in complex queries, it is wise to run ANALYZE on
537
    # the temporary table after it is populated."
538
    # (http://www.postgresql.org/docs/9.1/static/sql-createtable.html)
539
    # If into is not a temp table, ANALYZE is useful but not required.
540 3073 aaronmk
    analyze(db, into)
541 2790 aaronmk
542
    return cur
543 2085 aaronmk
544 2120 aaronmk
order_by_pkey = object() # tells mk_select() to order by the pkey
545
546 2199 aaronmk
distinct_on_all = object() # tells mk_select() to SELECT DISTINCT ON all columns
547
548 2233 aaronmk
def mk_select(db, tables, fields=None, conds=None, distinct_on=[], limit=None,
549 2293 aaronmk
    start=None, order_by=order_by_pkey, default_table=None):
550 1981 aaronmk
    '''
551 2121 aaronmk
    @param tables The single table to select from, or a list of tables to join
552 2280 aaronmk
        together, with tables after the first being sql_gen.Join objects
553 1981 aaronmk
    @param fields Use None to select all fields in the table
554 2377 aaronmk
    @param conds WHERE conditions: [(compare_left_side, compare_right_side),...]
555 2379 aaronmk
        * container can be any iterable type
556 2399 aaronmk
        * compare_left_side: sql_gen.Code|str (for col name)
557
        * compare_right_side: sql_gen.ValueCond|literal value
558 2199 aaronmk
    @param distinct_on The columns to SELECT DISTINCT ON, or distinct_on_all to
559
        use all columns
560 2786 aaronmk
    @return query
561 1981 aaronmk
    '''
562 2315 aaronmk
    # Parse tables param
563 2964 aaronmk
    tables = lists.mk_seq(tables)
564 2141 aaronmk
    tables = list(tables) # don't modify input! (list() copies input)
565 2315 aaronmk
    table0 = sql_gen.as_Table(tables.pop(0)) # first table is separate
566 2121 aaronmk
567 2315 aaronmk
    # Parse other params
568 2376 aaronmk
    if conds == None: conds = []
569 2650 aaronmk
    elif dicts.is_dict(conds): conds = conds.items()
570 2379 aaronmk
    conds = list(conds) # don't modify input! (list() copies input)
571 135 aaronmk
    assert limit == None or type(limit) == int
572 865 aaronmk
    assert start == None or type(start) == int
573 2315 aaronmk
    if order_by is order_by_pkey:
574
        if distinct_on != []: order_by = None
575
        else: order_by = pkey(db, table0, recover=True)
576 865 aaronmk
577 2315 aaronmk
    query = 'SELECT'
578 2056 aaronmk
579 2315 aaronmk
    def parse_col(col): return sql_gen.as_Col(col, default_table).to_str(db)
580 2056 aaronmk
581 2200 aaronmk
    # DISTINCT ON columns
582 2233 aaronmk
    if distinct_on != []:
583 2467 aaronmk
        query += '\nDISTINCT'
584 2254 aaronmk
        if distinct_on is not distinct_on_all:
585 2200 aaronmk
            query += ' ON ('+(', '.join(map(parse_col, distinct_on)))+')'
586
587
    # Columns
588 3027 aaronmk
    if fields == None:
589
        if query.find('\n') >= 0: whitespace = '\n'
590
        else: whitespace = ' '
591
        query += whitespace+'*'
592 2765 aaronmk
    else:
593
        assert fields != []
594 3027 aaronmk
        query += '\n'+('\n, '.join(map(parse_col, fields)))
595 2200 aaronmk
596
    # Main table
597 2467 aaronmk
    query += '\nFROM '+table0.to_str(db)
598 865 aaronmk
599 2122 aaronmk
    # Add joins
600 2271 aaronmk
    left_table = table0
601 2263 aaronmk
    for join_ in tables:
602
        table = join_.table
603 2238 aaronmk
604 2343 aaronmk
        # Parse special values
605
        if join_.type_ is sql_gen.filter_out: # filter no match
606 2376 aaronmk
            conds.append((sql_gen.Col(table_not_null_col(db, table), table),
607 2853 aaronmk
                sql_gen.CompareCond(None, '~=')))
608 2343 aaronmk
609 2467 aaronmk
        query += '\n'+join_.to_str(db, left_table)
610 2122 aaronmk
611
        left_table = table
612
613 865 aaronmk
    missing = True
614 2376 aaronmk
    if conds != []:
615 2576 aaronmk
        if len(conds) == 1: whitespace = ' '
616
        else: whitespace = '\n'
617 2578 aaronmk
        query += '\n'+sql_gen.combine_conds([sql_gen.ColValueCond(l, r)
618
            .to_str(db) for l, r in conds], 'WHERE')
619 865 aaronmk
        missing = False
620 2227 aaronmk
    if order_by != None:
621 2467 aaronmk
        query += '\nORDER BY '+sql_gen.as_Col(order_by, table0).to_str(db)
622
    if limit != None: query += '\nLIMIT '+str(limit); missing = False
623 865 aaronmk
    if start != None:
624 2467 aaronmk
        if start != 0: query += '\nOFFSET '+str(start)
625 865 aaronmk
        missing = False
626
    if missing: warnings.warn(DbWarning(
627
        'SELECT statement missing a WHERE, LIMIT, or OFFSET clause: '+query))
628
629 2786 aaronmk
    return query
630 11 aaronmk
631 2054 aaronmk
def select(db, *args, **kw_args):
632
    '''For params, see mk_select() and run_query()'''
633
    recover = kw_args.pop('recover', None)
634
    cacheable = kw_args.pop('cacheable', True)
635 2442 aaronmk
    log_level = kw_args.pop('log_level', 2)
636 2054 aaronmk
637 2791 aaronmk
    return run_query(db, mk_select(db, *args, **kw_args), recover, cacheable,
638
        log_level=log_level)
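
# SELECT sketch (not part of the original module; table/column names are
# illustrative). conds may be a dict or a list of (col, value) pairs; order_by
# defaults to the table's pkey.
def _select_example(db):
    # roughly: SELECT genus, species FROM plants WHERE family = 'Fagaceae'
    # ORDER BY <pkey> LIMIT 10
    cur = select(db, 'plants', ['genus', 'species'], {'family': 'Fagaceae'},
        limit=10)
    return list(rows(cur))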
639 2054 aaronmk
640 2788 aaronmk
def mk_insert_select(db, table, cols=None, select_query=None, returning=None,
641 3009 aaronmk
    embeddable=False, ignore=False):
642 1960 aaronmk
    '''
643
    @param returning str|None An inserted column (such as pkey) to return
644 2070 aaronmk
    @param embeddable Whether the query should be embeddable as a nested SELECT.
645 2073 aaronmk
        Warning: If you set this and cacheable=True when the query is run, the
646
        query will be fully cached, not just if it raises an exception.
647 3009 aaronmk
    @param ignore Whether to ignore duplicate keys.
648 1960 aaronmk
    '''
649 2754 aaronmk
    table = sql_gen.remove_table_rename(sql_gen.as_Table(table))
650 2318 aaronmk
    if cols == []: cols = None # no cols (all defaults) = unknown col names
651 3010 aaronmk
    if cols != None: cols = [sql_gen.to_name_only_col(c, table) for c in cols]
652 2063 aaronmk
    if select_query == None: select_query = 'DEFAULT VALUES'
653 2327 aaronmk
    if returning != None: returning = sql_gen.as_Col(returning, table)
654 2063 aaronmk
655 2497 aaronmk
    first_line = 'INSERT INTO '+table.to_str(db)
656 2063 aaronmk
657 3009 aaronmk
    def mk_insert(select_query):
658
        query = first_line
659 3014 aaronmk
        if cols != None:
660
            query += '\n('+(', '.join((c.to_str(db) for c in cols)))+')'
661 3009 aaronmk
        query += '\n'+select_query
662
663
        if returning != None:
664
            returning_name_col = sql_gen.to_name_only_col(returning)
665
            query += '\nRETURNING '+returning_name_col.to_str(db)
666
667
        return query
668 2063 aaronmk
669 3017 aaronmk
    return_type = 'unknown'
670
    if returning != None: return_type = returning.to_str(db)+'%TYPE'
671
672 3009 aaronmk
    lang = 'sql'
673
    if ignore:
674 3017 aaronmk
        # Always return something to set the correct rowcount
675
        if returning == None: returning = sql_gen.NamedCol('NULL', None)
676
677 3009 aaronmk
        embeddable = True # must use function
678
        lang = 'plpgsql'
679 3010 aaronmk
680 3092 aaronmk
        if cols == None:
681
            row = [sql_gen.Col(sql_gen.all_cols, 'row')]
682
            row_vars = [sql_gen.Table('row')]
683
        else:
684
            row_vars = row = [sql_gen.Col(c.name, 'row') for c in cols]
685
686 3009 aaronmk
        query = '''\
687 3010 aaronmk
DECLARE
688 3014 aaronmk
    row '''+table.to_str(db)+'''%ROWTYPE;
689 3009 aaronmk
BEGIN
690 3019 aaronmk
    /* Need an EXCEPTION block for each individual row because "When an error is
691
    caught by an EXCEPTION clause, [...] all changes to persistent database
692
    state within the block are rolled back."
693
    This is unfortunate because "A block containing an EXCEPTION clause is
694
    significantly more expensive to enter and exit than a block without one."
695 3015 aaronmk
    (http://www.postgresql.org/docs/8.3/static/plpgsql-control-structures.html\
696
#PLPGSQL-ERROR-TRAPPING)
697
    */
698 3092 aaronmk
    FOR '''+(', '.join((v.to_str(db) for v in row_vars)))+''' IN
699 3034 aaronmk
'''+select_query+'''
700
    LOOP
701 3015 aaronmk
        BEGIN
702 3019 aaronmk
            RETURN QUERY
703 3014 aaronmk
'''+mk_insert(sql_gen.Values(row).to_str(db))+'''
704 3010 aaronmk
;
705 3015 aaronmk
        EXCEPTION
706 3019 aaronmk
            WHEN unique_violation THEN NULL; -- continue to next row
707 3015 aaronmk
        END;
708 3010 aaronmk
    END LOOP;
709
END;\
710 3009 aaronmk
'''
711
    else: query = mk_insert(select_query)
712
713 2070 aaronmk
    if embeddable:
714
        # Create function
715 2513 aaronmk
        function_name = sql_gen.clean_name(first_line)
716 2189 aaronmk
        while True:
717
            try:
718 2918 aaronmk
                function = db.TempFunction(function_name)
719 2194 aaronmk
720 2189 aaronmk
                function_query = '''\
721 2698 aaronmk
CREATE FUNCTION '''+function.to_str(db)+'''()
722 3017 aaronmk
RETURNS SETOF '''+return_type+'''
723 3009 aaronmk
LANGUAGE '''+lang+'''
724 2467 aaronmk
AS $$
725 3009 aaronmk
'''+query+'''
726 2467 aaronmk
$$;
727 2070 aaronmk
'''
728 2446 aaronmk
                run_query(db, function_query, recover=True, cacheable=True,
729 2945 aaronmk
                    log_ignore_excs=(DuplicateException,))
730 2189 aaronmk
                break # this version was successful
731 2945 aaronmk
            except DuplicateException, e:
732 2189 aaronmk
                function_name = next_version(function_name)
733
                # try again with next version of name
734 2070 aaronmk
735 2337 aaronmk
        # Return query that uses function
736 3009 aaronmk
        cols = None
737
        if returning != None: cols = [returning]
738 2698 aaronmk
        func_table = sql_gen.NamedTable('f', sql_gen.FunctionCall(function),
739 3009 aaronmk
            cols) # AS clause requires function alias
740 2787 aaronmk
        return mk_select(db, func_table, start=0, order_by=None)
741 2070 aaronmk
742 2787 aaronmk
    return query
743 2066 aaronmk
744 3074 aaronmk
def insert_select(db, table, *args, **kw_args):
745 2085 aaronmk
    '''For params, see mk_insert_select() and run_query_into()
746 2386 aaronmk
    @param into sql_gen.Table with suggested name of temp table to put RETURNING
747
        values in
748 2072 aaronmk
    '''
749 2386 aaronmk
    into = kw_args.pop('into', None)
750
    if into != None: kw_args['embeddable'] = True
751 2066 aaronmk
    recover = kw_args.pop('recover', None)
752 3011 aaronmk
    if kw_args.get('ignore', False): recover = True
753 2066 aaronmk
    cacheable = kw_args.pop('cacheable', True)
754 2673 aaronmk
    log_level = kw_args.pop('log_level', 2)
755 2066 aaronmk
756 3074 aaronmk
    cur = run_query_into(db, mk_insert_select(db, table, *args, **kw_args),
757
        into, recover=recover, cacheable=cacheable, log_level=log_level)
758
    autoanalyze(db, table)
759
    return cur
760 2063 aaronmk
761 2738 aaronmk
default = sql_gen.default # tells insert() to use the default value for a column
762 2066 aaronmk
763 2063 aaronmk
def insert(db, table, row, *args, **kw_args):
764 2085 aaronmk
    '''For params, see insert_select()'''
765 1960 aaronmk
    if lists.is_seq(row): cols = None
766
    else:
767
        cols = row.keys()
768
        row = row.values()
769 2738 aaronmk
    row = list(row) # ensure that "== []" works
770 1960 aaronmk
771 2738 aaronmk
    if row == []: query = None
772
    else: query = sql_gen.Values(row).to_str(db)
773 1961 aaronmk
774 2788 aaronmk
    return insert_select(db, table, cols, query, *args, **kw_args)
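
# INSERT sketch (not part of the original module; names are illustrative).
# Passing a dict uses its keys as the column names; returning= adds a
# RETURNING clause so the new pkey value can be read back.
def _insert_example(db):
    cur = insert(db, 'plants', dict(genus='Quercus', species='alba'),
        returning='plant_id')
    return value(cur) # the generated plant_id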
775 11 aaronmk
776 3056 aaronmk
def mk_update(db, table, changes=None, cond=None, in_place=False):
777 2402 aaronmk
    '''
778
    @param changes [(col, new_value),...]
779
        * container can be any iterable type
780
        * col: sql_gen.Code|str (for col name)
781
        * new_value: sql_gen.Code|literal value
782
    @param cond sql_gen.Code WHERE condition. e.g. use sql_gen.*Cond objects.
783 3056 aaronmk
    @param in_place If set, locks the table and updates rows in place.
784
        This avoids creating dead rows in PostgreSQL.
785
        * cond must be None
786 2402 aaronmk
    @return str query
787
    '''
788 3057 aaronmk
    table = sql_gen.as_Table(table)
789
    changes = [(sql_gen.to_name_only_col(c, table), sql_gen.as_Value(v))
790
        for c, v in changes]
791
792 3056 aaronmk
    if in_place:
793
        assert cond == None
794 3058 aaronmk
795 3065 aaronmk
        query = 'ALTER TABLE '+table.to_str(db)+'\n'
796
        query += ',\n'.join(('ALTER COLUMN '+c.to_str(db)+' TYPE '
797
            +db.col_info(sql_gen.with_default_table(c, table)).type
798
            +'\nUSING '+v.to_str(db) for c, v in changes))
799 3058 aaronmk
    else:
800
        query = 'UPDATE '+table.to_str(db)+'\nSET\n'
801
        query += ',\n'.join((c.to_str(db)+' = '+v.to_str(db)
802
            for c, v in changes))
803
        if cond != None: query += '\nWHERE\n'+cond.to_str(db)
804 3056 aaronmk
805 2402 aaronmk
    return query
806
807 3074 aaronmk
def update(db, table, *args, **kw_args):
808 2402 aaronmk
    '''For params, see mk_update() and run_query()'''
809
    recover = kw_args.pop('recover', None)
810 3043 aaronmk
    cacheable = kw_args.pop('cacheable', False)
811 3030 aaronmk
    log_level = kw_args.pop('log_level', 2)
812 2402 aaronmk
813 3074 aaronmk
    cur = run_query(db, mk_update(db, table, *args, **kw_args), recover,
814
        cacheable, log_level=log_level)
815
    autoanalyze(db, table)
816
    return cur
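
# UPDATE sketch (not part of the original module; names are illustrative):
# roughly UPDATE plants SET family = 'Fagaceae' WHERE genus = 'Quercus'.
def _update_example(db):
    update(db, 'plants', [('family', 'Fagaceae')],
        cond=sql_gen.ColValueCond('genus', 'Quercus'))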
817 2402 aaronmk
818 135 aaronmk
def last_insert_id(db):
819 1849 aaronmk
    module = util.root_module(db.db)
820 135 aaronmk
    if module == 'psycopg2': return value(run_query(db, 'SELECT lastval()'))
821
    elif module == 'MySQLdb': return db.insert_id()
822
    else: return None
823 13 aaronmk
824 2394 aaronmk
def mk_flatten_mapping(db, into, cols, preserve=[], as_items=False):
825 2383 aaronmk
    '''Creates a mapping from original column names (which may have collisions)
826 2415 aaronmk
    to names that will be distinct among the columns' tables.
827 2383 aaronmk
    This is meant to be used for several tables that are being joined together.
828 2415 aaronmk
    @param cols The columns to combine. Duplicates will be removed.
829
    @param into The table for the new columns.
830 2394 aaronmk
    @param preserve [sql_gen.Col...] Columns not to rename. Note that these
831
        columns will be included in the mapping even if they are not in cols.
832
        The tables of the provided Col objects will be changed to into, so make
833
        copies of them if you want to keep the original tables.
834
    @param as_items Whether to return a list of dict items instead of a dict
835 2383 aaronmk
    @return dict(orig_col=new_col, ...)
836
        * orig_col: sql_gen.Col(orig_col_name, orig_table)
837 2392 aaronmk
        * new_col: sql_gen.Col(orig_col_name, into)
838
        * All mappings use the into table so its name can easily be
839 2383 aaronmk
          changed for all columns at once
840
    '''
841 2415 aaronmk
    cols = lists.uniqify(cols)
842
843 2394 aaronmk
    items = []
844 2389 aaronmk
    for col in preserve:
845 2390 aaronmk
        orig_col = copy.copy(col)
846 2392 aaronmk
        col.table = into
847 2394 aaronmk
        items.append((orig_col, col))
848
    preserve = set(preserve)
849
    for col in cols:
850 2716 aaronmk
        if col not in preserve:
851
            items.append((col, sql_gen.Col(str(col), into, col.srcs)))
852 2394 aaronmk
853
    if not as_items: items = dict(items)
854
    return items
855 2383 aaronmk
856 2393 aaronmk
def flatten(db, into, joins, cols, limit=None, start=None, **kw_args):
857 2391 aaronmk
    '''For params, see mk_flatten_mapping()
858
    @return See return value of mk_flatten_mapping()
859
    '''
860 2394 aaronmk
    items = mk_flatten_mapping(db, into, cols, as_items=True, **kw_args)
861
    cols = [sql_gen.NamedCol(new.name, old) for old, new in items]
862 2786 aaronmk
    run_query_into(db, mk_select(db, joins, cols, limit=limit, start=start),
863 2846 aaronmk
        into=into, add_indexes_=True)
864 2394 aaronmk
    return dict(items)
865 2391 aaronmk
866 3079 aaronmk
##### Database structure introspection
867 2414 aaronmk
868 3079 aaronmk
#### Tables
869
870
def tables(db, schema_like='public', table_like='%', exact=False):
871
    if exact: compare = '='
872
    else: compare = 'LIKE'
873
874
    module = util.root_module(db.db)
875
    if module == 'psycopg2':
876
        conds = [('schemaname', sql_gen.CompareCond(schema_like, compare)),
877
            ('tablename', sql_gen.CompareCond(table_like, compare))]
878
        return values(select(db, 'pg_tables', ['tablename'], conds,
879
            order_by='tablename', log_level=4))
880
    elif module == 'MySQLdb':
881
        return values(run_query(db, 'SHOW TABLES LIKE '+db.esc_value(table_like)
882
            , cacheable=True, log_level=4))
883
    else: raise NotImplementedError("Can't list tables for "+module+' database')
884
885
def table_exists(db, table):
886
    table = sql_gen.as_Table(table)
887
    return list(tables(db, table.schema, table.name, exact=True)) != []
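
# e.g. (sketch; schema/table names are illustrative):
#     list(tables(db, 'public', 'plant%'))                 # names matching LIKE
#     table_exists(db, sql_gen.Table('plants', 'public'))  # True or False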
888
889 2426 aaronmk
def table_row_count(db, table, recover=None):
890 2786 aaronmk
    return value(run_query(db, mk_select(db, table, [sql_gen.row_count],
891 2443 aaronmk
        order_by=None, start=0), recover=recover, log_level=3))
892 2426 aaronmk
893 2414 aaronmk
def table_cols(db, table, recover=None):
894
    return list(col_names(select(db, table, limit=0, order_by=None,
895 2443 aaronmk
        recover=recover, log_level=4)))
896 2414 aaronmk
897 2291 aaronmk
def pkey(db, table, recover=None):
898 832 aaronmk
    '''Assumed to be the first column in the table'''
899 2339 aaronmk
    return table_cols(db, table, recover)[0]
900 832 aaronmk
901 2559 aaronmk
not_null_col = 'not_null_col'
902 2340 aaronmk
903
def table_not_null_col(db, table, recover=None):
904
    '''Returns not_null_col if the table has such a column, else the pkey.'''
905
    if not_null_col in table_cols(db, table, recover): return not_null_col
906
    else: return pkey(db, table, recover)
907
908 853 aaronmk
def index_cols(db, table, index):
909
    '''Can also use this for UNIQUE constraints, because a UNIQUE index is
910
    automatically created. When you don't know whether something is a UNIQUE
911
    constraint or a UNIQUE index, use this function.'''
912 1909 aaronmk
    module = util.root_module(db.db)
913
    if module == 'psycopg2':
914
        return list(values(run_query(db, '''\
915 853 aaronmk
SELECT attname
916 866 aaronmk
FROM
917
(
918
        SELECT attnum, attname
919
        FROM pg_index
920
        JOIN pg_class index ON index.oid = indexrelid
921
        JOIN pg_class table_ ON table_.oid = indrelid
922
        JOIN pg_attribute ON attrelid = indrelid AND attnum = ANY (indkey)
923
        WHERE
924 2782 aaronmk
            table_.relname = '''+db.esc_value(table)+'''
925
            AND index.relname = '''+db.esc_value(index)+'''
926 866 aaronmk
    UNION
927
        SELECT attnum, attname
928
        FROM
929
        (
930
            SELECT
931
                indrelid
932
                , (regexp_matches(indexprs, E':varattno (\\\\d+)', 'g'))[1]::int
933
                    AS indkey
934
            FROM pg_index
935
            JOIN pg_class index ON index.oid = indexrelid
936
            JOIN pg_class table_ ON table_.oid = indrelid
937
            WHERE
938 2782 aaronmk
                table_.relname = '''+db.esc_value(table)+'''
939
                AND index.relname = '''+db.esc_value(index)+'''
940 866 aaronmk
        ) s
941
        JOIN pg_attribute ON attrelid = indrelid AND attnum = indkey
942
) s
943 853 aaronmk
ORDER BY attnum
944 2782 aaronmk
'''
945
            , cacheable=True, log_level=4)))
946 1909 aaronmk
    else: raise NotImplementedError("Can't list index columns for "+module+
947
        ' database')
948 853 aaronmk
949 464 aaronmk
def constraint_cols(db, table, constraint):
950 1849 aaronmk
    module = util.root_module(db.db)
951 464 aaronmk
    if module == 'psycopg2':
952
        return list(values(run_query(db, '''\
953
SELECT attname
954
FROM pg_constraint
955
JOIN pg_class ON pg_class.oid = conrelid
956
JOIN pg_attribute ON attrelid = conrelid AND attnum = ANY (conkey)
957
WHERE
958 2783 aaronmk
    relname = '''+db.esc_value(table)+'''
959
    AND conname = '''+db.esc_value(constraint)+'''
960 464 aaronmk
ORDER BY attnum
961 2783 aaronmk
'''
962
            )))
963 464 aaronmk
    else: raise NotImplementedError("Can't list constraint columns for "+module+
964
        ' database')
965
966 3079 aaronmk
#### Functions
967
968
def function_exists(db, function):
969
    function = sql_gen.as_Function(function)
970
971
    info_table = sql_gen.Table('routines', 'information_schema')
972
    conds = [('routine_name', function.name)]
973
    schema = function.schema
974
    if schema != None: conds.append(('routine_schema', schema))
975
    # Exclude trigger functions, since they cannot be called directly
976
    conds.append(('data_type', sql_gen.CompareCond('trigger', '!=')))
977
978
    return list(values(select(db, info_table, ['routine_name'], conds,
979
        order_by='routine_schema', limit=1, log_level=4))) != []
980
        # TODO: order_by search_path schema order
981
982
##### Structural changes
983
984
#### Columns
985
986
def add_col(db, table, col, comment=None, **kw_args):
987
    '''
988
    @param col TypedCol The name may be versioned, so be sure to propagate any
989
        renaming back to any source column for the TypedCol.
990
    @param comment None|str SQL comment used to distinguish columns of the same
991
        name from each other when they contain different data, to allow the
992
        ADD COLUMN query to be cached. If not set, query will not be cached.
993
    '''
994
    assert isinstance(col, sql_gen.TypedCol)
995
996
    while True:
997
        str_ = 'ALTER TABLE '+table.to_str(db)+' ADD COLUMN '+col.to_str(db)
998
        if comment != None: str_ += ' '+sql_gen.esc_comment(comment)
999
1000
        try:
1001
            run_query(db, str_, recover=True, cacheable=True, **kw_args)
1002
            break
1003
        except DuplicateException:
1004
            col.name = next_version(col.name)
1005
            # try again with next version of name
1006
1007
def add_not_null(db, col):
1008
    table = col.table
1009
    col = sql_gen.to_name_only_col(col)
1010
    run_query(db, 'ALTER TABLE '+table.to_str(db)+' ALTER COLUMN '
1011
        +col.to_str(db)+' SET NOT NULL', cacheable=True, log_level=3)
1012
1013 2096 aaronmk
row_num_col = '_row_num'
1014
1015 3079 aaronmk
row_num_typed_col = sql_gen.TypedCol(row_num_col, 'serial', nullable=False,
1016
    constraints='PRIMARY KEY')
1017
1018
def add_row_num(db, table):
1019
    '''Adds a row number column to a table. Its name is in row_num_col. It will
1020
    be the primary key.'''
1021
    add_col(db, table, row_num_typed_col, log_level=3)
1022
1023
#### Indexes
1024
1025
def add_pkey(db, table, cols=None, recover=None):
1026
    '''Adds a primary key.
1027
    @param cols [sql_gen.Col,...] The columns in the primary key.
1028
        Defaults to the first column in the table.
1029
    @pre The table must not already have a primary key.
1030
    '''
1031
    table = sql_gen.as_Table(table)
1032
    if cols == None: cols = [pkey(db, table, recover)]
1033
    col_strs = [sql_gen.to_name_only_col(v).to_str(db) for v in cols]
1034
1035
    run_query(db, 'ALTER TABLE '+table.to_str(db)+' ADD PRIMARY KEY ('
1036
        +(', '.join(col_strs))+')', recover=True, cacheable=True, log_level=3,
1037
        log_ignore_excs=(DuplicateException,))
1038
1039 2998 aaronmk
def add_index(db, exprs, table=None, unique=False, ensure_not_null_=True):
1040 2688 aaronmk
    '''Adds an index on column(s) or expression(s) if it doesn't already exist.
1041 2538 aaronmk
    Currently, only function calls are supported as expressions.
1042 2998 aaronmk
    @param ensure_not_null_ If set, translates NULL values to sentinel values.
1043 2847 aaronmk
        This allows indexes to be used for comparisons where NULLs are equal.
1044 2538 aaronmk
    '''
1045 2964 aaronmk
    exprs = lists.mk_seq(exprs)
1046 2538 aaronmk
1047 2688 aaronmk
    # Parse exprs
1048
    old_exprs = exprs[:]
1049
    exprs = []
1050
    cols = []
1051
    for i, expr in enumerate(old_exprs):
1052 2823 aaronmk
        expr = sql_gen.as_Col(expr, table)
1053 2688 aaronmk
1054 2823 aaronmk
        # Handle nullable columns
1055 2998 aaronmk
        if ensure_not_null_:
1056
            try: expr = ensure_not_null(db, expr)
1057 2860 aaronmk
            except KeyError: pass # unknown type, so just create plain index
1058 2823 aaronmk
1059 2688 aaronmk
        # Extract col
1060 3002 aaronmk
        expr = copy.deepcopy(expr) # don't modify input!
1061 2688 aaronmk
        if isinstance(expr, sql_gen.FunctionCall):
1062
            col = expr.args[0]
1063
            expr = sql_gen.Expr(expr)
1064
        else: col = expr
1065 2823 aaronmk
        assert isinstance(col, sql_gen.Col)
1066 2688 aaronmk
1067
        # Extract table
1068
        if table == None:
1069
            assert sql_gen.is_table_col(col)
1070
            table = col.table
1071
1072
        col.table = None
1073
1074
        exprs.append(expr)
1075
        cols.append(col)
1076 2408 aaronmk
1077 2688 aaronmk
    table = sql_gen.as_Table(table)
1078
    index = sql_gen.Table(str(sql_gen.Col(','.join(map(str, cols)), table)))
1079
1080 3005 aaronmk
    # Add index
1081
    while True:
1082
        str_ = 'CREATE'
1083
        if unique: str_ += ' UNIQUE'
1084
        str_ += ' INDEX '+index.to_str(db)+' ON '+table.to_str(db)+' ('+(
1085
            ', '.join((v.to_str(db) for v in exprs)))+')'
1086
1087
        try:
1088
            run_query(db, str_, recover=True, cacheable=True, log_level=3,
1089
                log_ignore_excs=(DuplicateException,))
1090
            break
1091
        except DuplicateException:
1092
            index.name = next_version(index.name)
1093
            # try again with next version of name
1094 2408 aaronmk
1095 2997 aaronmk
def add_index_col(db, col, suffix, expr, nullable=True):
1096 3000 aaronmk
    if sql_gen.index_col(col) != None: return # already has index col
1097 2997 aaronmk
1098
    new_col = sql_gen.suffixed_col(col, suffix)
1099
1100 3006 aaronmk
    # Add column
1101 3038 aaronmk
    new_typed_col = sql_gen.TypedCol(new_col.name, db.col_info(col).type)
1102 3045 aaronmk
    add_col(db, col.table, new_typed_col, comment='src: '+repr(col),
1103
        log_level=3)
1104 3037 aaronmk
    new_col.name = new_typed_col.name # propagate any renaming
1105 3006 aaronmk
1106 3064 aaronmk
    update(db, col.table, [(new_col, expr)], in_place=True, cacheable=True,
1107
        log_level=3)
1108 2997 aaronmk
    if not nullable: add_not_null(db, new_col)
1109
    add_index(db, new_col)
1110
1111 3104 aaronmk
    col.table.index_cols[col.name] = new_col.name
1112 2997 aaronmk
1113 3047 aaronmk
# Controls when ensure_not_null() will use index columns
1114
not_null_index_cols_min_rows = 0 # rows; initially always use index columns
1115
1116 2997 aaronmk
def ensure_not_null(db, col):
1117
    '''For params, see sql_gen.ensure_not_null()'''
1118
    expr = sql_gen.ensure_not_null(db, col)
1119
1120 3047 aaronmk
    # For a nullable column in a temp table, add a separate index column instead.
1121
    # Note that for small datasources, this adds 6-25% to the total import time.
1122
    if (sql_gen.is_temp_col(col) and isinstance(expr, sql_gen.EnsureNotNull)
1123
        and table_row_count(db, col.table) >= not_null_index_cols_min_rows):
1124 2997 aaronmk
        add_index_col(db, col, '::NOT NULL', expr, nullable=False)
1125 3000 aaronmk
        expr = sql_gen.index_col(col)
1126 2997 aaronmk
1127
    return expr
1128
1129 3083 aaronmk
already_indexed = object() # tells add_indexes() the pkey has already been added
1130
1131
def add_indexes(db, table, has_pkey=True):
1132
    '''Adds an index on all columns in a table.
1133
    @param has_pkey bool|already_indexed Whether a pkey instead of a regular
1134
        index should be added on the first column.
1135
        * If already_indexed, the pkey is assumed to have already been added
1136
    '''
1137
    cols = table_cols(db, table)
1138
    if has_pkey:
1139
        if has_pkey is not already_indexed: add_pkey(db, table)
1140
        cols = cols[1:]
1141
    for col in cols: add_index(db, col, table)
1142
1143 3079 aaronmk
#### Tables
1144 2772 aaronmk
1145 3079 aaronmk
### Maintenance
1146 2772 aaronmk
1147 3079 aaronmk
def analyze(db, table):
1148
    table = sql_gen.as_Table(table)
1149
    run_query(db, 'ANALYZE '+table.to_str(db), log_level=3)
1150 2934 aaronmk
1151 3079 aaronmk
def autoanalyze(db, table):
1152
    if db.autoanalyze: analyze(db, table)
1153 2935 aaronmk
1154 3079 aaronmk
def vacuum(db, table):
1155
    table = sql_gen.as_Table(table)
1156
    db.with_autocommit(lambda: run_query(db, 'VACUUM ANALYZE '+table.to_str(db),
1157
        log_level=3))
1158 2086 aaronmk
1159 3079 aaronmk
### Lifecycle
1160
1161 2889 aaronmk
def drop_table(db, table):
1162
    table = sql_gen.as_Table(table)
1163
    return run_query(db, 'DROP TABLE IF EXISTS '+table.to_str(db)+' CASCADE')
1164
1165 3082 aaronmk
def create_table(db, table, cols=[], has_pkey=True, col_indexes=True,
1166
    like=None):
1167 2675 aaronmk
    '''Creates a table.
1168 2681 aaronmk
    @param cols [sql_gen.TypedCol,...] The column names and types
1169
    @param has_pkey If set, the first column becomes the primary key.
1170 2760 aaronmk
    @param col_indexes bool|[ref]
1171
        * If True, indexes will be added on all non-pkey columns.
1172
        * If a list reference, [0] will be set to a function to do this.
1173
          This can be used to delay index creation until the table is populated.
1174 2675 aaronmk
    '''
1175
    table = sql_gen.as_Table(table)
1176
1177 3082 aaronmk
    if like != None:
1178
        cols = [sql_gen.CustomCode('LIKE '+like.to_str(db)+' INCLUDING ALL')
1179
            ]+cols
1180 2681 aaronmk
    if has_pkey:
1181
        cols[0] = pkey = copy.copy(cols[0]) # don't modify input!
1182 2872 aaronmk
        pkey.constraints = 'PRIMARY KEY'
1183 2681 aaronmk
1184 3085 aaronmk
    temp = table.is_temp and not db.debug_temp
1185
        # temp tables permanent in debug_temp mode
1186 2760 aaronmk
1187 3085 aaronmk
    # Create table
1188
    while True:
1189
        str_ = 'CREATE'
1190
        if temp: str_ += ' TEMP'
1191
        str_ += ' TABLE '+table.to_str(db)+' (\n'
1192
        str_ += '\n, '.join(c.to_str(db) for c in cols)
1193
        str_ += '\n);\n'
1194
1195
        try:
1196
            run_query(db, str_, cacheable=True, log_level=2,
1197
                log_ignore_excs=(DuplicateException,))
1198
            break
1199
        except DuplicateException:
1200
            table.name = next_version(table.name)
1201
            # try again with next version of name
1202
1203 2760 aaronmk
    # Add indexes
1204 2773 aaronmk
    if has_pkey: has_pkey = already_indexed
1205
    def add_indexes_(): add_indexes(db, table, has_pkey)
1206
    if isinstance(col_indexes, list): col_indexes[0] = add_indexes_ # defer
1207
    elif col_indexes: add_indexes_() # add now
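
# CREATE TABLE sketch (not part of the original module; names are
# illustrative). The first column becomes the pkey because has_pkey defaults
# to True.
def _create_table_example(db):
    create_table(db, 'plots', [
        sql_gen.TypedCol('plot_id', 'serial'),
        sql_gen.TypedCol('name', 'text'),
    ])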
1208 2675 aaronmk
1209 3084 aaronmk
def copy_table_struct(db, src, dest):
1210
    '''Creates a structure-only copy of a table. (Does not copy data.)'''
1211 3085 aaronmk
    create_table(db, dest, has_pkey=False, col_indexes=False, like=src)
1212 3084 aaronmk
1213 3079 aaronmk
### Data
1214 2684 aaronmk
1215 2970 aaronmk
def truncate(db, table, schema='public', **kw_args):
1216
    '''For params, see run_query()'''
1217 2777 aaronmk
    table = sql_gen.as_Table(table, schema)
1218 2970 aaronmk
    return run_query(db, 'TRUNCATE '+table.to_str(db)+' CASCADE', **kw_args)
1219 2732 aaronmk
1220 2965 aaronmk
def empty_temp(db, tables):
1221 2972 aaronmk
    if db.debug_temp: return # leave temp tables there for debugging
1222 2965 aaronmk
    tables = lists.mk_seq(tables)
1223 2971 aaronmk
    for table in tables: truncate(db, table, log_level=3)
1224 2965 aaronmk
1225 1968 aaronmk
def empty_db(db, schema='public', **kw_args):
1226
    '''For kw_args, see tables()'''
1227
    for table in tables(db, schema, **kw_args): truncate(db, table, schema)
1228 3094 aaronmk
1229
def distinct_table(db, table, distinct_on):
1230
    '''Creates a copy of a temp table which is distinct on the given columns.
1231 3099 aaronmk
    The old and new tables will both get an index on these columns, to
1232
    facilitate merge joins.
1233 3097 aaronmk
    @param distinct_on If empty, creates a table with one row. This is useful if
1234
        your distinct_on columns are all literal values.
1235 3099 aaronmk
    @return The new table.
1236 3094 aaronmk
    '''
1237 3099 aaronmk
    new_table = sql_gen.suffixed_table(table, '_distinct')
1238 3094 aaronmk
1239 3099 aaronmk
    copy_table_struct(db, table, new_table)
1240 3097 aaronmk
1241
    limit = None
1242
    if distinct_on == []: limit = 1 # one sample row
1243 3099 aaronmk
    else:
1244
        add_index(db, distinct_on, new_table, unique=True)
1245
        add_index(db, distinct_on, table) # for join optimization
1246 3097 aaronmk
1247 3099 aaronmk
    insert_select(db, new_table, None, mk_select(db, table, start=0,
1248 3097 aaronmk
        limit=limit), ignore=True)
1249 3099 aaronmk
    analyze(db, new_table)
1250 3094 aaronmk
1251 3099 aaronmk
    return new_table
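
# Usage sketch (not part of the original module): dedupe a temp staging table
# on a set of key columns before inserting into the main table.
#
#     staging_distinct = distinct_table(db, staging, ['genus', 'species'])
#         # `staging` is a temp sql_gen.Table; both tables get an index on
#         # (genus, species) to enable a merge join against the main table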