# Database access

import copy
import re
import warnings

import exc
import dicts
import iters
import lists
from Proxy import Proxy
import rand
import sql_gen
import strings
import util

##### Exceptions

def get_cur_query(cur, input_query=None):
    raw_query = None
    if hasattr(cur, 'query'): raw_query = cur.query
    elif hasattr(cur, '_last_executed'): raw_query = cur._last_executed

    if raw_query != None: return raw_query
    else: return '[input] '+strings.ustr(input_query)

def _add_cursor_info(e, *args, **kw_args):
    '''For params, see get_cur_query()'''
    exc.add_msg(e, 'query: '+strings.ustr(get_cur_query(*args, **kw_args)))

class DbException(exc.ExceptionWithCause):
    def __init__(self, msg, cause=None, cur=None):
        exc.ExceptionWithCause.__init__(self, msg, cause, cause_newline=True)
        if cur != None: _add_cursor_info(self, cur)

class ExceptionWithName(DbException):
    def __init__(self, name, cause=None):
        DbException.__init__(self, 'for name: '+strings.as_tt(str(name)), cause)
        self.name = name

class ExceptionWithNameValue(DbException):
    def __init__(self, name, value, cause=None):
        DbException.__init__(self, 'for name: '+strings.as_tt(str(name))
            +'; value: '+strings.as_tt(repr(value)), cause)
        self.name = name
        self.value = value

class ExceptionWithNameType(DbException):
    def __init__(self, type_, name, cause=None):
        DbException.__init__(self, 'for type: '+strings.as_tt(str(type_))
            +'; name: '+strings.as_tt(name), cause)
        self.type = type_
        self.name = name

class ConstraintException(DbException):
    def __init__(self, name, cols, cause=None):
        DbException.__init__(self, 'Violated '+strings.as_tt(name)
            +' constraint on columns: '+strings.as_tt(', '.join(cols)), cause)
        self.name = name
        self.cols = cols

class MissingCastException(DbException):
    def __init__(self, type_, col, cause=None):
        DbException.__init__(self, 'Missing cast to type '+strings.as_tt(type_)
            +' on column: '+strings.as_tt(col), cause)
        self.type = type_
        self.col = col

class NameException(DbException): pass

class DuplicateKeyException(ConstraintException): pass

class NullValueException(ConstraintException): pass

class FunctionValueException(ExceptionWithNameValue): pass

class DuplicateException(ExceptionWithNameType): pass

class EmptyRowException(DbException): pass

##### Warnings

class DbWarning(UserWarning): pass

##### Result retrieval

def col_names(cur): return (col[0] for col in cur.description)

def rows(cur): return iter(lambda: cur.fetchone(), None)

def consume_rows(cur):
    '''Used to fetch all rows so result will be cached'''
    iters.consume_iter(rows(cur))

def next_row(cur): return rows(cur).next()

def row(cur):
    row_ = next_row(cur)
    consume_rows(cur)
    return row_

def next_value(cur): return next_row(cur)[0]

def value(cur): return row(cur)[0]

def values(cur): return iters.func_iter(lambda: next_value(cur))

def value_or_none(cur):
    try: return value(cur)
    except StopIteration: return None
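# Illustrative usage of the helpers above (added commentary, not part of the
# original module); `cur` is any DB-API cursor that has already run a query:
#     col_names(cur)     # generator of column names from cur.description
#     next_row(cur)      # fetch a single row, leaving the rest unread
#     value(cur)         # first column of the first row, consuming the rest
#     value_or_none(cur) # same, but None instead of StopIteration when empty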

##### Escaping

def esc_name_by_module(module, name):
    if module == 'psycopg2' or module == None: quote = '"'
    elif module == 'MySQLdb': quote = '`'
    else: raise NotImplementedError("Can't escape name for "+module+' database')
    return sql_gen.esc_name(name, quote)

def esc_name_by_engine(engine, name, **kw_args):
    return esc_name_by_module(db_engines[engine][0], name, **kw_args)

def esc_name(db, name, **kw_args):
    return esc_name_by_module(util.root_module(db.db), name, **kw_args)

def qual_name(db, schema, table):
    def esc_name_(name): return esc_name(db, name)
    table = esc_name_(table)
    if schema != None: return esc_name_(schema)+'.'+table
    else: return table
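# Illustrative example (added commentary): on a psycopg2 connection,
# qual_name(db, 'public', 'plot') returns a dotted, double-quoted name such as
# '"public"."plot"', while the MySQLdb code path quotes with backticks instead.
# ('plot' is a placeholder name; exact quoting is done by sql_gen.esc_name().)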

##### Database connections

db_config_names = ['engine', 'host', 'user', 'password', 'database', 'schemas']

db_engines = {
    'MySQL': ('MySQLdb', {'password': 'passwd', 'database': 'db'}),
    'PostgreSQL': ('psycopg2', {}),
}
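# Illustrative db_config (added commentary; all values are placeholders):
#     db_config = {'engine': 'PostgreSQL', 'host': 'localhost', 'user': 'me',
#         'password': '...', 'database': 'my_db', 'schemas': 'public'}
# Keys follow db_config_names above. For MySQL, the mappings in db_engines
# rename 'password'/'database' to the MySQLdb keywords 'passwd'/'db'.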

DatabaseErrors_set = set([DbException])
DatabaseErrors = tuple(DatabaseErrors_set)

def _add_module(module):
    DatabaseErrors_set.add(module.DatabaseError)
    global DatabaseErrors
    DatabaseErrors = tuple(DatabaseErrors_set)

def db_config_str(db_config):
    return db_config['engine']+' database '+db_config['database']

log_debug_none = lambda msg, level=2: None

class DbConn:
    def __init__(self, db_config, autocommit=True, caching=True,
        log_debug=log_debug_none, debug_temp=False):
        '''
        @param debug_temp Whether temporary objects should instead be permanent.
            This assists in debugging the internal objects used by the program.
        '''
        self.db_config = db_config
        self.autocommit = autocommit
        self.caching = caching
        self.log_debug = log_debug
        self.debug = log_debug != log_debug_none
        self.debug_temp = debug_temp
        self.autoanalyze = False

        self.__db = None
        self.query_results = {}
        self._savepoint = 0
        self._notices_seen = set()

    def __getattr__(self, name):
        if name == '__dict__': raise Exception('getting __dict__')
        if name == 'db': return self._db()
        else: raise AttributeError()

    def __getstate__(self):
        state = copy.copy(self.__dict__) # shallow copy
        state['log_debug'] = None # don't pickle the debug callback
        state['_DbConn__db'] = None # don't pickle the connection
        return state

    def connected(self): return self.__db != None

    def _db(self):
        if self.__db == None:
            # Process db_config
            db_config = self.db_config.copy() # don't modify input!
            schemas = db_config.pop('schemas', None)
            module_name, mappings = db_engines[db_config.pop('engine')]
            module = __import__(module_name)
            _add_module(module)
            for orig, new in mappings.iteritems():
                try: util.rename_key(db_config, orig, new)
                except KeyError: pass

            # Connect
            self.__db = module.connect(**db_config)

            # Configure connection
            if hasattr(self.db, 'set_isolation_level'):
                import psycopg2.extensions
                self.db.set_isolation_level(
                    psycopg2.extensions.ISOLATION_LEVEL_READ_COMMITTED)
            if schemas != None:
                search_path = [self.esc_name(s) for s in schemas.split(',')]
                search_path.append(value(run_query(self, 'SHOW search_path',
                    log_level=4)))
                run_query(self, 'SET search_path TO '+(','.join(search_path)),
                    log_level=3)

        return self.__db

    class DbCursor(Proxy):
        def __init__(self, outer):
            Proxy.__init__(self, outer.db.cursor())
            self.outer = outer
            self.query_results = outer.query_results
            self.query_lookup = None
            self.result = []

        def execute(self, query):
            self._is_insert = query.startswith('INSERT')
            self.query_lookup = query
            try:
                try:
                    cur = self.inner.execute(query)
                    self.outer.do_autocommit()
                finally: self.query = get_cur_query(self.inner, query)
            except Exception, e:
                _add_cursor_info(e, self, query)
                self.result = e # cache the exception as the result
                self._cache_result()
                raise

            # Always cache certain queries
            if query.startswith('CREATE') or query.startswith('ALTER'):
                # structural changes
                # Rest of query must be unique in the face of name collisions,
                # so don't cache ADD COLUMN unless it has distinguishing comment
                if query.find('ADD COLUMN') < 0 or query.endswith('*/'):
                    self._cache_result()
            elif self.rowcount == 0 and query.startswith('SELECT'): # empty
                consume_rows(self) # fetch all rows so result will be cached

            return cur

        def fetchone(self):
            row = self.inner.fetchone()
            if row != None: self.result.append(row)
            # otherwise, fetched all rows
            else: self._cache_result()
            return row

        def _cache_result(self):
            # For inserts that return a result set, don't cache result set since
            # inserts are not idempotent. Other non-SELECT queries don't have
            # their result set read, so only exceptions will be cached (an
            # invalid query will always be invalid).
            if self.query_results != None and (not self._is_insert
                or isinstance(self.result, Exception)):

                assert self.query_lookup != None
                self.query_results[self.query_lookup] = self.CacheCursor(
                    util.dict_subset(dicts.AttrsDictView(self),
                    ['query', 'result', 'rowcount', 'description']))

        class CacheCursor:
            def __init__(self, cached_result): self.__dict__ = cached_result

            def execute(self, *args, **kw_args):
                if isinstance(self.result, Exception): raise self.result
                # otherwise, result is a rows list
                self.iter = iter(self.result)

            def fetchone(self):
                try: return self.iter.next()
                except StopIteration: return None

    def esc_value(self, value):
        try: str_ = self.mogrify('%s', [value])
        except NotImplementedError, e:
            module = util.root_module(self.db)
            if module == 'MySQLdb':
                import _mysql
                str_ = _mysql.escape_string(value)
            else: raise e
        return strings.to_unicode(str_)

    def esc_name(self, name): return esc_name(self, name) # calls global func

    def std_code(self, str_):
        '''Standardizes SQL code.
        * Ensures that string literals are prefixed by `E`
        '''
        if str_.startswith("'"): str_ = 'E'+str_
        return str_

    def can_mogrify(self):
        module = util.root_module(self.db)
        return module == 'psycopg2'

    def mogrify(self, query, params=None):
        if self.can_mogrify(): return self.db.cursor().mogrify(query, params)
        else: raise NotImplementedError("Can't mogrify query")
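    # Illustrative usage (added commentary): on psycopg2, esc_value() delegates
    # to cursor.mogrify() and returns a quoted SQL literal, e.g.
    # db.esc_value("it's") -> "'it''s'"; on MySQLdb it falls back to
    # _mysql.escape_string(), and other drivers re-raise NotImplementedError.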

    def print_notices(self):
        if hasattr(self.db, 'notices'):
            for msg in self.db.notices:
                if msg not in self._notices_seen:
                    self._notices_seen.add(msg)
                    self.log_debug(msg, level=2)

    def run_query(self, query, cacheable=False, log_level=2,
        debug_msg_ref=None):
        '''
        @param log_ignore_excs The log_level will be increased by 2 if the query
            throws one of these exceptions.
        @param debug_msg_ref If specified, the log message will be returned in
            this instead of being output. This allows you to filter log messages
            depending on the result of the query.
        '''
        assert query != None

        if not self.caching: cacheable = False
        used_cache = False

        def log_msg(query):
            if used_cache: cache_status = 'cache hit'
            elif cacheable: cache_status = 'cache miss'
            else: cache_status = 'non-cacheable'
            return 'DB query: '+cache_status+':\n'+strings.as_code(query, 'SQL')

        try:
            # Get cursor
            if cacheable:
                try:
                    cur = self.query_results[query]
                    used_cache = True
                except KeyError: cur = self.DbCursor(self)
            else: cur = self.db.cursor()

            # Log query
            if self.debug and debug_msg_ref == None: # log before running
                self.log_debug(log_msg(query), log_level)

            # Run query
            cur.execute(query)
        finally:
            self.print_notices()
            if self.debug and debug_msg_ref != None: # return after running
                debug_msg_ref[0] = log_msg(str(get_cur_query(cur, query)))

        return cur

    def is_cached(self, query): return query in self.query_results

    def with_autocommit(self, func):
        import psycopg2.extensions

        prev_isolation_level = self.db.isolation_level
        self.db.set_isolation_level(
            psycopg2.extensions.ISOLATION_LEVEL_AUTOCOMMIT)
        try: return func()
        finally: self.db.set_isolation_level(prev_isolation_level)

    def with_savepoint(self, func):
        savepoint = 'level_'+str(self._savepoint)
        self.run_query('SAVEPOINT '+savepoint, log_level=4)
        self._savepoint += 1
        try: return func()
        except:
            self.run_query('ROLLBACK TO SAVEPOINT '+savepoint, log_level=4)
            raise
        finally:
            # Always release savepoint, because after ROLLBACK TO SAVEPOINT,
            # "The savepoint remains valid and can be rolled back to again"
            # (http://www.postgresql.org/docs/8.3/static/sql-rollback-to.html).
            self.run_query('RELEASE SAVEPOINT '+savepoint, log_level=4)

            self._savepoint -= 1
            assert self._savepoint >= 0

            self.do_autocommit() # OK to do this after ROLLBACK TO SAVEPOINT
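    # Illustrative usage (added commentary; table/values are placeholders):
    #     db.with_savepoint(lambda: insert(db, 'plot', {'name': 'p1'}))
    # Only the work inside the savepoint is rolled back if the callback raises;
    # the enclosing transaction stays usable, and autocommit waits until the
    # outermost savepoint is released.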

    def do_autocommit(self):
        '''Autocommits if outside savepoint'''
        assert self._savepoint >= 0
        if self.autocommit and self._savepoint == 0:
            self.log_debug('Autocommitting', level=4)
            self.db.commit()

    def col_info(self, col):
        table = sql_gen.Table('columns', 'information_schema')
        type_ = sql_gen.Coalesce(sql_gen.Nullif(sql_gen.Col('data_type'),
            'USER-DEFINED'), sql_gen.Col('udt_name'))
        cols = [type_, 'column_default',
            sql_gen.Cast('boolean', sql_gen.Col('is_nullable'))]

        conds = [('table_name', col.table.name), ('column_name', col.name)]
        schema = col.table.schema
        if schema != None: conds.append(('table_schema', schema))

        type_, default, nullable = row(select(self, table, cols, conds,
            order_by='table_schema', limit=1, cacheable=False, log_level=4))
            # TODO: order_by search_path schema order
        default = sql_gen.as_Code(default, self)

        return sql_gen.TypedCol(col.name, type_, default, nullable)

    def TempFunction(self, name):
        if self.debug_temp: schema = None
        else: schema = 'pg_temp'
        return sql_gen.Function(name, schema)

connect = DbConn
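# Illustrative connection setup (added commentary; config values are placeholders):
#     db = connect({'engine': 'PostgreSQL', 'host': 'localhost', 'user': 'me',
#         'password': '...', 'database': 'my_db'}, autocommit=False)
#     print value(run_query(db, 'SELECT 1'))
# The actual psycopg2/MySQLdb connection is opened lazily, on first use of db.db.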

##### Recoverable querying

def with_savepoint(db, func): return db.with_savepoint(func)

def run_query(db, query, recover=None, cacheable=False, log_level=2,
    log_ignore_excs=None, **kw_args):
    '''For params, see DbConn.run_query()'''
    if recover == None: recover = False
    if log_ignore_excs == None: log_ignore_excs = ()
    log_ignore_excs = tuple(log_ignore_excs)

    debug_msg_ref = None # usually, db.run_query() logs query before running it
    # But if filtering with log_ignore_excs, wait until after exception parsing
    if log_ignore_excs != () or not db.can_mogrify(): debug_msg_ref = [None]

    try:
        try:
            def run(): return db.run_query(query, cacheable, log_level,
                debug_msg_ref, **kw_args)
            if recover and not db.is_cached(query):
                return with_savepoint(db, run)
            else: return run() # don't need savepoint if cached
        except Exception, e:
            msg = strings.ustr(e.args[0])

            match = re.match(r'^duplicate key value violates unique constraint '
                r'"((_?[^\W_]+(?=[._]))?.+?)"', msg)
            if match:
                constraint, table = match.groups()
                cols = []
                if recover: # need auto-rollback to run index_cols()
                    try: cols = index_cols(db, table, constraint)
                    except NotImplementedError: pass
                raise DuplicateKeyException(constraint, cols, e)

            match = re.match(r'^null value in column "(.+?)" violates not-null'
                r' constraint', msg)
            if match: raise NullValueException('NOT NULL', [match.group(1)], e)

            match = re.match(r'^(?:invalid input (?:syntax|value)\b.*?'
                r'|date/time field value out of range): "(.+?)"\n'
                r'(?:(?s).*?)\bfunction "(.+?)"', msg)
            if match:
                value, name = match.groups()
                raise FunctionValueException(name, strings.to_unicode(value), e)

            match = re.match(r'^column "(.+?)" is of type (.+?) but expression '
                r'is of type', msg)
            if match:
                col, type_ = match.groups()
                raise MissingCastException(type_, col, e)

            match = re.match(r'^(\S+) "(.+?)".*? already exists', msg)
            if match:
                type_, name = match.groups()
                raise DuplicateException(type_, name, e)

            raise # no specific exception raised
    except log_ignore_excs:
        log_level += 2
        raise
    finally:
        if debug_msg_ref != None and debug_msg_ref[0] != None:
            db.log_debug(debug_msg_ref[0], log_level)
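# Illustrative usage (added commentary; `insert_sql` and the handler are
# hypothetical): with recover=True a failed query is rolled back to a savepoint
# and re-raised as one of the typed exceptions above, so callers can write:
#     try: run_query(db, insert_sql, recover=True)
#     except DuplicateKeyException, e: log_duplicate(e.name, e.cols)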

##### Basic queries

def next_version(name):
    version = 1 # first existing name was version 0
    match = re.match(r'^(.*)#(\d+)$', name)
    if match:
        name, version = match.groups()
        version = int(version)+1
    return sql_gen.concat(name, '#'+str(version))
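# Illustrative behavior (added commentary): next_version('col') returns 'col#1'
# and next_version('col#1') returns 'col#2' (via sql_gen.concat(), which may
# also shorten names that exceed the identifier length limit).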

def lock_table(db, table, mode):
    table = sql_gen.as_Table(table)
    run_query(db, 'LOCK TABLE '+table.to_str(db)+' IN '+mode+' MODE')

def run_query_into(db, query, into=None, add_indexes_=False, **kw_args):
    '''Outputs a query to a temp table.
    For params, see run_query().
    '''
    if into == None: return run_query(db, query, **kw_args)

    assert isinstance(into, sql_gen.Table)

    into.is_temp = True
    # "temporary tables cannot specify a schema name", so remove schema
    into.schema = None

    kw_args['recover'] = True
    kw_args.setdefault('log_ignore_excs', (DuplicateException,))

    temp = not db.debug_temp # tables are permanent in debug_temp mode

    # Create table
    while True:
        create_query = 'CREATE'
        if temp: create_query += ' TEMP'
        create_query += ' TABLE '+into.to_str(db)+' AS\n'+query

        try:
            cur = run_query(db, create_query, **kw_args)
                # CREATE TABLE AS sets rowcount to # rows in query
            break
        except DuplicateException, e:
            into.name = next_version(into.name)
            # try again with next version of name

    if add_indexes_: add_indexes(db, into)

    # According to the PostgreSQL doc, "The autovacuum daemon cannot access and
    # therefore cannot vacuum or analyze temporary tables. [...] if a temporary
    # table is going to be used in complex queries, it is wise to run ANALYZE on
    # the temporary table after it is populated."
    # (http://www.postgresql.org/docs/9.1/static/sql-createtable.html)
    # If into is not a temp table, ANALYZE is useful but not required.
    analyze(db, into)

    return cur

order_by_pkey = object() # tells mk_select() to order by the pkey

distinct_on_all = object() # tells mk_select() to SELECT DISTINCT ON all columns

def mk_select(db, tables, fields=None, conds=None, distinct_on=[], limit=None,
    start=None, order_by=order_by_pkey, default_table=None):
    '''
    @param tables The single table to select from, or a list of tables to join
        together, with tables after the first being sql_gen.Join objects
    @param fields Use None to select all fields in the table
    @param conds WHERE conditions: [(compare_left_side, compare_right_side),...]
        * container can be any iterable type
        * compare_left_side: sql_gen.Code|str (for col name)
        * compare_right_side: sql_gen.ValueCond|literal value
    @param distinct_on The columns to SELECT DISTINCT ON, or distinct_on_all to
        use all columns
    @return query
    '''
    # Parse tables param
    tables = lists.mk_seq(tables)
    tables = list(tables) # don't modify input! (list() copies input)
    table0 = sql_gen.as_Table(tables.pop(0)) # first table is separate

    # Parse other params
    if conds == None: conds = []
    elif dicts.is_dict(conds): conds = conds.items()
    conds = list(conds) # don't modify input! (list() copies input)
    assert limit == None or type(limit) == int
    assert start == None or type(start) == int
    if order_by is order_by_pkey:
        if distinct_on != []: order_by = None
        else: order_by = pkey(db, table0, recover=True)

    query = 'SELECT'

    def parse_col(col): return sql_gen.as_Col(col, default_table).to_str(db)

    # DISTINCT ON columns
    if distinct_on != []:
        query += '\nDISTINCT'
        if distinct_on is not distinct_on_all:
            query += ' ON ('+(', '.join(map(parse_col, distinct_on)))+')'

    # Columns
    if fields == None:
        if query.find('\n') >= 0: whitespace = '\n'
        else: whitespace = ' '
        query += whitespace+'*'
    else:
        assert fields != []
        query += '\n'+('\n, '.join(map(parse_col, fields)))

    # Main table
    query += '\nFROM '+table0.to_str(db)

    # Add joins
    left_table = table0
    for join_ in tables:
        table = join_.table

        # Parse special values
        if join_.type_ is sql_gen.filter_out: # filter no match
            conds.append((sql_gen.Col(table_not_null_col(db, table), table),
                sql_gen.CompareCond(None, '~=')))

        query += '\n'+join_.to_str(db, left_table)

        left_table = table

    missing = True
    if conds != []:
        if len(conds) == 1: whitespace = ' '
        else: whitespace = '\n'
        query += '\n'+sql_gen.combine_conds([sql_gen.ColValueCond(l, r)
            .to_str(db) for l, r in conds], 'WHERE')
        missing = False
    if order_by != None:
        query += '\nORDER BY '+sql_gen.as_Col(order_by, table0).to_str(db)
    if limit != None: query += '\nLIMIT '+str(limit); missing = False
    if start != None:
        if start != 0: query += '\nOFFSET '+str(start)
        missing = False
    if missing: warnings.warn(DbWarning(
        'SELECT statement missing a WHERE, LIMIT, or OFFSET clause: '+query))

    return query
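# Illustrative usage (added commentary; table/column names are placeholders):
#     query = mk_select(db, 'plot', ['plot_id', 'area'], {'source_id': 1},
#         limit=10)
# builds a SELECT ... FROM plot WHERE ... LIMIT 10 string; by default the
# result is ordered by the table's pkey unless order_by is overridden.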

def select(db, *args, **kw_args):
    '''For params, see mk_select() and run_query()'''
    recover = kw_args.pop('recover', None)
    cacheable = kw_args.pop('cacheable', True)
    log_level = kw_args.pop('log_level', 2)

    return run_query(db, mk_select(db, *args, **kw_args), recover, cacheable,
        log_level=log_level)

def mk_insert_select(db, table, cols=None, select_query=None, returning=None,
    embeddable=False, ignore=False):
    '''
    @param returning str|None An inserted column (such as pkey) to return
    @param embeddable Whether the query should be embeddable as a nested SELECT.
        Warning: If you set this and cacheable=True when the query is run, the
        query will be fully cached, not just if it raises an exception.
    @param ignore Whether to ignore duplicate keys.
    '''
    table = sql_gen.remove_table_rename(sql_gen.as_Table(table))
    if cols == []: cols = None # no cols (all defaults) = unknown col names
    if cols != None: cols = [sql_gen.to_name_only_col(c, table) for c in cols]
    if select_query == None: select_query = 'DEFAULT VALUES'
    if returning != None: returning = sql_gen.as_Col(returning, table)

    first_line = 'INSERT INTO '+table.to_str(db)

    def mk_insert(select_query):
        query = first_line
        if cols != None:
            query += '\n('+(', '.join((c.to_str(db) for c in cols)))+')'
        query += '\n'+select_query

        if returning != None:
            returning_name_col = sql_gen.to_name_only_col(returning)
            query += '\nRETURNING '+returning_name_col.to_str(db)

        return query

    return_type = 'unknown'
    if returning != None: return_type = returning.to_str(db)+'%TYPE'

    lang = 'sql'
    if ignore:
        # Always return something to set the correct rowcount
        if returning == None: returning = sql_gen.NamedCol('NULL', None)

        embeddable = True # must use function
        lang = 'plpgsql'

        if cols == None:
            row = [sql_gen.Col(sql_gen.all_cols, 'row')]
            row_vars = [sql_gen.Table('row')]
        else:
            row_vars = row = [sql_gen.Col(c.name, 'row') for c in cols]

        query = '''\
DECLARE
    row '''+table.to_str(db)+'''%ROWTYPE;
BEGIN
    /* Need an EXCEPTION block for each individual row because "When an error is
    caught by an EXCEPTION clause, [...] all changes to persistent database
    state within the block are rolled back."
    This is unfortunate because "A block containing an EXCEPTION clause is
    significantly more expensive to enter and exit than a block without one."
    (http://www.postgresql.org/docs/8.3/static/plpgsql-control-structures.html\
#PLPGSQL-ERROR-TRAPPING)
    */
    FOR '''+(', '.join((v.to_str(db) for v in row_vars)))+''' IN
'''+select_query+'''
    LOOP
        BEGIN
            RETURN QUERY
'''+mk_insert(sql_gen.Values(row).to_str(db))+'''
;
        EXCEPTION
            WHEN unique_violation THEN NULL; -- continue to next row
        END;
    END LOOP;
END;\
'''
    else: query = mk_insert(select_query)

    if embeddable:
        # Create function
        function_name = sql_gen.clean_name(first_line)
        while True:
            try:
                function = db.TempFunction(function_name)

                function_query = '''\
CREATE FUNCTION '''+function.to_str(db)+'''()
RETURNS SETOF '''+return_type+'''
LANGUAGE '''+lang+'''
AS $$
'''+query+'''
$$;
'''
                run_query(db, function_query, recover=True, cacheable=True,
                    log_ignore_excs=(DuplicateException,))
                break # this version was successful
            except DuplicateException, e:
                function_name = next_version(function_name)
                # try again with next version of name

        # Return query that uses function
        cols = None
        if returning != None: cols = [returning]
        func_table = sql_gen.NamedTable('f', sql_gen.FunctionCall(function),
            cols) # AS clause requires function alias
        return mk_select(db, func_table, start=0, order_by=None)

    return query

def insert_select(db, table, *args, **kw_args):
    '''For params, see mk_insert_select() and run_query_into()
    @param into sql_gen.Table with suggested name of temp table to put RETURNING
        values in
    '''
    into = kw_args.pop('into', None)
    if into != None: kw_args['embeddable'] = True
    recover = kw_args.pop('recover', None)
    if kw_args.get('ignore', False): recover = True
    cacheable = kw_args.pop('cacheable', True)
    log_level = kw_args.pop('log_level', 2)

    cur = run_query_into(db, mk_insert_select(db, table, *args, **kw_args),
        into, recover=recover, cacheable=cacheable, log_level=log_level)
    autoanalyze(db, table)
    return cur

default = sql_gen.default # tells insert() to use the default value for a column

def insert(db, table, row, *args, **kw_args):
    '''For params, see insert_select()'''
    if lists.is_seq(row): cols = None
    else:
        cols = row.keys()
        row = row.values()
    row = list(row) # ensure that "== []" works

    if row == []: query = None
    else: query = sql_gen.Values(row).to_str(db)

    return insert_select(db, table, cols, query, *args, **kw_args)
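# Illustrative usage (added commentary; names are placeholders):
#     insert(db, 'plot', {'plot_id': default, 'area': 1.5})  # dict row
#     insert(db, 'plot', [default, 1.5])                     # sequence row
# `default` above stands for sql_gen.default, i.e. the column's DEFAULT value.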

def mk_update(db, table, changes=None, cond=None, in_place=False):
    '''
    @param changes [(col, new_value),...]
        * container can be any iterable type
        * col: sql_gen.Code|str (for col name)
        * new_value: sql_gen.Code|literal value
    @param cond sql_gen.Code WHERE condition. e.g. use sql_gen.*Cond objects.
    @param in_place If set, locks the table and updates rows in place.
        This avoids creating dead rows in PostgreSQL.
        * cond must be None
    @return str query
    '''
    table = sql_gen.as_Table(table)
    changes = [(sql_gen.to_name_only_col(c, table), sql_gen.as_Value(v))
        for c, v in changes]

    if in_place:
        assert cond == None

        query = 'ALTER TABLE '+table.to_str(db)+'\n'
        query += ',\n'.join(('ALTER COLUMN '+c.to_str(db)+' TYPE '
            +db.col_info(sql_gen.with_default_table(c, table)).type
            +'\nUSING '+v.to_str(db) for c, v in changes))
    else:
        query = 'UPDATE '+table.to_str(db)+'\nSET\n'
        query += ',\n'.join((c.to_str(db)+' = '+v.to_str(db)
            for c, v in changes))
        if cond != None: query += '\nWHERE\n'+cond.to_str(db)

    return query
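# Illustrative usage (added commentary; names are placeholders):
#     update(db, 'plot', [('area', 0)], in_place=True)
# emits ALTER TABLE ... ALTER COLUMN ... TYPE ... USING statements, whereas the
# default path emits a plain UPDATE ... SET ... [WHERE cond] query.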

def update(db, table, *args, **kw_args):
    '''For params, see mk_update() and run_query()'''
    recover = kw_args.pop('recover', None)
    cacheable = kw_args.pop('cacheable', False)
    log_level = kw_args.pop('log_level', 2)

    cur = run_query(db, mk_update(db, table, *args, **kw_args), recover,
        cacheable, log_level=log_level)
    autoanalyze(db, table)
    return cur

def last_insert_id(db):
    module = util.root_module(db.db)
    if module == 'psycopg2': return value(run_query(db, 'SELECT lastval()'))
    elif module == 'MySQLdb': return db.insert_id()
    else: return None

def mk_flatten_mapping(db, into, cols, preserve=[], as_items=False):
    '''Creates a mapping from original column names (which may have collisions)
    to names that will be distinct among the columns' tables.
    This is meant to be used for several tables that are being joined together.
    @param cols The columns to combine. Duplicates will be removed.
    @param into The table for the new columns.
    @param preserve [sql_gen.Col...] Columns not to rename. Note that these
        columns will be included in the mapping even if they are not in cols.
        The tables of the provided Col objects will be changed to into, so make
        copies of them if you want to keep the original tables.
    @param as_items Whether to return a list of dict items instead of a dict
    @return dict(orig_col=new_col, ...)
        * orig_col: sql_gen.Col(orig_col_name, orig_table)
        * new_col: sql_gen.Col(orig_col_name, into)
        * All mappings use the into table so its name can easily be
          changed for all columns at once
    '''
    cols = lists.uniqify(cols)

    items = []
    for col in preserve:
        orig_col = copy.copy(col)
        col.table = into
        items.append((orig_col, col))
    preserve = set(preserve)
    for col in cols:
        if col not in preserve:
            items.append((col, sql_gen.Col(str(col), into, col.srcs)))

    if not as_items: items = dict(items)
    return items

def flatten(db, into, joins, cols, limit=None, start=None, **kw_args):
    '''For params, see mk_flatten_mapping()
    @return See return value of mk_flatten_mapping()
    '''
    items = mk_flatten_mapping(db, into, cols, as_items=True, **kw_args)
    cols = [sql_gen.NamedCol(new.name, old) for old, new in items]
    run_query_into(db, mk_select(db, joins, cols, limit=limit, start=start),
        into=into, add_indexes_=True)
    return dict(items)
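# Illustrative usage (added commentary; names are placeholders): flatten()
# materializes a join into the `into` temp table and returns the rename mapping:
#     mapping = flatten(db, sql_gen.Table('joined'), joins, cols)
# Each original sql_gen.Col in `cols` maps to a uniquely-named column on 'joined',
# so later code can refer to columns without worrying about name collisions.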

##### Database structure introspection

#### Tables

def tables(db, schema_like='public', table_like='%', exact=False):
    if exact: compare = '='
    else: compare = 'LIKE'

    module = util.root_module(db.db)
    if module == 'psycopg2':
        conds = [('schemaname', sql_gen.CompareCond(schema_like, compare)),
            ('tablename', sql_gen.CompareCond(table_like, compare))]
        return values(select(db, 'pg_tables', ['tablename'], conds,
            order_by='tablename', log_level=4))
    elif module == 'MySQLdb':
        return values(run_query(db, 'SHOW TABLES LIKE '+db.esc_value(table_like)
            , cacheable=True, log_level=4))
    else: raise NotImplementedError("Can't list tables for "+module+' database')

def table_exists(db, table):
    table = sql_gen.as_Table(table)
    return list(tables(db, table.schema, table.name, exact=True)) != []

def table_row_count(db, table, recover=None):
    return value(run_query(db, mk_select(db, table, [sql_gen.row_count],
        order_by=None, start=0), recover=recover, log_level=3))

def table_cols(db, table, recover=None):
    return list(col_names(select(db, table, limit=0, order_by=None,
        recover=recover, log_level=4)))

def pkey(db, table, recover=None):
    '''Assumed to be first column in table'''
    return table_cols(db, table, recover)[0]

not_null_col = 'not_null_col'

def table_not_null_col(db, table, recover=None):
    '''Name assumed to be the value of not_null_col. If not found, uses pkey.'''
    if not_null_col in table_cols(db, table, recover): return not_null_col
    else: return pkey(db, table, recover)

def index_cols(db, table, index):
    '''Can also use this for UNIQUE constraints, because a UNIQUE index is
    automatically created. When you don't know whether something is a UNIQUE
    constraint or a UNIQUE index, use this function.'''
    module = util.root_module(db.db)
    if module == 'psycopg2':
        return list(values(run_query(db, '''\
SELECT attname
FROM
(
        SELECT attnum, attname
        FROM pg_index
        JOIN pg_class index ON index.oid = indexrelid
        JOIN pg_class table_ ON table_.oid = indrelid
        JOIN pg_attribute ON attrelid = indrelid AND attnum = ANY (indkey)
        WHERE
            table_.relname = '''+db.esc_value(table)+'''
            AND index.relname = '''+db.esc_value(index)+'''
    UNION
        SELECT attnum, attname
        FROM
        (
            SELECT
                indrelid
                , (regexp_matches(indexprs, E':varattno (\\d+)', 'g'))[1]::int
                    AS indkey
            FROM pg_index
            JOIN pg_class index ON index.oid = indexrelid
            JOIN pg_class table_ ON table_.oid = indrelid
            WHERE
                table_.relname = '''+db.esc_value(table)+'''
                AND index.relname = '''+db.esc_value(index)+'''
        ) s
        JOIN pg_attribute ON attrelid = indrelid AND attnum = indkey
) s
ORDER BY attnum
'''
            , cacheable=True, log_level=4)))
    else: raise NotImplementedError("Can't list index columns for "+module+
        ' database')

def constraint_cols(db, table, constraint):
    module = util.root_module(db.db)
    if module == 'psycopg2':
        return list(values(run_query(db, '''\
SELECT attname
FROM pg_constraint
JOIN pg_class ON pg_class.oid = conrelid
JOIN pg_attribute ON attrelid = conrelid AND attnum = ANY (conkey)
WHERE
    relname = '''+db.esc_value(table)+'''
    AND conname = '''+db.esc_value(constraint)+'''
ORDER BY attnum
'''
            )))
    else: raise NotImplementedError("Can't list constraint columns for "+module+
        ' database')

#### Functions

def function_exists(db, function):
    function = sql_gen.as_Function(function)

    info_table = sql_gen.Table('routines', 'information_schema')
    conds = [('routine_name', function.name)]
    schema = function.schema
    if schema != None: conds.append(('routine_schema', schema))
    # Exclude trigger functions, since they cannot be called directly
    conds.append(('data_type', sql_gen.CompareCond('trigger', '!=')))

    return list(values(select(db, info_table, ['routine_name'], conds,
        order_by='routine_schema', limit=1, log_level=4))) != []
        # TODO: order_by search_path schema order

##### Structural changes

#### Columns

def add_col(db, table, col, comment=None, **kw_args):
    '''
    @param col TypedCol Name may be versioned, so be sure to propagate any
        renaming back to any source column for the TypedCol.
    @param comment None|str SQL comment used to distinguish columns of the same
        name from each other when they contain different data, to allow the
        ADD COLUMN query to be cached. If not set, query will not be cached.
    '''
    assert isinstance(col, sql_gen.TypedCol)

    while True:
        str_ = 'ALTER TABLE '+table.to_str(db)+' ADD COLUMN '+col.to_str(db)
        if comment != None: str_ += ' '+sql_gen.esc_comment(comment)

        try:
            run_query(db, str_, recover=True, cacheable=True, **kw_args)
            break
        except DuplicateException:
            col.name = next_version(col.name)
            # try again with next version of name
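# Illustrative usage (added commentary; names are placeholders):
#     col = sql_gen.TypedCol('area_m2', 'double precision')
#     add_col(db, table, col, comment='src: plot.area')
# On a name collision, add_col() retries with a versioned name ('area_m2#1', ...)
# and updates col.name so callers see the name actually used.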

def add_not_null(db, col):
    table = col.table
    col = sql_gen.to_name_only_col(col)
    run_query(db, 'ALTER TABLE '+table.to_str(db)+' ALTER COLUMN '
        +col.to_str(db)+' SET NOT NULL', cacheable=True, log_level=3)

row_num_col = '_row_num'

row_num_typed_col = sql_gen.TypedCol(row_num_col, 'serial', nullable=False,
    constraints='PRIMARY KEY')

def add_row_num(db, table):
    '''Adds a row number column to a table. Its name is in row_num_col. It will
    be the primary key.'''
    add_col(db, table, row_num_typed_col, log_level=3)

#### Indexes

def add_pkey(db, table, cols=None, recover=None):
    '''Adds a primary key.
    @param cols [sql_gen.Col,...] The columns in the primary key.
        Defaults to the first column in the table.
    @pre The table must not already have a primary key.
    '''
    table = sql_gen.as_Table(table)
    if cols == None: cols = [pkey(db, table, recover)]
    col_strs = [sql_gen.to_name_only_col(v).to_str(db) for v in cols]

    run_query(db, 'ALTER TABLE '+table.to_str(db)+' ADD PRIMARY KEY ('
        +(', '.join(col_strs))+')', recover=True, cacheable=True, log_level=3,
        log_ignore_excs=(DuplicateException,))

def add_index(db, exprs, table=None, unique=False, ensure_not_null_=True):
    '''Adds an index on column(s) or expression(s) if it doesn't already exist.
    Currently, only function calls are supported as expressions.
    @param ensure_not_null_ If set, translates NULL values to sentinel values.
        This allows indexes to be used for comparisons where NULLs are equal.
    '''
    exprs = lists.mk_seq(exprs)

    # Parse exprs
    old_exprs = exprs[:]
    exprs = []
    cols = []
    for i, expr in enumerate(old_exprs):
        expr = sql_gen.as_Col(expr, table)

        # Handle nullable columns
        if ensure_not_null_:
            try: expr = ensure_not_null(db, expr)
            except KeyError: pass # unknown type, so just create plain index

        # Extract col
        expr = copy.deepcopy(expr) # don't modify input!
        if isinstance(expr, sql_gen.FunctionCall):
            col = expr.args[0]
            expr = sql_gen.Expr(expr)
        else: col = expr
        assert isinstance(col, sql_gen.Col)

        # Extract table
        if table == None:
            assert sql_gen.is_table_col(col)
            table = col.table

        col.table = None

        exprs.append(expr)
        cols.append(col)

    table = sql_gen.as_Table(table)
    index = sql_gen.Table(str(sql_gen.Col(','.join(map(str, cols)), table)))

    # Add index
    while True:
        str_ = 'CREATE'
        if unique: str_ += ' UNIQUE'
        str_ += ' INDEX '+index.to_str(db)+' ON '+table.to_str(db)+' ('+(
            ', '.join((v.to_str(db) for v in exprs)))+')'

        try:
            run_query(db, str_, recover=True, cacheable=True, log_level=3,
                log_ignore_excs=(DuplicateException,))
            break
        except DuplicateException:
            index.name = next_version(index.name)
            # try again with next version of name
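# Illustrative usage (added commentary; names are placeholders):
#     add_index(db, 'plot_id', table)                          # single column
#     add_index(db, ['source_id', 'plot_code'], table, unique=True)
# Nullable columns are first wrapped by ensure_not_null(), so the index can be
# used for comparisons that treat NULLs as equal.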

def add_index_col(db, col, suffix, expr, nullable=True):
    if sql_gen.index_col(col) != None: return # already has index col

    new_col = sql_gen.suffixed_col(col, suffix)

    # Add column
    new_typed_col = sql_gen.TypedCol(new_col.name, db.col_info(col).type)
    add_col(db, col.table, new_typed_col, comment='src: '+repr(col),
        log_level=3)
    new_col.name = new_typed_col.name # propagate any renaming

    update(db, col.table, [(new_col, expr)], in_place=True, cacheable=True,
        log_level=3)
    if not nullable: add_not_null(db, new_col)
    add_index(db, new_col)

    col.table.index_cols[col.name] = new_col.name

# Controls when ensure_not_null() will use index columns
not_null_index_cols_min_rows = 0 # rows; initially always use index columns

def ensure_not_null(db, col):
    '''For params, see sql_gen.ensure_not_null()'''
    expr = sql_gen.ensure_not_null(db, col)

    # If a nullable column in a temp table, add separate index column instead.
    # Note that for small datasources, this adds 6-25% to the total import time.
    if (sql_gen.is_temp_col(col) and isinstance(expr, sql_gen.EnsureNotNull)
        and table_row_count(db, col.table) >= not_null_index_cols_min_rows):
        add_index_col(db, col, '::NOT NULL', expr, nullable=False)
        expr = sql_gen.index_col(col)

    return expr

already_indexed = object() # tells add_indexes() the pkey has already been added

def add_indexes(db, table, has_pkey=True):
    '''Adds an index on all columns in a table.
    @param has_pkey bool|already_indexed Whether a pkey instead of a regular
        index should be added on the first column.
        * If already_indexed, the pkey is assumed to have already been added
    '''
    cols = table_cols(db, table)
    if has_pkey:
        if has_pkey is not already_indexed: add_pkey(db, table)
        cols = cols[1:]
    for col in cols: add_index(db, col, table)

#### Tables

### Maintenance

def analyze(db, table):
    table = sql_gen.as_Table(table)
    run_query(db, 'ANALYZE '+table.to_str(db), log_level=3)

def autoanalyze(db, table):
    if db.autoanalyze: analyze(db, table)

def vacuum(db, table):
    table = sql_gen.as_Table(table)
    db.with_autocommit(lambda: run_query(db, 'VACUUM ANALYZE '+table.to_str(db),
        log_level=3))

### Lifecycle

def drop_table(db, table):
    table = sql_gen.as_Table(table)
    return run_query(db, 'DROP TABLE IF EXISTS '+table.to_str(db)+' CASCADE')

def create_table(db, table, cols=[], has_pkey=True, col_indexes=True,
    like=None):
    '''Creates a table.
    @param cols [sql_gen.TypedCol,...] The column names and types
    @param has_pkey If set, the first column becomes the primary key.
    @param col_indexes bool|[ref]
        * If True, indexes will be added on all non-pkey columns.
        * If a list reference, [0] will be set to a function to do this.
          This can be used to delay index creation until the table is populated.
    '''
    table = sql_gen.as_Table(table)

    if like != None:
        cols = [sql_gen.CustomCode('LIKE '+like.to_str(db)+' INCLUDING ALL')
            ]+cols
    if has_pkey:
        cols[0] = pkey = copy.copy(cols[0]) # don't modify input!
        pkey.constraints = 'PRIMARY KEY'

    temp = table.is_temp and not db.debug_temp
        # temp tables permanent in debug_temp mode

    # Create table
    while True:
        str_ = 'CREATE'
        if temp: str_ += ' TEMP'
        str_ += ' TABLE '+table.to_str(db)+' (\n'
        str_ += '\n, '.join(c.to_str(db) for c in cols)
        str_ += '\n);\n'

        try:
            run_query(db, str_, cacheable=True, log_level=2,
                log_ignore_excs=(DuplicateException,))
            break
        except DuplicateException:
            table.name = next_version(table.name)
            # try again with next version of name

    # Add indexes
    if has_pkey: has_pkey = already_indexed
    def add_indexes_(): add_indexes(db, table, has_pkey)
    if isinstance(col_indexes, list): col_indexes[0] = add_indexes_ # defer
    elif col_indexes: add_indexes_() # add now
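# Illustrative usage (added commentary; names are placeholders):
#     create_table(db, 'plot', [sql_gen.TypedCol('plot_id', 'serial'),
#         sql_gen.TypedCol('area', 'double precision')])
# The first column becomes the primary key (has_pkey=True), and indexes are
# added on the remaining columns unless col_indexes is False or a deferred [ref].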

def copy_table_struct(db, src, dest):
    '''Creates a structure-only copy of a table. (Does not copy data.)'''
    create_table(db, dest, has_pkey=False, col_indexes=False, like=src)

### Data

def truncate(db, table, schema='public', **kw_args):
    '''For params, see run_query()'''
    table = sql_gen.as_Table(table, schema)
    return run_query(db, 'TRUNCATE '+table.to_str(db)+' CASCADE', **kw_args)

def empty_temp(db, tables):
    if db.debug_temp: return # leave temp tables there for debugging
    tables = lists.mk_seq(tables)
    for table in tables: truncate(db, table, log_level=3)

def empty_db(db, schema='public', **kw_args):
    '''For kw_args, see tables()'''
    for table in tables(db, schema, **kw_args): truncate(db, table, schema)

def distinct_table(db, table, distinct_on):
    '''Creates a copy of a temp table which is distinct on the given columns.
    The old and new tables will both get an index on these columns, to
    facilitate merge joins.
    @param distinct_on If empty, creates a table with one row. This is useful if
        your distinct_on columns are all literal values.
    @return The new table.
    '''
    new_table = sql_gen.suffixed_table(table, '_distinct')

    copy_table_struct(db, table, new_table)

    limit = None
    if distinct_on == []: limit = 1 # one sample row
    else:
        add_index(db, distinct_on, new_table, unique=True)
        add_index(db, distinct_on, table) # for join optimization

    insert_select(db, new_table, None, mk_select(db, table, start=0,
        limit=limit), ignore=True)
    analyze(db, new_table)

    return new_table
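# Illustrative usage (added commentary; names are placeholders):
#     plot_distinct = distinct_table(db, plot_temp, [sql_gen.Col('source_id'),
#         sql_gen.Col('plot_code')])
# creates a copy of plot_temp named with a '_distinct' suffix, adds a unique
# index on the given columns, and inserts each distinct combination once
# (duplicates are skipped via the ignore=True insert path).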