# Database access

import copy
import re
import time
import warnings

import exc
import dicts
import iters
import lists
import profiling
from Proxy import Proxy
import rand
import sql_gen
import strings
import util

##### Exceptions

def get_cur_query(cur, input_query=None):
    raw_query = None
    if hasattr(cur, 'query'): raw_query = cur.query
    elif hasattr(cur, '_last_executed'): raw_query = cur._last_executed

    if raw_query != None: return raw_query
    else: return '[input] '+strings.ustr(input_query)

def _add_cursor_info(e, *args, **kw_args):
    '''For params, see get_cur_query()'''
    exc.add_msg(e, 'query: '+strings.ustr(get_cur_query(*args, **kw_args)))

class DbException(exc.ExceptionWithCause):
    def __init__(self, msg, cause=None, cur=None):
        exc.ExceptionWithCause.__init__(self, msg, cause, cause_newline=True)
        if cur != None: _add_cursor_info(self, cur)

class ExceptionWithName(DbException):
    def __init__(self, name, cause=None):
        DbException.__init__(self, 'for name: '
            +strings.as_tt(strings.ustr(name)), cause)
        self.name = name

class ExceptionWithValue(DbException):
    def __init__(self, value, cause=None):
        DbException.__init__(self, 'for value: '
            +strings.as_tt(strings.urepr(value)), cause)
        self.value = value

class ExceptionWithNameType(DbException):
    def __init__(self, type_, name, cause=None):
        DbException.__init__(self, 'for type: '+strings.as_tt(strings.ustr(
            type_))+'; name: '+strings.as_tt(name), cause)
        self.type = type_
        self.name = name

class ConstraintException(DbException):
    def __init__(self, name, cond, cols, cause=None):
        msg = 'Violated '+strings.as_tt(name)+' constraint'
        if cond != None: msg += ' with condition '+strings.as_tt(cond)
        if cols != []: msg += ' on columns: '+strings.as_tt(', '.join(cols))
        DbException.__init__(self, msg, cause)
        self.name = name
        self.cond = cond
        self.cols = cols

class MissingCastException(DbException):
    def __init__(self, type_, col=None, cause=None):
        msg = 'Missing cast to type '+strings.as_tt(type_)
        if col != None: msg += ' on column: '+strings.as_tt(col)
        DbException.__init__(self, msg, cause)
        self.type = type_
        self.col = col

class EncodingException(ExceptionWithName): pass

class DuplicateKeyException(ConstraintException): pass

class NullValueException(ConstraintException): pass

class CheckException(ConstraintException): pass

class InvalidValueException(ExceptionWithValue): pass

class DuplicateException(ExceptionWithNameType): pass

class DoesNotExistException(ExceptionWithNameType): pass

class EmptyRowException(DbException): pass

##### Warnings

class DbWarning(UserWarning): pass

##### Result retrieval

def col_names(cur): return (col[0] for col in cur.description)

def rows(cur): return iter(lambda: cur.fetchone(), None)

def consume_rows(cur):
    '''Used to fetch all rows so result will be cached'''
    iters.consume_iter(rows(cur))

def next_row(cur): return rows(cur).next()

def row(cur):
    row_ = next_row(cur)
    consume_rows(cur)
    return row_

def next_value(cur): return next_row(cur)[0]

def value(cur): return row(cur)[0]

def values(cur): return iters.func_iter(lambda: next_value(cur))

def value_or_none(cur):
    try: return value(cur)
    except StopIteration: return None

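# Usage sketch (illustrative only; `db` is a DbConn from the section below and
# `plot` is a hypothetical table): these helpers wrap any DB-API cursor.
#     cur = run_query(db, 'SELECT name FROM plot')
#     print list(col_names(cur)) # column names of the result set
#     for row_ in rows(cur): print row_[0]
#     count = value(run_query(db, 'SELECT count(*) FROM plot'))
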
##### Escaping

def esc_name_by_module(module, name):
    if module == 'psycopg2' or module == None: quote = '"'
    elif module == 'MySQLdb': quote = '`'
    else: raise NotImplementedError("Can't escape name for "+module+' database')
    return sql_gen.esc_name(name, quote)

def esc_name_by_engine(engine, name, **kw_args):
    return esc_name_by_module(db_engines[engine][0], name, **kw_args)

def esc_name(db, name, **kw_args):
    return esc_name_by_module(util.root_module(db.db), name, **kw_args)

def qual_name(db, schema, table):
    def esc_name_(name): return esc_name(db, name)
    table = esc_name_(table)
    if schema != None: return esc_name_(schema)+'.'+table
    else: return table

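# Usage sketch (hypothetical names; the exact quoting is delegated to
# sql_gen.esc_name()):
#     esc_name_by_module('psycopg2', 'plot id') # double-quoted for PostgreSQL
#     esc_name_by_module('MySQLdb', 'plot id') # backtick-quoted for MySQL
#     qual_name(db, 'public', 'plot') # schema-qualified, escaped name
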
##### Database connections

db_config_names = ['engine', 'host', 'user', 'password', 'database', 'schemas']

db_engines = {
    'MySQL': ('MySQLdb', {'password': 'passwd', 'database': 'db'}),
    'PostgreSQL': ('psycopg2', {}),
}

DatabaseErrors_set = set([DbException])
DatabaseErrors = tuple(DatabaseErrors_set)

def _add_module(module):
    DatabaseErrors_set.add(module.DatabaseError)
    global DatabaseErrors
    DatabaseErrors = tuple(DatabaseErrors_set)

def db_config_str(db_config):
    return db_config['engine']+' database '+db_config['database']

log_debug_none = lambda msg, level=2: None

class DbConn:
    def __init__(self, db_config, autocommit=True, caching=True,
        log_debug=log_debug_none, debug_temp=False, src=None):
        '''
        @param debug_temp Whether temporary objects should instead be permanent.
            This assists in debugging the internal objects used by the program.
        @param src In autocommit mode, will be included in a comment in every
            query, to help identify the data source in pg_stat_activity.
        '''
        self.db_config = db_config
        self.autocommit = autocommit
        self.caching = caching
        self.log_debug = log_debug
        self.debug = log_debug != log_debug_none
        self.debug_temp = debug_temp
        self.src = src
        self.autoanalyze = False
        self.autoexplain = False
        self.profile_row_ct = None

        self._savepoint = 0
        self._reset()

    def __getattr__(self, name):
        if name == '__dict__': raise Exception('getting __dict__')
        if name == 'db': return self._db()
        else: raise AttributeError()

    def __getstate__(self):
        state = copy.copy(self.__dict__) # shallow copy
        state['log_debug'] = None # don't pickle the debug callback
        state['_DbConn__db'] = None # don't pickle the connection
        return state

    def clear_cache(self): self.query_results = {}

    def _reset(self):
        self.clear_cache()
        assert self._savepoint == 0
        self._notices_seen = set()
        self.__db = None

    def connected(self): return self.__db != None

    def close(self):
        if not self.connected(): return

        # Record that the automatic transaction is now closed
        self._savepoint -= 1

        self.db.close()
        self._reset()

    def reconnect(self):
        # Do not do this in test mode as it would roll back everything
        if self.autocommit: self.close()
        # Connection will be reopened automatically on first query

    def _db(self):
        if self.__db == None:
            # Process db_config
            db_config = self.db_config.copy() # don't modify input!
            schemas = db_config.pop('schemas', None)
            module_name, mappings = db_engines[db_config.pop('engine')]
            module = __import__(module_name)
            _add_module(module)
            for orig, new in mappings.iteritems():
                try: util.rename_key(db_config, orig, new)
                except KeyError: pass

            # Connect
            self.__db = module.connect(**db_config)

            # Record that a transaction is already open
            self._savepoint += 1

            # Configure connection
            if hasattr(self.db, 'set_isolation_level'):
                import psycopg2.extensions
                self.db.set_isolation_level(
                    psycopg2.extensions.ISOLATION_LEVEL_READ_COMMITTED)
            if schemas != None:
                search_path = [self.esc_name(s) for s in schemas.split(',')]
                search_path.append(value(run_query(self, 'SHOW search_path',
                    log_level=4)))
                run_query(self, 'SET search_path TO '+(','.join(search_path)),
                    log_level=3)

        return self.__db

    class DbCursor(Proxy):
        def __init__(self, outer):
            Proxy.__init__(self, outer.db.cursor())
            self.outer = outer
            self.query_results = outer.query_results
            self.query_lookup = None
            self.result = []

        def execute(self, query):
            self._is_insert = query.startswith('INSERT')
            self.query_lookup = query
            try:
                try: cur = self.inner.execute(query)
                finally: self.query = get_cur_query(self.inner, query)
            except Exception, e:
                self.result = e # cache the exception as the result
                self._cache_result()
                raise

            # Always cache certain queries
            query = sql_gen.lstrip(query)
            if query.startswith('CREATE') or query.startswith('ALTER'):
                # structural changes
                # Rest of query must be unique in the face of name collisions,
                # so don't cache ADD COLUMN unless it has distinguishing comment
                if query.find('ADD COLUMN') < 0 or query.endswith('*/'):
                    self._cache_result()
            elif self.rowcount == 0 and query.startswith('SELECT'): # empty
                consume_rows(self) # fetch all rows so result will be cached

            return cur

        def fetchone(self):
            row = self.inner.fetchone()
            if row != None: self.result.append(row)
            # otherwise, fetched all rows
            else: self._cache_result()
            return row

        def _cache_result(self):
            # For inserts that return a result set, don't cache result set since
            # inserts are not idempotent. Other non-SELECT queries don't have
            # their result set read, so only exceptions will be cached (an
            # invalid query will always be invalid).
            if self.query_results != None and (not self._is_insert
                or isinstance(self.result, Exception)):

                assert self.query_lookup != None
                self.query_results[self.query_lookup] = self.CacheCursor(
                    util.dict_subset(dicts.AttrsDictView(self),
                    ['query', 'result', 'rowcount', 'description']))

        class CacheCursor:
            def __init__(self, cached_result): self.__dict__ = cached_result

            def execute(self, *args, **kw_args):
                if isinstance(self.result, Exception): raise self.result
                # otherwise, result is a rows list
                self.iter = iter(self.result)

            def fetchone(self):
                try: return self.iter.next()
                except StopIteration: return None

    def esc_value(self, value):
        try: str_ = self.mogrify('%s', [value])
        except NotImplementedError, e:
            module = util.root_module(self.db)
            if module == 'MySQLdb':
                import _mysql
                str_ = _mysql.escape_string(value)
            else: raise e
        return strings.to_unicode(str_)

    def esc_name(self, name): return esc_name(self, name) # calls global func

    def std_code(self, str_):
        '''Standardizes SQL code.
        * Ensures that string literals are prefixed by `E`
        '''
        if str_.startswith("'"): str_ = 'E'+str_
        return str_

    def can_mogrify(self):
        module = util.root_module(self.db)
        return module == 'psycopg2'

    def mogrify(self, query, params=None):
        if self.can_mogrify(): return self.db.cursor().mogrify(query, params)
        else: raise NotImplementedError("Can't mogrify query")

    def set_encoding(self, encoding):
        encoding_str = sql_gen.Literal(encoding)
        run_query(self, 'SET NAMES '+encoding_str.to_str(self))

    def print_notices(self):
        if hasattr(self.db, 'notices'):
            for msg in self.db.notices:
                if msg not in self._notices_seen:
                    self._notices_seen.add(msg)
                    self.log_debug(msg, level=2)

    def run_query(self, query, cacheable=False, log_level=2,
        debug_msg_ref=None):
        '''
        @param log_ignore_excs The log_level will be increased by 2 if the query
            throws one of these exceptions.
        @param debug_msg_ref If specified, the log message will be returned in
            this instead of being output. This allows you to filter log messages
            depending on the result of the query.
        '''
        assert query != None

        if self.autocommit and self.src != None:
            query = sql_gen.esc_comment(self.src)+'\t'+query

        if not self.caching: cacheable = False
        used_cache = False

        if self.debug:
            profiler = profiling.ItersProfiler(start_now=True, iter_text='row')
        try:
            # Get cursor
            if cacheable:
                try: cur = self.query_results[query]
                except KeyError: cur = self.DbCursor(self)
                else: used_cache = True
            else: cur = self.db.cursor()

            # Run query
            try: cur.execute(query)
            except Exception, e:
                _add_cursor_info(e, self, query)
                raise
            else: self.do_autocommit()
        finally:
            if self.debug:
                profiler.stop(self.profile_row_ct)

                ## Log or return query

                query = strings.ustr(get_cur_query(cur, query))
                # Put the src comment on a separate line in the log file
                query = query.replace('\t', '\n', 1)

                msg = 'DB query: '

                if used_cache: msg += 'cache hit'
                elif cacheable: msg += 'cache miss'
                else: msg += 'non-cacheable'

                msg += ':\n'+profiler.msg()+'\n'+strings.as_code(query, 'SQL')

                if debug_msg_ref != None: debug_msg_ref[0] = msg
                else: self.log_debug(msg, log_level)

                self.print_notices()

        return cur

    def is_cached(self, query): return query in self.query_results

    def with_autocommit(self, func):
        import psycopg2.extensions

        prev_isolation_level = self.db.isolation_level
        self.db.set_isolation_level(
            psycopg2.extensions.ISOLATION_LEVEL_AUTOCOMMIT)
        try: return func()
        finally: self.db.set_isolation_level(prev_isolation_level)

    def with_savepoint(self, func):
        top = self._savepoint == 0
        savepoint = 'level_'+str(self._savepoint)

        if self.debug:
            self.log_debug('Begin transaction', level=4)
            profiler = profiling.ItersProfiler(start_now=True, iter_text='row')

        # Must happen before running queries so they don't get autocommitted
        self._savepoint += 1

        if top: query = 'START TRANSACTION ISOLATION LEVEL READ COMMITTED'
        else: query = 'SAVEPOINT '+savepoint
        self.run_query(query, log_level=4)
        try:
            return_val = func()
            if top: self.run_query('COMMIT', log_level=4)
            return return_val
        except:
            if top: query = 'ROLLBACK'
            else: query = 'ROLLBACK TO SAVEPOINT '+savepoint
            self.run_query(query, log_level=4)

            raise
        finally:
            # Always release savepoint, because after ROLLBACK TO SAVEPOINT,
            # "The savepoint remains valid and can be rolled back to again"
            # (http://www.postgresql.org/docs/8.3/static/sql-rollback-to.html).
            if not top:
                self.run_query('RELEASE SAVEPOINT '+savepoint, log_level=4)

            self._savepoint -= 1
            assert self._savepoint >= 0

            if self.debug:
                profiler.stop(self.profile_row_ct)
                self.log_debug('End transaction\n'+profiler.msg(), level=4)

            self.do_autocommit() # OK to do this after ROLLBACK TO SAVEPOINT

    def do_autocommit(self):
        '''Autocommits if outside savepoint'''
        assert self._savepoint >= 1
        if self.autocommit and self._savepoint == 1:
            self.log_debug('Autocommitting', level=4)
            self.db.commit()

    def col_info(self, col, cacheable=True):
        table = sql_gen.Table('columns', 'information_schema')
        cols = [sql_gen.Col('data_type'), sql_gen.Col('udt_name'),
            'column_default', sql_gen.Cast('boolean',
            sql_gen.Col('is_nullable'))]

        conds = [('table_name', col.table.name),
            ('column_name', strings.ustr(col.name))]
        schema = col.table.schema
        if schema != None: conds.append(('table_schema', schema))

        cur = select(self, table, cols, conds, order_by='table_schema', limit=1,
            cacheable=cacheable, log_level=4) # TODO: order by search_path order
        try: type_, extra_type, default, nullable = row(cur)
        except StopIteration: raise sql_gen.NoUnderlyingTableException(col)
        default = sql_gen.as_Code(default, self)
        if type_ == 'USER-DEFINED': type_ = extra_type
        elif type_ == 'ARRAY':
            type_ = sql_gen.ArrayType(strings.remove_prefix('_', extra_type,
                require=True))

        return sql_gen.TypedCol(col.name, type_, default, nullable)

    def TempFunction(self, name):
        if self.debug_temp: schema = None
        else: schema = 'pg_temp'
        return sql_gen.Function(name, schema)

connect = DbConn

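# Usage sketch (hypothetical connection settings; keys follow db_config_names
# above, and run_query()/value() are defined elsewhere in this module):
#     db = connect({'engine': 'PostgreSQL', 'host': 'localhost',
#         'user': 'myuser', 'password': 'XXX', 'database': 'mydb'})
#     print value(run_query(db, 'SELECT 1'))
#     db.close()
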
##### Recoverable querying

def parse_exception(db, e, recover=False):
    msg = strings.ustr(e.args[0])
    msg = re.sub(r'^(?:PL/Python: )?ValueError: ', r'', msg)

    match = re.match(r'^invalid byte sequence for encoding "(.+?)":', msg)
    if match:
        encoding, = match.groups()
        raise EncodingException(encoding, e)

    match = re.match(r'^duplicate key value violates unique constraint "(.+?)"',
        msg)
    if match:
        constraint, = match.groups()
        cols = []
        cond = None
        if recover: # need auto-rollback to run index_cols()
            try:
                cols = index_cols(db, constraint)
                cond = index_cond(db, constraint)
            except NotImplementedError: pass
        raise DuplicateKeyException(constraint, cond, cols, e)

    match = re.match(r'^null value in column "(.+?)" violates not-null'
        r' constraint', msg)
    if match:
        col, = match.groups()
        raise NullValueException('NOT NULL', None, [col], e)

    match = re.match(r'^new row for relation "(.+?)" violates check '
        r'constraint "(.+?)"', msg)
    if match:
        table, constraint = match.groups()
        constraint = sql_gen.Col(constraint, table)
        cond = None
        if recover: # need auto-rollback to run constraint_cond()
            try: cond = constraint_cond(db, constraint)
            except NotImplementedError: pass
        raise CheckException(constraint.to_str(db), cond, [], e)

    match = re.match(r'^(?:invalid input (?:syntax|value)\b.*?'
        r'|.+? out of range): "(.+?)"', msg)
    if match:
        value, = match.groups()
        raise InvalidValueException(strings.to_unicode(value), e)

    match = re.match(r'^column "(.+?)" is of type (.+?) but expression '
        r'is of type', msg)
    if match:
        col, type_ = match.groups()
        raise MissingCastException(type_, col, e)

    match = re.match(r'^could not determine polymorphic type because '
        r'input has type "unknown"', msg)
    if match: raise MissingCastException('text', None, e)

    match = re.match(r'^.+? types .+? and .+? cannot be matched', msg)
    if match: raise MissingCastException('text', None, e)

    typed_name_re = r'^(\S+) "(.+?)"(?: of relation ".+?")?'

    match = re.match(typed_name_re+r'.*? already exists', msg)
    if match:
        type_, name = match.groups()
        raise DuplicateException(type_, name, e)

    match = re.match(r'more than one (\S+) named ""(.+?)""', msg)
    if match:
        type_, name = match.groups()
        raise DuplicateException(type_, name, e)

    match = re.match(typed_name_re+r' does not exist', msg)
    if match:
        type_, name = match.groups()
        if type_ == 'function':
            match = re.match(r'^(.+?)\(([^,)]+).*\)$', name)
            if match:
                function_name, param0_type = match.groups()
                if msg.split('\n')[1].find(function_name) >= 0: # also on line 2
                    # not found only because of a missing cast
                    raise MissingCastException(param0_type, function_name, e)
        raise DoesNotExistException(type_, name, e)

    raise # no specific exception raised

def with_savepoint(db, func): return db.with_savepoint(func)

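# Usage sketch (hypothetical tables): run several statements atomically.
# A nested call uses a SAVEPOINT instead of starting a new transaction.
#     def body():
#         insert(db, 'plot', {'name': 'p1'})
#         insert(db, 'stem', {'plot_name': 'p1'})
#     with_savepoint(db, body)
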
def run_query(db, query, recover=None, cacheable=False, log_level=2,
    log_ignore_excs=None, **kw_args):
    '''For params, see DbConn.run_query()'''
    if recover == None: recover = False
    if log_ignore_excs == None: log_ignore_excs = ()
    log_ignore_excs = tuple(log_ignore_excs)
    debug_msg_ref = [None]

    query = with_explain_comment(db, query)

    try:
        try:
            def run(): return db.run_query(query, cacheable, log_level,
                debug_msg_ref, **kw_args)
            if recover and not db.is_cached(query):
                return with_savepoint(db, run)
            else: return run() # don't need savepoint if cached
        except Exception, e: parse_exception(db, e, recover)
    except log_ignore_excs:
        log_level += 2
        raise
    finally:
        if debug_msg_ref[0] != None: db.log_debug(debug_msg_ref[0], log_level)

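# Usage sketch (hypothetical query): recover=True runs the query inside a
# savepoint so that a failure is translated by parse_exception() above into a
# specific DbException without aborting the enclosing transaction.
#     try: run_query(db, 'INSERT INTO plot (id) VALUES (1)', recover=True)
#     except DuplicateKeyException, e: print e.name, e.cols
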
##### Basic queries

def is_explainable(query):
    # See <http://www.postgresql.org/docs/8.3/static/sql-explain.html#AEN57749>
    return re.match(r'^(?:SELECT|INSERT|UPDATE|DELETE|VALUES|EXECUTE|DECLARE)\b'
        , query)

def explain(db, query, **kw_args):
    '''
    For params, see run_query().
    '''
    kw_args.setdefault('log_level', 4)

    return strings.ustr(strings.join_lines(values(run_query(db,
        'EXPLAIN '+query, recover=True, cacheable=True, **kw_args))))
        # not a higher log_level because it's useful to see what query is being
        # run before it's executed, which EXPLAIN effectively provides

def has_comment(query): return query.endswith('*/')

def with_explain_comment(db, query, **kw_args):
    if db.autoexplain and not has_comment(query) and is_explainable(query):
        query += '\n'+sql_gen.esc_comment(' EXPLAIN:\n'
            +explain(db, query, **kw_args))
    return query

def next_version(name):
    version = 1 # first existing name was version 0
    match = re.match(r'^(.*)#(\d+)$', name)
    if match:
        name, version = match.groups()
        version = int(version)+1
    return sql_gen.concat(name, '#'+str(version))

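# e.g. next_version('plot') produces 'plot#1' and next_version('plot#1')
# produces 'plot#2' (assuming sql_gen.concat() simply appends when the combined
# name fits within the identifier length limit).
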
def lock_table(db, table, mode):
    table = sql_gen.as_Table(table)
    run_query(db, 'LOCK TABLE '+table.to_str(db)+' IN '+mode+' MODE')

def run_query_into(db, query, into=None, add_pkey_=False, **kw_args):
    '''Outputs a query to a temp table.
    For params, see run_query().
    '''
    if into == None: return run_query(db, query, **kw_args)

    assert isinstance(into, sql_gen.Table)

    into.is_temp = True
    # "temporary tables cannot specify a schema name", so remove schema
    into.schema = None

    kw_args['recover'] = True
    kw_args.setdefault('log_ignore_excs', (DuplicateException,))

    temp = not db.debug_temp # tables are permanent in debug_temp mode

    # Create table
    while True:
        create_query = 'CREATE'
        if temp: create_query += ' TEMP'
        create_query += ' TABLE '+into.to_str(db)+' AS\n'+query

        try:
            cur = run_query(db, create_query, **kw_args)
                # CREATE TABLE AS sets rowcount to # rows in query
            break
        except DuplicateException, e:
            into.name = next_version(into.name)
            # try again with next version of name

    if add_pkey_: add_pkey(db, into)

    # According to the PostgreSQL doc, "The autovacuum daemon cannot access and
    # therefore cannot vacuum or analyze temporary tables. [...] if a temporary
    # table is going to be used in complex queries, it is wise to run ANALYZE on
    # the temporary table after it is populated."
    # (http://www.postgresql.org/docs/9.1/static/sql-createtable.html)
    # If into is not a temp table, ANALYZE is useful but not required.
    analyze(db, into)

    return cur

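# Usage sketch (hypothetical names): materialize a query into a temp table.
# `into.name` may get versioned (e.g. 'plot_names#1') if the name is taken.
#     into = sql_gen.Table('plot_names')
#     run_query_into(db, mk_select(db, 'plot', ['name']), into=into)
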
order_by_pkey = object() # tells mk_select() to order by the pkey

distinct_on_all = object() # tells mk_select() to SELECT DISTINCT ON all columns

def mk_select(db, tables=None, fields=None, conds=None, distinct_on=[],
    limit=None, start=None, order_by=order_by_pkey, default_table=None,
    explain=True):
    '''
    @param tables The single table to select from, or a list of tables to join
        together, with tables after the first being sql_gen.Join objects
    @param fields Use None to select all fields in the table
    @param conds WHERE conditions: [(compare_left_side, compare_right_side),...]
        * container can be any iterable type
        * compare_left_side: sql_gen.Code|str (for col name)
        * compare_right_side: sql_gen.ValueCond|literal value
    @param distinct_on The columns to SELECT DISTINCT ON, or distinct_on_all to
        use all columns
    @return query
    '''
    # Parse tables param
    tables = lists.mk_seq(tables)
    tables = list(tables) # don't modify input! (list() copies input)
    table0 = sql_gen.as_Table(tables.pop(0)) # first table is separate

    # Parse other params
    if conds == None: conds = []
    elif dicts.is_dict(conds): conds = conds.items()
    conds = list(conds) # don't modify input! (list() copies input)
    assert limit == None or isinstance(limit, (int, long))
    assert start == None or isinstance(start, (int, long))
    if limit == 0: order_by = None
    if order_by is order_by_pkey:
        if lists.is_seq(distinct_on) and distinct_on: order_by = distinct_on[0]
        elif table0 != None: order_by = table_order_by(db, table0, recover=True)
        else: order_by = None

    query = 'SELECT'

    def parse_col(col): return sql_gen.as_Col(col, default_table).to_str(db)

    # DISTINCT ON columns
    if distinct_on != []:
        query += '\nDISTINCT'
        if distinct_on is not distinct_on_all:
            query += ' ON ('+(', '.join(map(parse_col, distinct_on)))+')'

    # Columns
    if query.find('\n') >= 0: whitespace = '\n'
    else: whitespace = ' '
    if fields == None: query += whitespace+'*'
    else:
        assert fields != []
        if len(fields) > 1: whitespace = '\n'
        query += whitespace+('\n, '.join(map(parse_col, fields)))

    # Main table
    if query.find('\n') >= 0 or len(tables) > 0: whitespace = '\n'
    else: whitespace = ' '
    if table0 != None: query += whitespace+'FROM '+table0.to_str(db)

    # Add joins
    left_table = table0
    for join_ in tables:
        table = join_.table

        # Parse special values
        if join_.type_ is sql_gen.filter_out: # filter no match
            conds.append((sql_gen.Col(table_not_null_col(db, table), table),
                sql_gen.CompareCond(None, '~=')))

        query += '\n'+join_.to_str(db, left_table)

        left_table = table

    missing = True
    if conds != []:
        if len(conds) == 1: whitespace = ' '
        else: whitespace = '\n'
        query += '\n'+sql_gen.combine_conds([sql_gen.ColValueCond(l, r)
            .to_str(db) for l, r in conds], 'WHERE')
    if order_by != None:
        query += '\nORDER BY '+sql_gen.as_Col(order_by).to_str(db)
    if limit != None: query += '\nLIMIT '+str(limit)
    if start != None:
        if start != 0: query += '\nOFFSET '+str(start)

    if explain: query = with_explain_comment(db, query)

    return query

def select(db, *args, **kw_args):
    '''For params, see mk_select() and run_query()'''
    recover = kw_args.pop('recover', None)
    cacheable = kw_args.pop('cacheable', True)
    log_level = kw_args.pop('log_level', 2)

    return run_query(db, mk_select(db, *args, **kw_args), recover, cacheable,
        log_level=log_level)

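# Usage sketch (hypothetical table/columns; conds may also be a dict):
#     cur = select(db, 'plot', ['id', 'name'], {'site': 'MySite'}, limit=10)
#     for id_, name in rows(cur): print id_, name
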
def mk_insert_select(db, table, cols=None, select_query=None, returning=None,
    embeddable=False, ignore=False, src=None):
    '''
    @param returning str|None An inserted column (such as pkey) to return
    @param embeddable Whether the query should be embeddable as a nested SELECT.
        Warning: If you set this and cacheable=True when the query is run, the
        query will be fully cached, not just if it raises an exception.
    @param ignore Whether to ignore duplicate keys.
    @param src Will be included in the name of any created function, to help
        identify the data source in pg_stat_activity.
    '''
    table = sql_gen.remove_table_rename(sql_gen.as_Table(table))
    if cols == []: cols = None # no cols (all defaults) = unknown col names
    if cols != None: cols = [sql_gen.to_name_only_col(c, table) for c in cols]
    if select_query == None: select_query = 'DEFAULT VALUES'
    if returning != None: returning = sql_gen.as_Col(returning, table)

    first_line = 'INSERT INTO '+table.to_str(db)

    def mk_insert(select_query):
        query = first_line
        if cols != None:
            query += '\n('+(', '.join((c.to_str(db) for c in cols)))+')'
        query += '\n'+select_query

        if returning != None:
            returning_name_col = sql_gen.to_name_only_col(returning)
            query += '\nRETURNING '+returning_name_col.to_str(db)

        return query

    return_type = sql_gen.CustomCode('unknown')
    if returning != None: return_type = sql_gen.ColType(returning)

    if ignore:
        # Always return something to set the correct rowcount
        if returning == None: returning = sql_gen.NamedCol('NULL', None)

        embeddable = True # must use function

        if cols == None: row = [sql_gen.Col(sql_gen.all_cols, 'row')]
        else: row = [sql_gen.Col(c.name, 'row') for c in cols]

        query = sql_gen.RowExcIgnore(sql_gen.RowType(table), select_query,
            sql_gen.ReturnQuery(mk_insert(sql_gen.Values(row).to_str(db))),
            cols)
    else: query = mk_insert(select_query)

    if embeddable:
        # Create function
        function_name = sql_gen.clean_name(first_line)
        if src != None: function_name = src+': '+function_name
        while True:
            try:
                func = db.TempFunction(function_name)
                def_ = sql_gen.FunctionDef(func, sql_gen.SetOf(return_type),
                    query)

                run_query(db, def_.to_str(db), recover=True, cacheable=True,
                    log_ignore_excs=(DuplicateException,))
                break # this version was successful
            except DuplicateException, e:
                function_name = next_version(function_name)
                # try again with next version of name

        # Return query that uses function
        cols = None
        if returning != None: cols = [returning]
        func_table = sql_gen.NamedTable('f', sql_gen.FunctionCall(func), cols)
            # AS clause requires function alias
        return mk_select(db, func_table, order_by=None)

    return query

def insert_select(db, table, *args, **kw_args):
    '''For params, see mk_insert_select() and run_query_into()
    @param into sql_gen.Table with suggested name of temp table to put RETURNING
        values in
    '''
    returning = kw_args.get('returning', None)
    ignore = kw_args.get('ignore', False)

    into = kw_args.pop('into', None)
    if into != None: kw_args['embeddable'] = True
    recover = kw_args.pop('recover', None)
    if ignore: recover = True
    cacheable = kw_args.pop('cacheable', True)
    log_level = kw_args.pop('log_level', 2)

    rowcount_only = ignore and returning == None # keep NULL rows on server
    if rowcount_only: into = sql_gen.Table('rowcount')

    cur = run_query_into(db, mk_insert_select(db, table, *args, **kw_args),
        into, recover=recover, cacheable=cacheable, log_level=log_level)
    if rowcount_only: empty_temp(db, into)
    autoanalyze(db, table)
    return cur

default = sql_gen.default # tells insert() to use the default value for a column

def insert(db, table, row, *args, **kw_args):
    '''For params, see insert_select()'''
    ignore = kw_args.pop('ignore', False)
    if ignore: kw_args.setdefault('recover', True)

    if lists.is_seq(row): cols = None
    else:
        cols = row.keys()
        row = row.values()
    row = list(row) # ensure that "== []" works

    if row == []: query = None
    else: query = sql_gen.Values(row).to_str(db)

    try: return insert_select(db, table, cols, query, *args, **kw_args)
    except (DuplicateKeyException, NullValueException):
        if not ignore: raise
        return None

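# Usage sketch (hypothetical table): with ignore=True, duplicate-key and
# NOT NULL violations are swallowed and None is returned instead of a cursor.
#     insert(db, 'plot', {'id': 1, 'name': 'p1'}, ignore=True)
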
def mk_update(db, table, changes=None, cond=None, in_place=False,
    cacheable_=True):
    '''
    @param changes [(col, new_value),...]
        * container can be any iterable type
        * col: sql_gen.Code|str (for col name)
        * new_value: sql_gen.Code|literal value
    @param cond sql_gen.Code WHERE condition. e.g. use sql_gen.*Cond objects.
    @param in_place If set, locks the table and updates rows in place.
        This avoids creating dead rows in PostgreSQL.
        * cond must be None
    @param cacheable_ Whether column structure information used to generate the
        query can be cached
    @return str query
    '''
    table = sql_gen.as_Table(table)
    changes = [(sql_gen.to_name_only_col(c, table), sql_gen.as_Value(v))
        for c, v in changes]

    if in_place:
        assert cond == None

        def col_type(col):
            return sql_gen.canon_type(db.col_info(
                sql_gen.with_default_table(col, table), cacheable_).type)
        changes = [(c, v, col_type(c)) for c, v in changes]
        query = 'ALTER TABLE '+table.to_str(db)+'\n'
        query += ',\n'.join(('ALTER COLUMN '+c.to_str(db)+' TYPE '+t+'\nUSING '
            +v.to_str(db) for c, v, t in changes))
    else:
        query = 'UPDATE '+table.to_str(db)+'\nSET\n'
        query += ',\n'.join((c.to_str(db)+' = '+v.to_str(db)
            for c, v in changes))
        if cond != None: query += '\nWHERE\n'+cond.to_str(db)

    query = with_explain_comment(db, query)

    return query

def update(db, table, *args, **kw_args):
    '''For params, see mk_update() and run_query()'''
    recover = kw_args.pop('recover', None)
    cacheable = kw_args.pop('cacheable', False)
    log_level = kw_args.pop('log_level', 2)

    cur = run_query(db, mk_update(db, table, *args, **kw_args), recover,
        cacheable, log_level=log_level)
    autoanalyze(db, table)
    return cur

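# Usage sketch (hypothetical table/columns; cond is any sql_gen.Code object,
# such as a sql_gen.*Cond):
#     update(db, 'plot', [('name', 'renamed')], sql_gen.ColValueCond('id', 1))
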
def mk_delete(db, table, cond=None):
    '''
    @param cond sql_gen.Code WHERE condition. e.g. use sql_gen.*Cond objects.
    @return str query
    '''
    query = 'DELETE FROM '+table.to_str(db)
    if cond != None: query += '\nWHERE '+cond.to_str(db)

    query = with_explain_comment(db, query)

    return query

def delete(db, table, *args, **kw_args):
    '''For params, see mk_delete() and run_query()'''
    recover = kw_args.pop('recover', None)
    cacheable = kw_args.pop('cacheable', True)
    log_level = kw_args.pop('log_level', 2)

    cur = run_query(db, mk_delete(db, table, *args, **kw_args), recover,
        cacheable, log_level=log_level)
    autoanalyze(db, table)
    return cur

def last_insert_id(db):
    module = util.root_module(db.db)
    if module == 'psycopg2': return value(run_query(db, 'SELECT lastval()'))
    elif module == 'MySQLdb': return db.insert_id()
    else: return None

def define_func(db, def_):
    func = def_.function
    while True:
        try:
            run_query(db, def_.to_str(db), recover=True, cacheable=True,
                log_ignore_excs=(DuplicateException,))
            break # successful
        except DuplicateException:
            func.name = next_version(func.name)
            # try again with next version of name

def mk_flatten_mapping(db, into, cols, preserve=[], as_items=False):
    '''Creates a mapping from original column names (which may have collisions)
    to names that will be distinct among the columns' tables.
    This is meant to be used for several tables that are being joined together.
    @param cols The columns to combine. Duplicates will be removed.
    @param into The table for the new columns.
    @param preserve [sql_gen.Col...] Columns not to rename. Note that these
        columns will be included in the mapping even if they are not in cols.
        The tables of the provided Col objects will be changed to into, so make
        copies of them if you want to keep the original tables.
    @param as_items Whether to return a list of dict items instead of a dict
    @return dict(orig_col=new_col, ...)
        * orig_col: sql_gen.Col(orig_col_name, orig_table)
        * new_col: sql_gen.Col(orig_col_name, into)
        * All mappings use the into table so its name can easily be
          changed for all columns at once
    '''
    cols = lists.uniqify(cols)

    items = []
    for col in preserve:
        orig_col = copy.copy(col)
        col.table = into
        items.append((orig_col, col))
    preserve = set(preserve)
    for col in cols:
        if col not in preserve:
            items.append((col, sql_gen.Col(strings.ustr(col), into, col.srcs)))

    if not as_items: items = dict(items)
    return items

def flatten(db, into, joins, cols, limit=None, start=None, **kw_args):
    '''For params, see mk_flatten_mapping()
    @return See return value of mk_flatten_mapping()
    '''
    items = mk_flatten_mapping(db, into, cols, as_items=True, **kw_args)
    cols = [sql_gen.NamedCol(new.name, old) for old, new in items]
    run_query_into(db, mk_select(db, joins, cols, limit=limit, start=start),
        into=into, add_pkey_=True)
        # don't cache because the temp table will usually be truncated after use
    return dict(items)

##### Database structure introspection

#### Tables

def tables(db, schema_like='public', table_like='%', exact=False,
    cacheable=True):
    if exact: compare = '='
    else: compare = 'LIKE'

    module = util.root_module(db.db)
    if module == 'psycopg2':
        conds = [('schemaname', sql_gen.CompareCond(schema_like, compare)),
            ('tablename', sql_gen.CompareCond(table_like, compare))]
        return values(select(db, 'pg_tables', ['tablename'], conds,
            order_by='tablename', cacheable=cacheable, log_level=4))
    elif module == 'MySQLdb':
        return values(run_query(db, 'SHOW TABLES LIKE '+db.esc_value(table_like)
            , cacheable=True, log_level=4))
    else: raise NotImplementedError("Can't list tables for "+module+' database')

def table_exists(db, table, cacheable=True):
    table = sql_gen.as_Table(table)
    return list(tables(db, table.schema, table.name, True, cacheable)) != []

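# Usage sketch (hypothetical schema/table names):
#     print list(tables(db, 'public', 'plot%')) # tables matching a LIKE pattern
#     print table_exists(db, sql_gen.Table('plot', 'public'))
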
def table_row_count(db, table, recover=None):
    return value(run_query(db, mk_select(db, table, [sql_gen.row_count],
        order_by=None), recover=recover, log_level=3))

def table_col_names(db, table, recover=None):
    return list(col_names(select(db, table, limit=0, recover=recover,
        log_level=4)))

def table_cols(db, table, *args, **kw_args):
    return [sql_gen.as_Col(strings.ustr(c), table)
        for c in table_col_names(db, table, *args, **kw_args)]

def table_pkey_index(db, table, recover=None):
    table_str = sql_gen.Literal(table.to_str(db))
    try:
        return sql_gen.Table(value(run_query(db, '''\
SELECT relname
FROM pg_index
JOIN pg_class index ON index.oid = indexrelid
WHERE
indrelid = '''+table_str.to_str(db)+'''::regclass
AND indisprimary
'''
            , recover, cacheable=True, log_level=4)), table.schema)
    except StopIteration: raise DoesNotExistException('primary key', '')

def table_pkey_col(db, table, recover=None):
    table = sql_gen.as_Table(table)

    join_cols = ['table_schema', 'table_name', 'constraint_schema',
        'constraint_name']
    tables = [sql_gen.Table('key_column_usage', 'information_schema'),
        sql_gen.Join(sql_gen.Table('table_constraints', 'information_schema'),
            dict(((c, sql_gen.join_same_not_null) for c in join_cols)))]
    cols = [sql_gen.Col('column_name')]

    conds = [('constraint_type', 'PRIMARY KEY'), ('table_name', table.name)]
    schema = table.schema
    if schema != None: conds.append(('table_schema', schema))
    order_by = 'position_in_unique_constraint'

    try: return sql_gen.Col(value(select(db, tables, cols, conds,
        order_by=order_by, limit=1, log_level=4)), table)
    except StopIteration: raise DoesNotExistException('primary key', '')

def pkey_name(db, table, recover=None):
    '''If no pkey, returns the first column in the table.'''
    return pkey_col(db, table, recover).name

def pkey_col(db, table, recover=None):
    '''If no pkey, returns the first column in the table.'''
    try: return table_pkey_col(db, table, recover)
    except DoesNotExistException: return table_cols(db, table, recover)[0]

not_null_col = 'not_null_col'

def table_not_null_col(db, table, recover=None):
    '''Name assumed to be the value of not_null_col. If not found, uses pkey.'''
    if not_null_col in table_col_names(db, table, recover): return not_null_col
    else: return pkey_name(db, table, recover)

1130 3348 aaronmk
def constraint_cond(db, constraint):
1131
    module = util.root_module(db.db)
1132
    if module == 'psycopg2':
1133
        table_str = sql_gen.Literal(constraint.table.to_str(db))
1134
        name_str = sql_gen.Literal(constraint.name)
1135
        return value(run_query(db, '''\
1136
SELECT consrc
1137
FROM pg_constraint
1138
WHERE
1139
conrelid = '''+table_str.to_str(db)+'''::regclass
1140
AND conname = '''+name_str.to_str(db)+'''
1141
'''
1142
            , cacheable=True, log_level=4))
1143 5443 aaronmk
    else: raise NotImplementedError("Can't get constraint condition for "
1144
        +module+' database')
1145 3348 aaronmk
1146 5520 aaronmk
def index_exprs(db, index):
1147 3322 aaronmk
    index = sql_gen.as_Table(index)
1148 1909 aaronmk
    module = util.root_module(db.db)
1149
    if module == 'psycopg2':
1150 3322 aaronmk
        qual_index = sql_gen.Literal(index.to_str(db))
1151 5520 aaronmk
        return list(values(run_query(db, '''\
1152 3322 aaronmk
SELECT pg_get_indexdef(indexrelid, generate_series(1, indnatts), true)
1153
FROM pg_index
1154
WHERE indexrelid = '''+qual_index.to_str(db)+'''::regclass
1155 2782 aaronmk
'''
1156
            , cacheable=True, log_level=4)))
1157 5520 aaronmk
    else: raise NotImplementedError()
1158 853 aaronmk
1159 5520 aaronmk
def index_cols(db, index):
1160
    '''Can also use this for UNIQUE constraints, because a UNIQUE index is
1161
    automatically created. When you don't know whether something is a UNIQUE
1162
    constraint or a UNIQUE index, use this function.'''
1163
    return map(sql_gen.parse_expr_col, index_exprs(db, index))
1164
1165 5445 aaronmk
def index_cond(db, index):
1166
    index = sql_gen.as_Table(index)
1167
    module = util.root_module(db.db)
1168
    if module == 'psycopg2':
1169
        qual_index = sql_gen.Literal(index.to_str(db))
1170
        return value(run_query(db, '''\
1171
SELECT pg_get_expr(indpred, indrelid, true)
1172
FROM pg_index
1173
WHERE indexrelid = '''+qual_index.to_str(db)+'''::regclass
1174
'''
1175
            , cacheable=True, log_level=4))
1176
    else: raise NotImplementedError()
1177
1178 5521 aaronmk
def index_order_by(db, index):
1179
    return sql_gen.CustomCode(', '.join(index_exprs(db, index)))
1180
1181
def table_cluster_on(db, table, recover=None):
1182
    '''
1183
    @return The table's cluster index, or its pkey if none is set
1184
    '''
1185
    table_str = sql_gen.Literal(table.to_str(db))
1186
    try:
1187
        return sql_gen.Table(value(run_query(db, '''\
1188
SELECT relname
1189
FROM pg_index
1190
JOIN pg_class index ON index.oid = indexrelid
1191
WHERE
1192
indrelid = '''+table_str.to_str(db)+'''::regclass
1193
AND indisclustered
1194
'''
1195
            , recover, cacheable=True, log_level=4)), table.schema)
1196
    except StopIteration: return table_pkey_index(db, table, recover)
1197
1198
def table_order_by(db, table, recover=None):
1199 5525 aaronmk
    if table.order_by == None:
1200
        try: table.order_by = index_order_by(db, table_cluster_on(db, table,
1201
            recover))
1202
        except DoesNotExistException: pass
1203
    return table.order_by

#### Functions

def function_exists(db, function):
    '''Returns whether a function with the given name exists. Overloaded
    functions count as existing.'''
    qual_function = sql_gen.Literal(function.to_str(db))
    try:
        select(db, fields=[sql_gen.Cast('regproc', qual_function)],
            recover=True, cacheable=True, log_level=4)
    except DoesNotExistException: return False
    except DuplicateException: return True # overloaded function
    else: return True
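
# Hypothetical usage sketch: function_exists() probes the name via a ::regproc
# cast, so an overloaded name (DuplicateException) still counts as existing.
# `func` stands for whatever sql_gen wrapper the caller already uses to name
# the function; all this code needs is its to_str(db).
def _example_function_exists(db, func):
    if not function_exists(db, func): raise Exception('missing DB function')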

##### Structural changes

#### Columns

def add_col(db, table, col, comment=None, if_not_exists=False, **kw_args):
    '''
    @param col TypedCol The name may be versioned, so be sure to propagate any
        renaming back to any source column for the TypedCol.
    @param comment None|str SQL comment used to distinguish columns of the same
        name from each other when they contain different data, to allow the
        ADD COLUMN query to be cached. If not set, the query will not be cached.
    '''
    assert isinstance(col, sql_gen.TypedCol)
    
    while True:
        str_ = 'ALTER TABLE '+table.to_str(db)+' ADD COLUMN '+col.to_str(db)
        if comment != None: str_ += ' '+sql_gen.esc_comment(comment)
        
        try:
            run_query(db, str_, recover=True, cacheable=True, **kw_args)
            break
        except DuplicateException:
            if if_not_exists: raise
            col.name = next_version(col.name)
            # try again with next version of name
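
# Hypothetical usage sketch: add a typed column, with a comment so the query
# can be cached. With the default if_not_exists=False, a duplicate name makes
# add_col() retry under a next_version()ed name, updating col.name in place.
# Table and column names are made up for illustration.
def _example_add_col(db):
    col = sql_gen.TypedCol('source_id', 'integer')
    add_col(db, sql_gen.Table('specimens', 'public'), col,
        comment='ID in the source dataset')
    return col.name # may have been renamed to avoid a collision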

def add_not_null(db, col):
    '''Adds a NOT NULL constraint to a column.'''
    table = col.table
    col = sql_gen.to_name_only_col(col)
    run_query(db, 'ALTER TABLE '+table.to_str(db)+' ALTER COLUMN '
        +col.to_str(db)+' SET NOT NULL', cacheable=True, log_level=3)

def drop_not_null(db, col):
    '''Removes a NOT NULL constraint from a column.'''
    table = col.table
    col = sql_gen.to_name_only_col(col)
    run_query(db, 'ALTER TABLE '+table.to_str(db)+' ALTER COLUMN '
        +col.to_str(db)+' DROP NOT NULL', cacheable=True, log_level=3)

row_num_col = '_row_num'

row_num_col_def = sql_gen.TypedCol('', 'serial', nullable=False,
    constraints='PRIMARY KEY')

def add_row_num(db, table, name=row_num_col):
    '''Adds a row number column to a table. Its definition is in
    row_num_col_def. It will be the primary key.'''
    col_def = copy.copy(row_num_col_def)
    col_def.name = name
    add_col(db, table, col_def, comment='', if_not_exists=True, log_level=3)
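
# Hypothetical usage sketch: give a staging table a serial pkey named _row_num
# so rows can be addressed by insertion order. The table name is made up for
# illustration.
def _example_add_row_num(db):
    add_row_num(db, sql_gen.Table('stage_specimens', 'public'))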

#### Indexes

def add_pkey(db, table, cols=None, recover=None):
    '''Adds a primary key.
    @param cols [sql_gen.Col,...] The columns in the primary key.
        Defaults to the first column in the table.
    @pre The table must not already have a primary key.
    '''
    table = sql_gen.as_Table(table)
    if cols == None: cols = [pkey_name(db, table, recover)]
    col_strs = [sql_gen.to_name_only_col(v).to_str(db) for v in cols]
    
    run_query(db, 'ALTER TABLE '+table.to_str(db)+' ADD PRIMARY KEY ('
        +(', '.join(col_strs))+')', recover=True, cacheable=True, log_level=3,
        log_ignore_excs=(DuplicateException,))
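
# Hypothetical usage sketch: add a compound primary key. Passing plain column
# names is assumed to work here because the default path wraps pkey_name()'s
# result the same way; otherwise pass sql_gen.Col objects as documented.
# Table and column names are made up for illustration.
def _example_add_pkey(db):
    add_pkey(db, sql_gen.Table('taxon_occurrence', 'public'),
        cols=['datasource_id', 'occurrence_id'])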

def add_index(db, exprs, table=None, unique=False, ensure_not_null_=True):
    '''Adds an index on column(s) or expression(s) if it doesn't already exist.
    Currently, only function calls and literal values are supported as
    expressions.
    @param ensure_not_null_ If set, translates NULL values to sentinel values.
        This allows indexes to be used for comparisons where NULLs are equal.
    '''
    exprs = lists.mk_seq(exprs)
    
    # Parse exprs
    old_exprs = exprs[:]
    exprs = []
    cols = []
    for i, expr in enumerate(old_exprs):
        expr = sql_gen.as_Col(expr, table)
        
        # Handle nullable columns
        if ensure_not_null_:
            try: expr = sql_gen.ensure_not_null(db, expr)
            except KeyError: pass # unknown type, so just create plain index
        
        # Extract col
        expr = copy.deepcopy(expr) # don't modify input!
        col = expr
        if isinstance(expr, sql_gen.FunctionCall): col = expr.args[0]
        expr = sql_gen.cast_literal(expr)
        if not isinstance(expr, (sql_gen.Expr, sql_gen.Col)):
            expr = sql_gen.Expr(expr)
        
        # Extract table
        if table == None:
            assert sql_gen.is_table_col(col)
            table = col.table
        
        if isinstance(col, sql_gen.Col): col.table = None
        
        exprs.append(expr)
        cols.append(col)
    
    table = sql_gen.as_Table(table)
    
    # Add index
    str_ = 'CREATE'
    if unique: str_ += ' UNIQUE'
    str_ += ' INDEX ON '+table.to_str(db)+' ('+(
        ', '.join((v.to_str(db) for v in exprs)))+')'
    run_query(db, str_, recover=True, cacheable=True, log_level=3)
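
# Hypothetical usage sketch: index two columns together, uniquely. With the
# default ensure_not_null_, nullable columns are wrapped so NULLs compare
# equal via a sentinel. Table and column names are made up for illustration.
def _example_add_index(db):
    add_index(db, ['scientificname', 'rank'],
        sql_gen.Table('plantname', 'public'), unique=True)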

already_indexed = object() # tells add_indexes() the pkey has already been added

def add_indexes(db, table, has_pkey=True):
    '''Adds an index on each column in a table.
    @param has_pkey bool|already_indexed Whether a pkey instead of a regular
        index should be added on the first column.
        * If already_indexed, the pkey is assumed to have already been added
    '''
    cols = table_col_names(db, table)
    if has_pkey:
        if has_pkey is not already_indexed: add_pkey(db, table)
        cols = cols[1:]
    for col in cols: add_index(db, col, table)
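
# Hypothetical usage sketch: index every column of a table whose pkey was
# already created (this mirrors how create_table() calls add_indexes()).
# The table name is made up for illustration.
def _example_add_indexes(db):
    add_indexes(db, sql_gen.Table('analytical_stem', 'public'),
        has_pkey=already_indexed)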

#### Tables

### Maintenance

def analyze(db, table):
    table = sql_gen.as_Table(table)
    run_query(db, 'ANALYZE '+table.to_str(db), log_level=3)

def autoanalyze(db, table):
    if db.autoanalyze: analyze(db, table)

def vacuum(db, table):
    '''Runs VACUUM ANALYZE. This must happen outside a transaction, so the
    query is run with autocommit on.'''
    table = sql_gen.as_Table(table)
    db.with_autocommit(lambda: run_query(db, 'VACUUM ANALYZE '+table.to_str(db),
        log_level=3))

### Lifecycle

def drop(db, type_, name):
    name = sql_gen.as_Name(name)
    run_query(db, 'DROP '+type_+' IF EXISTS '+name.to_str(db)+' CASCADE')

def drop_table(db, table): drop(db, 'TABLE', table)

def create_table(db, table, cols=[], has_pkey=True, col_indexes=True,
    like=None):
    '''Creates a table.
    @param cols [sql_gen.TypedCol,...] The column names and types
    @param has_pkey If set, the first column becomes the primary key.
    @param col_indexes bool|[ref]
        * If True, indexes will be added on all non-pkey columns.
        * If a list reference, [0] will be set to a function to do this.
          This can be used to delay index creation until the table is populated.
    '''
    table = sql_gen.as_Table(table)
    
    if like != None:
        cols = [sql_gen.CustomCode('LIKE '+like.to_str(db)+' INCLUDING ALL')
            ]+cols
        table.order_by = like.order_by
    if has_pkey:
        cols[0] = pkey = copy.copy(cols[0]) # don't modify input!
        pkey.constraints = 'PRIMARY KEY'
    
    temp = table.is_temp and not db.debug_temp
        # temp tables permanent in debug_temp mode
    
    # Create table
    def create():
        str_ = 'CREATE'
        if temp: str_ += ' TEMP'
        str_ += ' TABLE '+table.to_str(db)+' (\n'
        str_ += '\n, '.join(c.to_str(db) for c in cols)
        str_ += '\n);'
        
        run_query(db, str_, recover=True, cacheable=True, log_level=2,
            log_ignore_excs=(DuplicateException,))
    if table.is_temp:
        while True:
            try:
                create()
                break
            except DuplicateException:
                table.name = next_version(table.name)
                # try again with next version of name
    else: create()
    
    # Add indexes
    if has_pkey: has_pkey = already_indexed
    def add_indexes_(): add_indexes(db, table, has_pkey)
    if isinstance(col_indexes, list): col_indexes[0] = add_indexes_ # defer
    elif col_indexes: add_indexes_() # add now
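
# Hypothetical usage sketch: use the col_indexes "list reference" protocol from
# the docstring to defer index creation until after a bulk load. Table, column
# names, and types are made up for illustration.
def _example_create_table_deferred_indexes(db):
    table = sql_gen.Table('stage_import', 'public')
    add_indexes_ref = [None]
    create_table(db, table, cols=[sql_gen.TypedCol('row_id', 'serial'),
        sql_gen.TypedCol('accepted_name', 'text')], col_indexes=add_indexes_ref)
    # ... bulk-load `table` here ...
    add_indexes_ref[0]() # now index the non-pkey columns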

def copy_table_struct(db, src, dest):
    '''Creates a structure-only copy of a table. (Does not copy data.)'''
    create_table(db, dest, has_pkey=False, col_indexes=False, like=src)

def copy_table(db, src, dest):
    '''Creates a copy of a table, including data.'''
    copy_table_struct(db, src, dest)
    insert_select(db, dest, None, mk_select(db, src))

### Data

def truncate(db, table, schema='public', **kw_args):
    '''For params, see run_query()'''
    table = sql_gen.as_Table(table, schema)
    return run_query(db, 'TRUNCATE '+table.to_str(db)+' CASCADE', **kw_args)

def empty_temp(db, tables):
    '''Truncates the given temp table(s).'''
    tables = lists.mk_seq(tables)
    for table in tables: truncate(db, table, log_level=3)

def empty_db(db, schema='public', **kw_args):
    '''For kw_args, see tables()'''
    for table in tables(db, schema, **kw_args): truncate(db, table, schema)

def distinct_table(db, table, distinct_on):
    '''Creates a copy of a temp table which is distinct on the given columns.
    The old and new tables will both get an index on these columns, to
    facilitate merge joins.
    @param distinct_on If empty, creates a table with one row. This is useful if
        your distinct_on columns are all literal values.
    @return The new table.
    '''
    new_table = sql_gen.suffixed_table(table, '_distinct')
    distinct_on = filter(sql_gen.is_table_col, distinct_on)
    
    copy_table_struct(db, table, new_table)
    
    limit = None
    if distinct_on == []: limit = 1 # one sample row
    else:
        add_index(db, distinct_on, new_table, unique=True)
        add_index(db, distinct_on, table) # for join optimization
    
    insert_select(db, new_table, None, mk_select(db, table, order_by=None,
        limit=limit), ignore=True)
    analyze(db, new_table)
    
    return new_table
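
# Hypothetical usage sketch: deduplicate a staging temp table on its natural
# key before merging it in; both tables get an index on the key columns so the
# later join can be a merge join. `staging` and `key_cols` are assumed to be an
# existing temp table and its key columns (as sql_gen.Col objects bound to that
# table), since distinct_table() filters out anything that isn't a table column.
def _example_distinct_table(db, staging, key_cols):
    return distinct_table(db, staging, key_cols) # named <staging>_distinct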