# Database access

import copy
import re
import time
import warnings

import exc
import dicts
import iters
import lists
import profiling
from Proxy import Proxy
import rand
import sql_gen
import strings
import util

##### Exceptions

def get_cur_query(cur, input_query=None):
    raw_query = None
    if hasattr(cur, 'query'): raw_query = cur.query
    elif hasattr(cur, '_last_executed'): raw_query = cur._last_executed
    
    if raw_query != None: return raw_query
    else: return '[input] '+strings.ustr(input_query)

def _add_cursor_info(e, *args, **kw_args):
    '''For params, see get_cur_query()'''
    exc.add_msg(e, 'query: '+strings.ustr(get_cur_query(*args, **kw_args)))

class DbException(exc.ExceptionWithCause):
    def __init__(self, msg, cause=None, cur=None):
        exc.ExceptionWithCause.__init__(self, msg, cause, cause_newline=True)
        if cur != None: _add_cursor_info(self, cur)

class ExceptionWithName(DbException):
    def __init__(self, name, cause=None):
        DbException.__init__(self, 'for name: '
            +strings.as_tt(strings.ustr(name)), cause)
        self.name = name

class ExceptionWithValue(DbException):
    def __init__(self, value, cause=None):
        DbException.__init__(self, 'for value: '
            +strings.as_tt(strings.urepr(value)), cause)
        self.value = value

class ExceptionWithNameType(DbException):
    def __init__(self, type_, name, cause=None):
        DbException.__init__(self, 'for type: '+strings.as_tt(strings.ustr(
            type_))+'; name: '+strings.as_tt(name), cause)
        self.type = type_
        self.name = name

class ConstraintException(DbException):
    def __init__(self, name, cond, cols, cause=None):
        msg = 'Violated '+strings.as_tt(name)+' constraint'
        if cond != None: msg += ' with condition '+cond
        if cols != []: msg += ' on columns: '+strings.as_tt(', '.join(cols))
        DbException.__init__(self, msg, cause)
        self.name = name
        self.cond = cond
        self.cols = cols

class MissingCastException(DbException):
    def __init__(self, type_, col=None, cause=None):
        msg = 'Missing cast to type '+strings.as_tt(type_)
        if col != None: msg += ' on column: '+strings.as_tt(col)
        DbException.__init__(self, msg, cause)
        self.type = type_
        self.col = col

class NameException(DbException): pass

class DuplicateKeyException(ConstraintException): pass

class NullValueException(ConstraintException): pass

class CheckException(ConstraintException): pass

class InvalidValueException(ExceptionWithValue): pass

class DuplicateException(ExceptionWithNameType): pass

class DoesNotExistException(ExceptionWithNameType): pass

class EmptyRowException(DbException): pass

##### Warnings

class DbWarning(UserWarning): pass

##### Result retrieval

def col_names(cur): return (col[0] for col in cur.description)

def rows(cur): return iter(lambda: cur.fetchone(), None)

def consume_rows(cur):
    '''Used to fetch all rows so result will be cached'''
    iters.consume_iter(rows(cur))

def next_row(cur): return rows(cur).next()

def row(cur):
    row_ = next_row(cur)
    consume_rows(cur)
    return row_

def next_value(cur): return next_row(cur)[0]

def value(cur): return row(cur)[0]

def values(cur): return iters.func_iter(lambda: next_value(cur))

def value_or_none(cur):
    try: return value(cur)
    except StopIteration: return None

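# Example (illustrative sketch; the table name is hypothetical): these helpers
# wrap any DB-API cursor, including the ones returned by run_query() below.
#     cur = run_query(db, 'SELECT * FROM plot')
#     header = list(col_names(cur)) # column names
#     all_rows = list(rows(cur)) # remaining rows as tuples
#     count = value(run_query(db, 'SELECT count(*) FROM plot')) # single value
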
##### Escaping

def esc_name_by_module(module, name):
    if module == 'psycopg2' or module == None: quote = '"'
    elif module == 'MySQLdb': quote = '`'
    else: raise NotImplementedError("Can't escape name for "+module+' database')
    return sql_gen.esc_name(name, quote)

def esc_name_by_engine(engine, name, **kw_args):
    return esc_name_by_module(db_engines[engine][0], name, **kw_args)

def esc_name(db, name, **kw_args):
    return esc_name_by_module(util.root_module(db.db), name, **kw_args)

def qual_name(db, schema, table):
    def esc_name_(name): return esc_name(db, name)
    table = esc_name_(table)
    if schema != None: return esc_name_(schema)+'.'+table
    else: return table

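# Example (illustrative sketch; names are hypothetical): qual_name() joins
# esc_name()-escaped parts using the quoting style of the connection's driver,
# e.g. qual_name(db, 'public', 'plot') -> '"public"."plot"' under psycopg2.
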
##### Database connections

db_config_names = ['engine', 'host', 'user', 'password', 'database', 'schemas']

db_engines = {
    'MySQL': ('MySQLdb', {'password': 'passwd', 'database': 'db'}),
    'PostgreSQL': ('psycopg2', {}),
}

DatabaseErrors_set = set([DbException])
DatabaseErrors = tuple(DatabaseErrors_set)

def _add_module(module):
    DatabaseErrors_set.add(module.DatabaseError)
    global DatabaseErrors
    DatabaseErrors = tuple(DatabaseErrors_set)

def db_config_str(db_config):
    return db_config['engine']+' database '+db_config['database']

log_debug_none = lambda msg, level=2: None

class DbConn:
    def __init__(self, db_config, autocommit=True, caching=True,
        log_debug=log_debug_none, debug_temp=False, src=None):
        '''
        @param debug_temp Whether temporary objects should instead be permanent.
            This assists in debugging the internal objects used by the program.
        @param src In autocommit mode, will be included in a comment in every
            query, to help identify the data source in pg_stat_activity.
        '''
        self.db_config = db_config
        self.autocommit = autocommit
        self.caching = caching
        self.log_debug = log_debug
        self.debug = log_debug != log_debug_none
        self.debug_temp = debug_temp
        self.src = src
        self.autoanalyze = False
        self.autoexplain = False
        self.profile_row_ct = None
        
        self._savepoint = 0
        self._reset()
    
    def __getattr__(self, name):
        if name == '__dict__': raise Exception('getting __dict__')
        if name == 'db': return self._db()
        else: raise AttributeError()
    
    def __getstate__(self):
        state = copy.copy(self.__dict__) # shallow copy
        state['log_debug'] = None # don't pickle the debug callback
        state['_DbConn__db'] = None # don't pickle the connection
        return state
    
    def clear_cache(self): self.query_results = {}
    
    def _reset(self):
        self.clear_cache()
        assert self._savepoint == 0
        self._notices_seen = set()
        self.__db = None
    
    def connected(self): return self.__db != None
    
    def close(self):
        if not self.connected(): return
        
        # Record that the automatic transaction is now closed
        self._savepoint -= 1
        
        self.db.close()
        self._reset()
    
    def reconnect(self):
        # Do not do this in test mode as it would roll back everything
        if self.autocommit: self.close()
        # Connection will be reopened automatically on first query
    
    def _db(self):
        if self.__db == None:
            # Process db_config
            db_config = self.db_config.copy() # don't modify input!
            schemas = db_config.pop('schemas', None)
            module_name, mappings = db_engines[db_config.pop('engine')]
            module = __import__(module_name)
            _add_module(module)
            for orig, new in mappings.iteritems():
                try: util.rename_key(db_config, orig, new)
                except KeyError: pass
            
            # Connect
            self.__db = module.connect(**db_config)
            
            # Record that a transaction is already open
            self._savepoint += 1
            
            # Configure connection
            if hasattr(self.db, 'set_isolation_level'):
                import psycopg2.extensions
                self.db.set_isolation_level(
                    psycopg2.extensions.ISOLATION_LEVEL_READ_COMMITTED)
            if schemas != None:
                search_path = [self.esc_name(s) for s in schemas.split(',')]
                search_path.append(value(run_query(self, 'SHOW search_path',
                    log_level=4)))
                run_query(self, 'SET search_path TO '+(','.join(search_path)),
                    log_level=3)
        
        return self.__db
    
    class DbCursor(Proxy):
        def __init__(self, outer):
            Proxy.__init__(self, outer.db.cursor())
            self.outer = outer
            self.query_results = outer.query_results
            self.query_lookup = None
            self.result = []
        
        def execute(self, query):
            self._is_insert = query.startswith('INSERT')
            self.query_lookup = query
            try:
                try: cur = self.inner.execute(query)
                finally: self.query = get_cur_query(self.inner, query)
            except Exception, e:
                self.result = e # cache the exception as the result
                self._cache_result()
                raise
            
            # Always cache certain queries
            query = sql_gen.lstrip(query)
            if query.startswith('CREATE') or query.startswith('ALTER'):
                # structural changes
                # Rest of query must be unique in the face of name collisions,
                # so don't cache ADD COLUMN unless it has distinguishing comment
                if query.find('ADD COLUMN') < 0 or query.endswith('*/'):
                    self._cache_result()
            elif self.rowcount == 0 and query.startswith('SELECT'): # empty
                consume_rows(self) # fetch all rows so result will be cached
            
            return cur
        
        def fetchone(self):
            row = self.inner.fetchone()
            if row != None: self.result.append(row)
            # otherwise, fetched all rows
            else: self._cache_result()
            return row
        
        def _cache_result(self):
            # For inserts that return a result set, don't cache result set since
            # inserts are not idempotent. Other non-SELECT queries don't have
            # their result set read, so only exceptions will be cached (an
            # invalid query will always be invalid).
            if self.query_results != None and (not self._is_insert
                or isinstance(self.result, Exception)):
                
                assert self.query_lookup != None
                self.query_results[self.query_lookup] = self.CacheCursor(
                    util.dict_subset(dicts.AttrsDictView(self),
                    ['query', 'result', 'rowcount', 'description']))
        
        class CacheCursor:
            def __init__(self, cached_result): self.__dict__ = cached_result
            
            def execute(self, *args, **kw_args):
                if isinstance(self.result, Exception): raise self.result
                # otherwise, result is a rows list
                self.iter = iter(self.result)
            
            def fetchone(self):
                try: return self.iter.next()
                except StopIteration: return None
    
    def esc_value(self, value):
        try: str_ = self.mogrify('%s', [value])
        except NotImplementedError, e:
            module = util.root_module(self.db)
            if module == 'MySQLdb':
                import _mysql
                str_ = _mysql.escape_string(value)
            else: raise e
        return strings.to_unicode(str_)
    
    def esc_name(self, name): return esc_name(self, name) # calls global func
    
    def std_code(self, str_):
        '''Standardizes SQL code.
        * Ensures that string literals are prefixed by `E`
        '''
        if str_.startswith("'"): str_ = 'E'+str_
        return str_
    
    def can_mogrify(self):
        module = util.root_module(self.db)
        return module == 'psycopg2'
    
    def mogrify(self, query, params=None):
        if self.can_mogrify(): return self.db.cursor().mogrify(query, params)
        else: raise NotImplementedError("Can't mogrify query")
    
    def print_notices(self):
        if hasattr(self.db, 'notices'):
            for msg in self.db.notices:
                if msg not in self._notices_seen:
                    self._notices_seen.add(msg)
                    self.log_debug(msg, level=2)
    
    def run_query(self, query, cacheable=False, log_level=2,
        debug_msg_ref=None):
        '''
        @param log_ignore_excs The log_level will be increased by 2 if the query
            throws one of these exceptions.
        @param debug_msg_ref If specified, the log message will be returned in
            this instead of being output. This allows you to filter log messages
            depending on the result of the query.
        '''
        assert query != None
        
        if self.autocommit and self.src != None:
            query = sql_gen.esc_comment(self.src)+'\t'+query
        
        if not self.caching: cacheable = False
        used_cache = False
        
        if self.debug:
            profiler = profiling.ItersProfiler(start_now=True, iter_text='row')
        try:
            # Get cursor
            if cacheable:
                try: cur = self.query_results[query]
                except KeyError: cur = self.DbCursor(self)
                else: used_cache = True
            else: cur = self.db.cursor()
            
            # Run query
            try: cur.execute(query)
            except Exception, e:
                _add_cursor_info(e, self, query)
                raise
            else: self.do_autocommit()
        finally:
            if self.debug:
                profiler.stop(self.profile_row_ct)
                
                ## Log or return query
                
                query = strings.ustr(get_cur_query(cur, query))
                # Put the src comment on a separate line in the log file
                query = query.replace('\t', '\n', 1)
                
                msg = 'DB query: '
                
                if used_cache: msg += 'cache hit'
                elif cacheable: msg += 'cache miss'
                else: msg += 'non-cacheable'
                
                msg += ':\n'+profiler.msg()+'\n'+strings.as_code(query, 'SQL')
                
                if debug_msg_ref != None: debug_msg_ref[0] = msg
                else: self.log_debug(msg, log_level)
                
                self.print_notices()
        
        return cur
    
    def is_cached(self, query): return query in self.query_results
    
    def with_autocommit(self, func):
        import psycopg2.extensions
        
        prev_isolation_level = self.db.isolation_level
        self.db.set_isolation_level(
            psycopg2.extensions.ISOLATION_LEVEL_AUTOCOMMIT)
        try: return func()
        finally: self.db.set_isolation_level(prev_isolation_level)
    
    def with_savepoint(self, func):
        top = self._savepoint == 0
        savepoint = 'level_'+str(self._savepoint)
        
        if self.debug:
            self.log_debug('Begin transaction', level=4)
            profiler = profiling.ItersProfiler(start_now=True, iter_text='row')
        
        # Must happen before running queries so they don't get autocommitted
        self._savepoint += 1
        
        if top: query = 'START TRANSACTION ISOLATION LEVEL READ COMMITTED'
        else: query = 'SAVEPOINT '+savepoint
        self.run_query(query, log_level=4)
        try:
            return_val = func()
            if top: self.run_query('COMMIT', log_level=4)
        except:
            if top: query = 'ROLLBACK'
            else: query = 'ROLLBACK TO SAVEPOINT '+savepoint
            self.run_query(query, log_level=4)
            
            raise
        finally:
            # Always release savepoint, because after ROLLBACK TO SAVEPOINT,
            # "The savepoint remains valid and can be rolled back to again"
            # (http://www.postgresql.org/docs/8.3/static/sql-rollback-to.html).
            if not top:
                self.run_query('RELEASE SAVEPOINT '+savepoint, log_level=4)
            
            self._savepoint -= 1
            assert self._savepoint >= 0
            
            if self.debug:
                profiler.stop(self.profile_row_ct)
                self.log_debug('End transaction\n'+profiler.msg(), level=4)
            
            self.do_autocommit() # OK to do this after ROLLBACK TO SAVEPOINT
        
        return return_val
    
    def do_autocommit(self):
        '''Autocommits if outside savepoint'''
        assert self._savepoint >= 1
        if self.autocommit and self._savepoint == 1:
            self.log_debug('Autocommitting', level=4)
            self.db.commit()
    
    def col_info(self, col, cacheable=True):
        table = sql_gen.Table('columns', 'information_schema')
        cols = [sql_gen.Col('data_type'), sql_gen.Col('udt_name'),
            'column_default', sql_gen.Cast('boolean',
            sql_gen.Col('is_nullable'))]
        
        conds = [('table_name', col.table.name),
            ('column_name', strings.ustr(col.name))]
        schema = col.table.schema
        if schema != None: conds.append(('table_schema', schema))
        
        cur = select(self, table, cols, conds, order_by='table_schema', limit=1,
            cacheable=cacheable, log_level=4) # TODO: order by search_path order
        try: type_, extra_type, default, nullable = row(cur)
        except StopIteration: raise sql_gen.NoUnderlyingTableException(col)
        default = sql_gen.as_Code(default, self)
        if type_ == 'USER-DEFINED': type_ = extra_type
        elif type_ == 'ARRAY':
            type_ = sql_gen.ArrayType(strings.remove_prefix('_', extra_type,
                require=True))
        
        return sql_gen.TypedCol(col.name, type_, default, nullable)
    
    def TempFunction(self, name):
        if self.debug_temp: schema = None
        else: schema = 'pg_temp'
        return sql_gen.Function(name, schema)

connect = DbConn

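# Example usage (illustrative sketch; the connection settings are hypothetical):
#     db = connect(dict(engine='PostgreSQL', host='localhost', user='bien',
#         password='...', database='bien', schemas='public'))
#     print value(db.run_query('SELECT 1', cacheable=True))
#     db.close()
# The connection is opened lazily on the first query, and in autocommit mode
# each top-level query is committed by do_autocommit().
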
##### Recoverable querying

def with_savepoint(db, func): return db.with_savepoint(func)

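# Example (illustrative sketch; the table name is hypothetical): run a callback
# inside a savepoint (or the top-level transaction), so that only its work is
# rolled back if it raises:
#     def body(): return run_query(db, 'DELETE FROM plot')
#     with_savepoint(db, body)
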
def run_query(db, query, recover=None, cacheable=False, log_level=2,
    log_ignore_excs=None, **kw_args):
    '''For params, see DbConn.run_query()'''
    if recover == None: recover = False
    if log_ignore_excs == None: log_ignore_excs = ()
    log_ignore_excs = tuple(log_ignore_excs)
    debug_msg_ref = [None]
    
    query = with_explain_comment(db, query)
    
    try:
        try:
            def run(): return db.run_query(query, cacheable, log_level,
                debug_msg_ref, **kw_args)
            if recover and not db.is_cached(query):
                return with_savepoint(db, run)
            else: return run() # don't need savepoint if cached
        except Exception, e:
            msg = strings.ustr(e.args[0])
            msg = re.sub(r'^(?:PL/Python: )?ValueError: ', r'', msg)
            
            match = re.match(r'^duplicate key value violates unique constraint '
                r'"(.+?)"', msg)
            if match:
                constraint, = match.groups()
                cols = []
                if recover: # need auto-rollback to run index_cols()
                    try: cols = index_cols(db, constraint)
                    except NotImplementedError: pass
                raise DuplicateKeyException(constraint, None, cols, e)
            
            match = re.match(r'^null value in column "(.+?)" violates not-null'
                r' constraint', msg)
            if match:
                col, = match.groups()
                raise NullValueException('NOT NULL', None, [col], e)
            
            match = re.match(r'^new row for relation "(.+?)" violates check '
                r'constraint "(.+?)"', msg)
            if match:
                table, constraint = match.groups()
                constraint = sql_gen.Col(constraint, table)
                cond = None
                if recover: # need auto-rollback to run constraint_cond()
                    try: cond = constraint_cond(db, constraint)
                    except NotImplementedError: pass
                raise CheckException(constraint.to_str(db), cond, [], e)
            
            match = re.match(r'^(?:invalid input (?:syntax|value)\b.*?'
                r'|.+? out of range): "(.+?)"', msg)
            if match:
                value, = match.groups()
                raise InvalidValueException(strings.to_unicode(value), e)
            
            match = re.match(r'^column "(.+?)" is of type (.+?) but expression '
                r'is of type', msg)
            if match:
                col, type_ = match.groups()
                raise MissingCastException(type_, col, e)
            
            match = re.match(r'^could not determine polymorphic type because '
                r'input has type "unknown"', msg)
            if match: raise MissingCastException('text', None, e)
            
            match = re.match(r'^.+? types .+? and .+? cannot be matched', msg)
            if match: raise MissingCastException('text', None, e)
            
            typed_name_re = r'^(\S+) "(.+?)"(?: of relation ".+?")?'
            
            match = re.match(typed_name_re+r'.*? already exists', msg)
            if match:
                type_, name = match.groups()
                raise DuplicateException(type_, name, e)
            
            match = re.match(r'more than one (\S+) named ""(.+?)""', msg)
            if match:
                type_, name = match.groups()
                raise DuplicateException(type_, name, e)
            
            match = re.match(typed_name_re+r' does not exist', msg)
            if match:
                type_, name = match.groups()
                raise DoesNotExistException(type_, name, e)
            
            raise # no specific exception raised
    except log_ignore_excs:
        log_level += 2
        raise
    finally:
        if debug_msg_ref[0] != None: db.log_debug(debug_msg_ref[0], log_level)

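# Example (illustrative sketch; table and column names are hypothetical):
# run_query() maps driver errors onto the exception classes defined above, so
# callers can catch specific conditions:
#     try: run_query(db, 'INSERT INTO plot (id) VALUES (1)', recover=True)
#     except DuplicateKeyException, e: pass # e.cols names the unique index's columns
#     except NullValueException, e: pass # a NOT NULL column was left unset
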
##### Basic queries

def is_explainable(query):
    # See <http://www.postgresql.org/docs/8.3/static/sql-explain.html#AEN57749>
    return re.match(r'^(?:SELECT|INSERT|UPDATE|DELETE|VALUES|EXECUTE|DECLARE)\b'
        , query)

def explain(db, query, **kw_args):
    '''
    For params, see run_query().
    '''
    kw_args.setdefault('log_level', 4)
    
    return strings.ustr(strings.join_lines(values(run_query(db,
        'EXPLAIN '+query, recover=True, cacheable=True, **kw_args))))
        # not a higher log_level because it's useful to see what query is being
        # run before it's executed, which EXPLAIN effectively provides

def has_comment(query): return query.endswith('*/')

def with_explain_comment(db, query, **kw_args):
    if db.autoexplain and not has_comment(query) and is_explainable(query):
        query += '\n'+sql_gen.esc_comment(' EXPLAIN:\n'
            +explain(db, query, **kw_args))
    return query

def next_version(name):
    version = 1 # first existing name was version 0
    match = re.match(r'^(.*)#(\d+)$', name)
    if match:
        name, version = match.groups()
        version = int(version)+1
    return sql_gen.concat(name, '#'+str(version))

def lock_table(db, table, mode):
    table = sql_gen.as_Table(table)
    run_query(db, 'LOCK TABLE '+table.to_str(db)+' IN '+mode+' MODE')

def run_query_into(db, query, into=None, add_pkey_=False, **kw_args):
    '''Outputs a query to a temp table.
    For params, see run_query().
    '''
    if into == None: return run_query(db, query, **kw_args)
    
    assert isinstance(into, sql_gen.Table)
    
    into.is_temp = True
    # "temporary tables cannot specify a schema name", so remove schema
    into.schema = None
    
    kw_args['recover'] = True
    kw_args.setdefault('log_ignore_excs', (DuplicateException,))
    
    temp = not db.debug_temp # tables are permanent in debug_temp mode
    
    # Create table
    while True:
        create_query = 'CREATE'
        if temp: create_query += ' TEMP'
        create_query += ' TABLE '+into.to_str(db)+' AS\n'+query
        
        try:
            cur = run_query(db, create_query, **kw_args)
                # CREATE TABLE AS sets rowcount to # rows in query
            break
        except DuplicateException, e:
            into.name = next_version(into.name)
            # try again with next version of name
    
    if add_pkey_: add_pkey(db, into)
    
    # According to the PostgreSQL doc, "The autovacuum daemon cannot access and
    # therefore cannot vacuum or analyze temporary tables. [...] if a temporary
    # table is going to be used in complex queries, it is wise to run ANALYZE on
    # the temporary table after it is populated."
    # (http://www.postgresql.org/docs/9.1/static/sql-createtable.html)
    # If into is not a temp table, ANALYZE is useful but not required.
    analyze(db, into)
    
    return cur

order_by_pkey = object() # tells mk_select() to order by the pkey

distinct_on_all = object() # tells mk_select() to SELECT DISTINCT ON all columns

def mk_select(db, tables=None, fields=None, conds=None, distinct_on=[],
    limit=None, start=None, order_by=order_by_pkey, default_table=None,
    explain=True):
    '''
    @param tables The single table to select from, or a list of tables to join
        together, with tables after the first being sql_gen.Join objects
    @param fields Use None to select all fields in the table
    @param conds WHERE conditions: [(compare_left_side, compare_right_side),...]
        * container can be any iterable type
        * compare_left_side: sql_gen.Code|str (for col name)
        * compare_right_side: sql_gen.ValueCond|literal value
    @param distinct_on The columns to SELECT DISTINCT ON, or distinct_on_all to
        use all columns
    @return query
    '''
    # Parse tables param
    tables = lists.mk_seq(tables)
    tables = list(tables) # don't modify input! (list() copies input)
    table0 = sql_gen.as_Table(tables.pop(0)) # first table is separate
    
    # Parse other params
    if conds == None: conds = []
    elif dicts.is_dict(conds): conds = conds.items()
    conds = list(conds) # don't modify input! (list() copies input)
    assert limit == None or isinstance(limit, (int, long))
    assert start == None or isinstance(start, (int, long))
    if order_by is order_by_pkey:
        if table0 == None or distinct_on != []: order_by = None
        else: order_by = pkey(db, table0, recover=True)
    
    query = 'SELECT'
    
    def parse_col(col): return sql_gen.as_Col(col, default_table).to_str(db)
    
    # DISTINCT ON columns
    if distinct_on != []:
        query += '\nDISTINCT'
        if distinct_on is not distinct_on_all:
            query += ' ON ('+(', '.join(map(parse_col, distinct_on)))+')'
    
    # Columns
    if query.find('\n') >= 0: whitespace = '\n'
    else: whitespace = ' '
    if fields == None: query += whitespace+'*'
    else:
        assert fields != []
        if len(fields) > 1: whitespace = '\n'
        query += whitespace+('\n, '.join(map(parse_col, fields)))
    
    # Main table
    if query.find('\n') >= 0 or len(tables) > 0: whitespace = '\n'
    else: whitespace = ' '
    if table0 != None: query += whitespace+'FROM '+table0.to_str(db)
    
    # Add joins
    left_table = table0
    for join_ in tables:
        table = join_.table
        
        # Parse special values
        if join_.type_ is sql_gen.filter_out: # filter no match
            conds.append((sql_gen.Col(table_not_null_col(db, table), table),
                sql_gen.CompareCond(None, '~=')))
        
        query += '\n'+join_.to_str(db, left_table)
        
        left_table = table
    
    missing = True
    if conds != []:
        if len(conds) == 1: whitespace = ' '
        else: whitespace = '\n'
        query += '\n'+sql_gen.combine_conds([sql_gen.ColValueCond(l, r)
            .to_str(db) for l, r in conds], 'WHERE')
    if order_by != None:
        query += '\nORDER BY '+sql_gen.as_Col(order_by, table0).to_str(db)
    if limit != None: query += '\nLIMIT '+str(limit)
    if start != None:
        if start != 0: query += '\nOFFSET '+str(start)
    
    if explain: query = with_explain_comment(db, query)
    
    return query

def select(db, *args, **kw_args):
    '''For params, see mk_select() and run_query()'''
    recover = kw_args.pop('recover', None)
    cacheable = kw_args.pop('cacheable', True)
    log_level = kw_args.pop('log_level', 2)
    
    return run_query(db, mk_select(db, *args, **kw_args), recover, cacheable,
        log_level=log_level)

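# Example (illustrative sketch; table and column names are hypothetical):
#     cur = select(db, 'plot', ['id', 'area'], {'area': 0}, limit=10)
#     for row_ in rows(cur): pass
# To join tables, pass a list whose later entries are sql_gen.Join objects:
#     cur = select(db, ['plot',
#         sql_gen.Join('taxon', {'taxon_id': sql_gen.join_same_not_null})],
#         fields=[sql_gen.Col('id', 'plot'), sql_gen.Col('name', 'taxon')])
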
def mk_insert_select(db, table, cols=None, select_query=None, returning=None,
    embeddable=False, ignore=False, src=None):
    '''
    @param returning str|None An inserted column (such as pkey) to return
    @param embeddable Whether the query should be embeddable as a nested SELECT.
        Warning: If you set this and cacheable=True when the query is run, the
        query will be fully cached, not just if it raises an exception.
    @param ignore Whether to ignore duplicate keys.
    @param src Will be included in the name of any created function, to help
        identify the data source in pg_stat_activity.
    '''
    table = sql_gen.remove_table_rename(sql_gen.as_Table(table))
    if cols == []: cols = None # no cols (all defaults) = unknown col names
    if cols != None: cols = [sql_gen.to_name_only_col(c, table) for c in cols]
    if select_query == None: select_query = 'DEFAULT VALUES'
    if returning != None: returning = sql_gen.as_Col(returning, table)
    
    first_line = 'INSERT INTO '+table.to_str(db)
    
    def mk_insert(select_query):
        query = first_line
        if cols != None:
            query += '\n('+(', '.join((c.to_str(db) for c in cols)))+')'
        query += '\n'+select_query
        
        if returning != None:
            returning_name_col = sql_gen.to_name_only_col(returning)
            query += '\nRETURNING '+returning_name_col.to_str(db)
        
        return query
    
    return_type = sql_gen.CustomCode('unknown')
    if returning != None: return_type = sql_gen.ColType(returning)
    
    if ignore:
        # Always return something to set the correct rowcount
        if returning == None: returning = sql_gen.NamedCol('NULL', None)
        
        embeddable = True # must use function
        
        if cols == None: row = [sql_gen.Col(sql_gen.all_cols, 'row')]
        else: row = [sql_gen.Col(c.name, 'row') for c in cols]
        
        query = sql_gen.RowExcIgnore(sql_gen.RowType(table), select_query,
            sql_gen.ReturnQuery(mk_insert(sql_gen.Values(row).to_str(db))),
            cols)
    else: query = mk_insert(select_query)
    
    if embeddable:
        # Create function
        function_name = sql_gen.clean_name(first_line)
        if src != None: function_name = src+': '+function_name
        while True:
            try:
                func = db.TempFunction(function_name)
                def_ = sql_gen.FunctionDef(func, sql_gen.SetOf(return_type),
                    query)
                
                run_query(db, def_.to_str(db), recover=True, cacheable=True,
                    log_ignore_excs=(DuplicateException,))
                break # this version was successful
            except DuplicateException, e:
                function_name = next_version(function_name)
                # try again with next version of name
        
        # Return query that uses function
        cols = None
        if returning != None: cols = [returning]
        func_table = sql_gen.NamedTable('f', sql_gen.FunctionCall(func), cols)
            # AS clause requires function alias
        return mk_select(db, func_table, order_by=None)
    
    return query

def insert_select(db, table, *args, **kw_args):
    '''For params, see mk_insert_select() and run_query_into()
    @param into sql_gen.Table with suggested name of temp table to put RETURNING
        values in
    '''
    returning = kw_args.get('returning', None)
    ignore = kw_args.get('ignore', False)
    
    into = kw_args.pop('into', None)
    if into != None: kw_args['embeddable'] = True
    recover = kw_args.pop('recover', None)
    if ignore: recover = True
    cacheable = kw_args.pop('cacheable', True)
    log_level = kw_args.pop('log_level', 2)
    
    rowcount_only = ignore and returning == None # keep NULL rows on server
    if rowcount_only: into = sql_gen.Table('rowcount')
    
    cur = run_query_into(db, mk_insert_select(db, table, *args, **kw_args),
        into, recover=recover, cacheable=cacheable, log_level=log_level)
    if rowcount_only: empty_temp(db, into)
    autoanalyze(db, table)
    return cur

default = sql_gen.default # tells insert() to use the default value for a column

def insert(db, table, row, *args, **kw_args):
    '''For params, see insert_select()'''
    ignore = kw_args.pop('ignore', False)
    if ignore: kw_args.setdefault('recover', True)
    
    if lists.is_seq(row): cols = None
    else:
        cols = row.keys()
        row = row.values()
    row = list(row) # ensure that "== []" works
    
    if row == []: query = None
    else: query = sql_gen.Values(row).to_str(db)
    
    try: return insert_select(db, table, cols, query, *args, **kw_args)
    except (DuplicateKeyException, NullValueException):
        if not ignore: raise

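# Example (illustrative sketch; table and column names are hypothetical):
#     insert(db, 'plot', dict(id=5, area=10)) # dict maps column name -> value
#     insert(db, 'plot', [5, 10]) # a sequence supplies values in column order
#     insert(db, 'plot', dict(id=5), ignore=True) # silently skip duplicate keys
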
def mk_update(db, table, changes=None, cond=None, in_place=False,
    cacheable_=True):
    '''
    @param changes [(col, new_value),...]
        * container can be any iterable type
        * col: sql_gen.Code|str (for col name)
        * new_value: sql_gen.Code|literal value
    @param cond sql_gen.Code WHERE condition. e.g. use sql_gen.*Cond objects.
    @param in_place If set, locks the table and updates rows in place.
        This avoids creating dead rows in PostgreSQL.
        * cond must be None
    @param cacheable_ Whether column structure information used to generate the
        query can be cached
    @return str query
    '''
    table = sql_gen.as_Table(table)
    changes = [(sql_gen.to_name_only_col(c, table), sql_gen.as_Value(v))
        for c, v in changes]
    
    if in_place:
        assert cond == None
        
        query = 'ALTER TABLE '+table.to_str(db)+'\n'
        query += ',\n'.join(('ALTER COLUMN '+c.to_str(db)+' TYPE '
            +db.col_info(sql_gen.with_default_table(c, table), cacheable_).type
            +'\nUSING '+v.to_str(db) for c, v in changes))
    else:
        query = 'UPDATE '+table.to_str(db)+'\nSET\n'
        query += ',\n'.join((c.to_str(db)+' = '+v.to_str(db)
            for c, v in changes))
        if cond != None: query += '\nWHERE\n'+cond.to_str(db)
    
    query = with_explain_comment(db, query)
    
    return query

def update(db, table, *args, **kw_args):
    '''For params, see mk_update() and run_query()'''
    recover = kw_args.pop('recover', None)
    cacheable = kw_args.pop('cacheable', False)
    log_level = kw_args.pop('log_level', 2)
    
    cur = run_query(db, mk_update(db, table, *args, **kw_args), recover,
        cacheable, log_level=log_level)
    autoanalyze(db, table)
    return cur

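# Example (illustrative sketch; table and column names are hypothetical):
#     update(db, 'plot', [('area', 0)]) # UPDATE "plot" SET "area" = 0
# With in_place=True the changes are applied via ALTER COLUMN ... TYPE ... USING,
# which avoids creating dead rows but requires cond=None:
#     update(db, 'plot', [('area',
#         sql_gen.Cast('double precision', sql_gen.Col('area')))], in_place=True)
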
def mk_delete(db, table, cond=None):
    '''
    @param cond sql_gen.Code WHERE condition. e.g. use sql_gen.*Cond objects.
    @return str query
    '''
    query = 'DELETE FROM '+table.to_str(db)
    if cond != None: query += '\nWHERE '+cond.to_str(db)
    
    query = with_explain_comment(db, query)
    
    return query

def delete(db, table, *args, **kw_args):
    '''For params, see mk_delete() and run_query()'''
    recover = kw_args.pop('recover', None)
    cacheable = kw_args.pop('cacheable', True)
    log_level = kw_args.pop('log_level', 2)
    
    cur = run_query(db, mk_delete(db, table, *args, **kw_args), recover,
        cacheable, log_level=log_level)
    autoanalyze(db, table)
    return cur

def last_insert_id(db):
    module = util.root_module(db.db)
    if module == 'psycopg2': return value(run_query(db, 'SELECT lastval()'))
    elif module == 'MySQLdb': return db.insert_id()
    else: return None

def define_func(db, def_):
    func = def_.function
    while True:
        try:
            run_query(db, def_.to_str(db), recover=True, cacheable=True,
                log_ignore_excs=(DuplicateException,))
            break # successful
        except DuplicateException:
            func.name = next_version(func.name)
            # try again with next version of name

def mk_flatten_mapping(db, into, cols, preserve=[], as_items=False):
    '''Creates a mapping from original column names (which may have collisions)
    to names that will be distinct among the columns' tables.
    This is meant to be used for several tables that are being joined together.
    @param cols The columns to combine. Duplicates will be removed.
    @param into The table for the new columns.
    @param preserve [sql_gen.Col...] Columns not to rename. Note that these
        columns will be included in the mapping even if they are not in cols.
        The tables of the provided Col objects will be changed to into, so make
        copies of them if you want to keep the original tables.
    @param as_items Whether to return a list of dict items instead of a dict
    @return dict(orig_col=new_col, ...)
        * orig_col: sql_gen.Col(orig_col_name, orig_table)
        * new_col: sql_gen.Col(orig_col_name, into)
        * All mappings use the into table so its name can easily be
          changed for all columns at once
    '''
    cols = lists.uniqify(cols)
    
    items = []
    for col in preserve:
        orig_col = copy.copy(col)
        col.table = into
        items.append((orig_col, col))
    preserve = set(preserve)
    for col in cols:
        if col not in preserve:
            items.append((col, sql_gen.Col(strings.ustr(col), into, col.srcs)))
    
    if not as_items: items = dict(items)
    return items

def flatten(db, into, joins, cols, limit=None, start=None, **kw_args):
    '''For params, see mk_flatten_mapping()
    @return See return value of mk_flatten_mapping()
    '''
    items = mk_flatten_mapping(db, into, cols, as_items=True, **kw_args)
    cols = [sql_gen.NamedCol(new.name, old) for old, new in items]
    run_query_into(db, mk_select(db, joins, cols, order_by=None, limit=limit,
        start=start), into=into, add_pkey_=True)
        # don't cache because the temp table will usually be truncated after use
    return dict(items)

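# Example (illustrative sketch; tables and columns are hypothetical): flatten()
# materializes selected columns of a join into one temp table, returning a map
# from each original column to its (possibly renamed) column in that table:
#     joins = ['plot',
#         sql_gen.Join('taxon', {'taxon_id': sql_gen.join_same_not_null})]
#     mapping = flatten(db, sql_gen.Table('flat'), joins,
#         [sql_gen.Col('id', 'plot'), sql_gen.Col('name', 'taxon')])
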
1018 3079 aaronmk
##### Database structure introspection
1019 2414 aaronmk
1020 3321 aaronmk
#### Expressions
1021
1022 3353 aaronmk
bool_re = r'(?:true|false)'
1023
1024
def simplify_expr(expr):
1025
    expr = expr.replace('(NULL IS NULL)', 'true')
1026
    expr = expr.replace('(NULL IS NOT NULL)', 'false')
1027
    expr = re.sub(r' OR '+bool_re, r'', expr)
1028
    expr = re.sub(bool_re+r' OR ', r'', expr)
1029
    while True:
1030
        expr, n = re.subn(r'\((\([^()]*\))\)', r'\1', expr)
1031
        if n == 0: break
1032
    return expr
1033
1034 3321 aaronmk
name_re = r'(?:\w+|(?:"[^"]*")+)'
1035
1036
def parse_expr_col(str_):
1037
    match = re.match(r'^\('+name_re+r'\(('+name_re+r').*\)\)$', str_)
1038
    if match: str_ = match.group(1)
1039
    return sql_gen.unesc_name(str_)
1040
1041 3351 aaronmk
def map_expr(db, expr, mapping, in_cols_found=None):
1042
    '''Replaces output columns with input columns in an expression.
1043
    @param in_cols_found If set, will be filled in with the expr's (input) cols
1044
    '''
1045
    for out, in_ in mapping.iteritems():
1046
        orig_expr = expr
1047
        out = sql_gen.to_name_only_col(out)
1048
        in_str = sql_gen.to_name_only_col(sql_gen.remove_col_rename(in_)
1049
            ).to_str(db)
1050
1051
        # Replace out both with and without quotes
1052
        expr = expr.replace(out.to_str(db), in_str)
1053 4490 aaronmk
        expr = re.sub(r'(?<!\.)\b'+out.name+r'\b(?!\.)', in_str, expr)
1054 3351 aaronmk
1055
        if in_cols_found != None and expr != orig_expr: # replaced something
1056
            in_cols_found.append(in_)
1057 3353 aaronmk
1058
    return simplify_expr(expr)
1059 3351 aaronmk
1060 3079 aaronmk
#### Tables
1061
1062 4555 aaronmk
def tables(db, schema_like='public', table_like='%', exact=False,
1063
    cacheable=True):
1064 3079 aaronmk
    if exact: compare = '='
1065
    else: compare = 'LIKE'
1066
1067
    module = util.root_module(db.db)
1068
    if module == 'psycopg2':
1069
        conds = [('schemaname', sql_gen.CompareCond(schema_like, compare)),
1070
            ('tablename', sql_gen.CompareCond(table_like, compare))]
1071
        return values(select(db, 'pg_tables', ['tablename'], conds,
1072 4555 aaronmk
            order_by='tablename', cacheable=cacheable, log_level=4))
1073 3079 aaronmk
    elif module == 'MySQLdb':
1074
        return values(run_query(db, 'SHOW TABLES LIKE '+db.esc_value(table_like)
1075
            , cacheable=True, log_level=4))
1076
    else: raise NotImplementedError("Can't list tables for "+module+' database')
1077
1078 4556 aaronmk
def table_exists(db, table, cacheable=True):
1079 3079 aaronmk
    table = sql_gen.as_Table(table)
1080 4556 aaronmk
    return list(tables(db, table.schema, table.name, True, cacheable)) != []
1081 3079 aaronmk
1082 2426 aaronmk
def table_row_count(db, table, recover=None):
1083 2786 aaronmk
    return value(run_query(db, mk_select(db, table, [sql_gen.row_count],
1084 3298 aaronmk
        order_by=None), recover=recover, log_level=3))
1085 2426 aaronmk
1086 2414 aaronmk
def table_cols(db, table, recover=None):
1087
    return list(col_names(select(db, table, limit=0, order_by=None,
1088 2443 aaronmk
        recover=recover, log_level=4)))
1089 2414 aaronmk
1090 4261 aaronmk
pkey_col = 'row_num'
1091
1092 2291 aaronmk
def pkey(db, table, recover=None):
1093 5061 aaronmk
    '''If no pkey, returns the first column in the table.'''
1094
    table = sql_gen.as_Table(table)
1095
1096
    join_cols = ['table_schema', 'table_name', 'constraint_schema',
1097
        'constraint_name']
1098
    tables = [sql_gen.Table('key_column_usage', 'information_schema'),
1099
        sql_gen.Join(sql_gen.Table('table_constraints', 'information_schema'),
1100
            dict(((c, sql_gen.join_same_not_null) for c in join_cols)))]
1101
    cols = [sql_gen.Col('column_name')]
1102
1103
    conds = [('constraint_type', 'PRIMARY KEY'), ('table_name', table.name)]
1104
    schema = table.schema
1105
    if schema != None: conds.append(('table_schema', schema))
1106
    order_by = 'position_in_unique_constraint'
1107
1108
    try: return value(select(db, tables, cols, conds, order_by=order_by,
1109
        limit=1, log_level=4))
1110
    except StopIteration: return table_cols(db, table, recover)[0]
1111 832 aaronmk
1112 2559 aaronmk
not_null_col = 'not_null_col'
1113 2340 aaronmk
1114
def table_not_null_col(db, table, recover=None):
1115
    '''Name assumed to be the value of not_null_col. If not found, uses pkey.'''
1116
    if not_null_col in table_cols(db, table, recover): return not_null_col
1117
    else: return pkey(db, table, recover)
1118
1119 3348 aaronmk
def constraint_cond(db, constraint):
1120
    module = util.root_module(db.db)
1121
    if module == 'psycopg2':
1122
        table_str = sql_gen.Literal(constraint.table.to_str(db))
1123
        name_str = sql_gen.Literal(constraint.name)
1124
        return value(run_query(db, '''\
1125
SELECT consrc
1126
FROM pg_constraint
1127
WHERE
1128
conrelid = '''+table_str.to_str(db)+'''::regclass
1129
AND conname = '''+name_str.to_str(db)+'''
1130
'''
1131
            , cacheable=True, log_level=4))
1132
    else: raise NotImplementedError("Can't list index columns for "+module+
1133
        ' database')
1134
1135 3319 aaronmk
def index_cols(db, index):
1136 853 aaronmk
    '''Can also use this for UNIQUE constraints, because a UNIQUE index is
1137
    automatically created. When you don't know whether something is a UNIQUE
1138
    constraint or a UNIQUE index, use this function.'''
1139 3322 aaronmk
    index = sql_gen.as_Table(index)
1140 1909 aaronmk
    module = util.root_module(db.db)
1141
    if module == 'psycopg2':
1142 3322 aaronmk
        qual_index = sql_gen.Literal(index.to_str(db))
1143
        return map(parse_expr_col, values(run_query(db, '''\
1144
SELECT pg_get_indexdef(indexrelid, generate_series(1, indnatts), true)
1145
FROM pg_index
1146
WHERE indexrelid = '''+qual_index.to_str(db)+'''::regclass
1147 2782 aaronmk
'''
1148
            , cacheable=True, log_level=4)))
1149 1909 aaronmk
    else: raise NotImplementedError("Can't list index columns for "+module+
1150
        ' database')
1151 853 aaronmk
1152 3079 aaronmk
#### Functions
1153
1154
def function_exists(db, function):
1155 3423 aaronmk
    qual_function = sql_gen.Literal(function.to_str(db))
1156
    try:
1157 3425 aaronmk
        select(db, fields=[sql_gen.Cast('regproc', qual_function)],
1158
            recover=True, cacheable=True, log_level=4)
1159 3423 aaronmk
    except DoesNotExistException: return False
1160 4146 aaronmk
    except DuplicateException: return True # overloaded function
1161 3423 aaronmk
    else: return True
1162 3079 aaronmk
1163
##### Structural changes
1164
1165
#### Columns
1166
1167 5020 aaronmk
def add_col(db, table, col, comment=None, if_not_exists=False, **kw_args):
1168 3079 aaronmk
    '''
1169
    @param col TypedCol Name may be versioned, so be sure to propagate any
1170
        renaming back to any source column for the TypedCol.
1171
    @param comment None|str SQL comment used to distinguish columns of the same
1172
        name from each other when they contain different data, to allow the
1173
        ADD COLUMN query to be cached. If not set, query will not be cached.
1174
    '''
1175
    assert isinstance(col, sql_gen.TypedCol)
1176
1177
    while True:
1178
        str_ = 'ALTER TABLE '+table.to_str(db)+' ADD COLUMN '+col.to_str(db)
1179
        if comment != None: str_ += ' '+sql_gen.esc_comment(comment)
1180
1181
        try:
1182
            run_query(db, str_, recover=True, cacheable=True, **kw_args)
1183
            break
1184
        except DuplicateException:
1185 5020 aaronmk
            if if_not_exists: raise
1186 3079 aaronmk
            col.name = next_version(col.name)
1187
            # try again with next version of name
1188
1189
def add_not_null(db, col):
1190
    table = col.table
1191
    col = sql_gen.to_name_only_col(col)
1192
    run_query(db, 'ALTER TABLE '+table.to_str(db)+' ALTER COLUMN '
1193
        +col.to_str(db)+' SET NOT NULL', cacheable=True, log_level=3)
1194
1195 4443 aaronmk
def drop_not_null(db, col):
1196
    table = col.table
1197
    col = sql_gen.to_name_only_col(col)
1198
    run_query(db, 'ALTER TABLE '+table.to_str(db)+' ALTER COLUMN '
1199
        +col.to_str(db)+' DROP NOT NULL', cacheable=True, log_level=3)

row_num_col = '_row_num'

row_num_col_def = sql_gen.TypedCol('', 'serial', nullable=False,
    constraints='PRIMARY KEY')

def add_row_num(db, table, name=row_num_col):
    '''Adds a row number column to a table. Its definition is in
    row_num_col_def. It will be the primary key.'''
    col_def = copy.copy(row_num_col_def)
    col_def.name = name
    add_col(db, table, col_def, comment='', if_not_exists=True, log_level=3)
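
# Usage sketch (hypothetical; assumes `db` is an open DbConn and "stage" is an
# existing table without a primary key):
#     add_row_num(db, sql_gen.Table('stage'))
#     # adds a serial "_row_num" column as the table's primary key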

#### Indexes

def add_pkey(db, table, cols=None, recover=None):
    '''Adds a primary key.
    @param cols [sql_gen.Col,...] The columns in the primary key.
        Defaults to the first column in the table.
    @pre The table must not already have a primary key.
    '''
    table = sql_gen.as_Table(table)
    if cols == None: cols = [pkey(db, table, recover)]
    col_strs = [sql_gen.to_name_only_col(v).to_str(db) for v in cols]

    run_query(db, 'ALTER TABLE '+table.to_str(db)+' ADD PRIMARY KEY ('
        +(', '.join(col_strs))+')', recover=True, cacheable=True, log_level=3,
        log_ignore_excs=(DuplicateException,))
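
# Usage sketch (hypothetical; assumes "taxa" exists and has no primary key yet):
#     add_pkey(db, 'taxa')  # uses the table's first column, as reported by pkey()
# Pass cols=[...] (sql_gen.Col objects) to use a different set of key columns.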

def add_index(db, exprs, table=None, unique=False, ensure_not_null_=True):
    '''Adds an index on column(s) or expression(s) if it doesn't already exist.
    Currently, only function calls and literal values are supported as
    expressions.
    @param ensure_not_null_ If set, translates NULL values to sentinel values.
        This allows indexes to be used for comparisons where NULLs are equal.
    '''
    exprs = lists.mk_seq(exprs)

    # Parse exprs
    old_exprs = exprs[:]
    exprs = []
    cols = []
    for expr in old_exprs:
        expr = sql_gen.as_Col(expr, table)

        # Handle nullable columns
        if ensure_not_null_:
            try: expr = sql_gen.ensure_not_null(db, expr)
            except KeyError: pass # unknown type, so just create a plain index

        # Extract col
        expr = copy.deepcopy(expr) # don't modify input!
        col = expr
        if isinstance(expr, sql_gen.FunctionCall): col = expr.args[0]
        expr = sql_gen.cast_literal(expr)
        if not isinstance(expr, (sql_gen.Expr, sql_gen.Col)):
            expr = sql_gen.Expr(expr)

        # Extract table
        if table == None:
            assert sql_gen.is_table_col(col)
            table = col.table

        if isinstance(col, sql_gen.Col): col.table = None

        exprs.append(expr)
        cols.append(col)

    table = sql_gen.as_Table(table)

    # Add index
    str_ = 'CREATE'
    if unique: str_ += ' UNIQUE'
    str_ += ' INDEX ON '+table.to_str(db)+' ('+(
        ', '.join((v.to_str(db) for v in exprs)))+')'
    run_query(db, str_, recover=True, cacheable=True, log_level=3)
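
# Usage sketches (hypothetical; assume `db` is an open DbConn):
#     add_index(db, 'name', 'taxa')                        # plain index on taxa.name
#     add_index(db, ['genus', 'species'], 'taxa', unique=True)
# With ensure_not_null_ left on, nullable columns are wrapped so that NULLs are
# translated to sentinel values, letting the index serve NULLs-equal comparisons.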

already_indexed = object() # tells add_indexes() the pkey has already been added

def add_indexes(db, table, has_pkey=True):
    '''Adds an index on each column in a table.
    @param has_pkey bool|already_indexed Whether a pkey instead of a regular
        index should be added on the first column.
        * If already_indexed, the pkey is assumed to have already been added.
    '''
    cols = table_cols(db, table)
    if has_pkey:
        if has_pkey is not already_indexed: add_pkey(db, table)
        cols = cols[1:]
    for col in cols: add_index(db, col, table)
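
# Usage sketch (hypothetical): index every column of a freshly created staging
# table whose pkey was already added by create_table():
#     add_indexes(db, 'stage', has_pkey=already_indexed)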

#### Tables

### Maintenance

def analyze(db, table):
    table = sql_gen.as_Table(table)
    run_query(db, 'ANALYZE '+table.to_str(db), log_level=3)

def autoanalyze(db, table):
    if db.autoanalyze: analyze(db, table)

def vacuum(db, table):
    table = sql_gen.as_Table(table)
    db.with_autocommit(lambda: run_query(db, 'VACUUM ANALYZE '+table.to_str(db),
        log_level=3))
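
# Usage sketch (hypothetical): refresh planner statistics after a bulk load, and
# only vacuum when reclaiming space matters (VACUUM is wrapped in
# with_autocommit() because it cannot run inside a transaction block):
#     analyze(db, 'occurrences')
#     vacuum(db, 'occurrences')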

### Lifecycle

def drop(db, type_, name):
    name = sql_gen.as_Name(name)
    run_query(db, 'DROP '+type_+' IF EXISTS '+name.to_str(db)+' CASCADE')

def drop_table(db, table): drop(db, 'TABLE', table)
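
# Usage sketch (hypothetical; note the CASCADE, which also drops dependents):
#     drop_table(db, 'stage_old')
#     drop(db, 'VIEW', 'analysis_view')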

def create_table(db, table, cols=[], has_pkey=True, col_indexes=True,
    like=None):
    '''Creates a table.
    @param cols [sql_gen.TypedCol,...] The column names and types
    @param has_pkey If set, the first column becomes the primary key.
    @param col_indexes bool|[ref]
        * If True, indexes will be added on all non-pkey columns.
        * If a list reference, [0] will be set to a function to do this.
          This can be used to delay index creation until the table is populated.
    @param like sql_gen.Table If set, the new table copies this table's column
        definitions (LIKE ... INCLUDING ALL) in addition to cols.
    '''
    table = sql_gen.as_Table(table)

    if like != None:
        cols = [sql_gen.CustomCode('LIKE '+like.to_str(db)+' INCLUDING ALL')
            ]+cols
    if has_pkey:
        cols[0] = pkey = copy.copy(cols[0]) # don't modify input!
        pkey.constraints = 'PRIMARY KEY'

    temp = table.is_temp and not db.debug_temp
        # temp tables permanent in debug_temp mode

    # Create table
    def create():
        str_ = 'CREATE'
        if temp: str_ += ' TEMP'
        str_ += ' TABLE '+table.to_str(db)+' (\n'
        str_ += '\n, '.join(c.to_str(db) for c in cols)
        str_ += '\n);'

        run_query(db, str_, recover=True, cacheable=True, log_level=2,
            log_ignore_excs=(DuplicateException,))
    if table.is_temp:
        while True:
            try:
                create()
                break
            except DuplicateException:
                table.name = next_version(table.name)
                # try again with next version of name
    else: create()

    # Add indexes
    if has_pkey: has_pkey = already_indexed
    def add_indexes_(): add_indexes(db, table, has_pkey)
    if isinstance(col_indexes, list): col_indexes[0] = add_indexes_ # defer
    elif col_indexes: add_indexes_() # add now
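
# Usage sketch (hypothetical): create a staging table and defer its column
# indexes until after it has been populated:
#     col_indexes = [None]
#     create_table(db, 'stage', cols=[sql_gen.TypedCol('id', 'serial'),
#         sql_gen.TypedCol('name', 'text')], col_indexes=col_indexes)
#     ...  # bulk-load rows into stage
#     col_indexes[0]()  # now add the deferred column indexes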

def copy_table_struct(db, src, dest):
    '''Creates a structure-only copy of a table. (Does not copy data.)'''
    create_table(db, dest, has_pkey=False, col_indexes=False, like=src)

### Data

def truncate(db, table, schema='public', **kw_args):
    '''For kw_args, see run_query()'''
    table = sql_gen.as_Table(table, schema)
    return run_query(db, 'TRUNCATE '+table.to_str(db)+' CASCADE', **kw_args)

def empty_temp(db, tables):
    tables = lists.mk_seq(tables)
    for table in tables: truncate(db, table, log_level=3)

def empty_db(db, schema='public', **kw_args):
    '''For kw_args, see tables()'''
    for table in tables(db, schema, **kw_args): truncate(db, table, schema)
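
# Usage sketch (hypothetical): free the space used by temp tables once a load
# step no longer needs them (`stage_tables` is a list of sql_gen temp tables):
#     empty_temp(db, stage_tables)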

def distinct_table(db, table, distinct_on):
    '''Creates a copy of a temp table which is distinct on the given columns.
    The old and new tables will both get an index on these columns, to
    facilitate merge joins.
    @param distinct_on If empty, creates a table with one row. This is useful if
        your distinct_on columns are all literal values.
    @return The new table.
    '''
    new_table = sql_gen.suffixed_table(table, '_distinct')
    distinct_on = filter(sql_gen.is_table_col, distinct_on)

    copy_table_struct(db, table, new_table)

    limit = None
    if distinct_on == []: limit = 1 # one sample row
    else:
        add_index(db, distinct_on, new_table, unique=True)
        add_index(db, distinct_on, table) # for join optimization

    insert_select(db, new_table, None, mk_select(db, table, order_by=None,
        limit=limit), ignore=True)
        # ignore=True skips rows that would violate new_table's unique index
    analyze(db, new_table)

    return new_table
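
# Usage sketch (hypothetical): `stage` is a temp staging table and `key_cols` its
# natural-key columns as sql_gen.Col objects bound to `stage`:
#     uniq = distinct_table(db, stage, key_cols)
#     # uniq is a new table (named like "stage_distinct") that is unique on
#     # key_cols; both tables now have an index on those columns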