Project

General

Profile

1 11 aaronmk
# Database access
2
3 1869 aaronmk
import copy
4 11 aaronmk
import re
5 3238 aaronmk
import time
6 865 aaronmk
import warnings
7 11 aaronmk
8 300 aaronmk
import exc
9 1909 aaronmk
import dicts
10 1893 aaronmk
import iters
11 1960 aaronmk
import lists
12 3241 aaronmk
import profiling
13 1889 aaronmk
from Proxy import Proxy
14 1872 aaronmk
import rand
15 2217 aaronmk
import sql_gen
16 862 aaronmk
import strings
17 131 aaronmk
import util
18 11 aaronmk
19 832 aaronmk
##### Exceptions
20
21 2804 aaronmk
def get_cur_query(cur, input_query=None):
    '''Returns the query most recently run on a cursor.

    Prefers the driver's record of the actually-executed query
    (psycopg2's `cur.query`, MySQLdb's `cur._last_executed`); falls back to
    the caller-supplied input_query, marked with an "[input] " prefix.
    '''
    raw_query = None
    if hasattr(cur, 'query'): raw_query = cur.query
    elif hasattr(cur, '_last_executed'): raw_query = cur._last_executed

    if raw_query != None: return raw_query
    else: return '[input] '+strings.ustr(input_query)
28 14 aaronmk
29 2170 aaronmk
def _add_cursor_info(e, *args, **kw_args):
    '''Annotates an exception with the query that was being run.
    For params, see get_cur_query()
    '''
    exc.add_msg(e, 'query: '+strings.ustr(get_cur_query(*args, **kw_args)))
32 135 aaronmk
33 300 aaronmk
class DbException(exc.ExceptionWithCause):
    '''Base class for database errors.

    @param cur If given, the cursor's current query is appended to the message.
    '''
    def __init__(self, msg, cause=None, cur=None):
        exc.ExceptionWithCause.__init__(self, msg, cause, cause_newline=True)
        if cur != None: _add_cursor_info(self, cur)
37
38 2143 aaronmk
class ExceptionWithName(DbException):
    '''A DbException about a specific named object; stores the name in
    self.name'''
    def __init__(self, name, cause=None):
        DbException.__init__(self, 'for name: '
            +strings.as_tt(strings.ustr(name)), cause)
        self.name = name
43 360 aaronmk
44 3109 aaronmk
class ExceptionWithValue(DbException):
    '''A DbException about a specific value; stores the value in self.value'''
    def __init__(self, value, cause=None):
        DbException.__init__(self, 'for value: '
            +strings.as_tt(strings.urepr(value)), cause)
        self.value = value
49
50 2945 aaronmk
class ExceptionWithNameType(DbException):
    '''A DbException with both the type and the name of the related object;
    stores them in self.type and self.name'''
    def __init__(self, type_, name, cause=None):
        DbException.__init__(self, 'for type: '+strings.as_tt(strings.ustr(
            type_))+'; name: '+strings.as_tt(name), cause)
        self.type = type_
        self.name = name
56
57 2306 aaronmk
class ConstraintException(DbException):
    '''Violation of a database constraint.

    @param name the constraint's name
    @param cond the constraint's condition, or None if not known
    @param cols the constrained columns ([] if not known)
    '''
    def __init__(self, name, cond, cols, cause=None):
        msg = 'Violated '+strings.as_tt(name)+' constraint'
        if cond != None: msg += ' with condition '+strings.as_tt(cond)
        if cols != []: msg += ' on columns: '+strings.as_tt(', '.join(cols))
        DbException.__init__(self, msg, cause)
        self.name = name
        self.cond = cond
        self.cols = cols
66 11 aaronmk
67 2523 aaronmk
class MissingCastException(DbException):
    '''A cast to the needed type is missing; stores the type in self.type and
    the column (if any) in self.col'''
    def __init__(self, type_, col=None, cause=None):
        msg = 'Missing cast to type '+strings.as_tt(type_)
        if col != None: msg += ' on column: '+strings.as_tt(col)
        DbException.__init__(self, msg, cause)
        self.type = type_
        self.col = col
74
75 5576 aaronmk
class EncodingException(ExceptionWithName):
    '''a value had an invalid byte sequence for the database's encoding
    (raised by parse_exception()); self.name is the encoding'''
76
77 2306 aaronmk
class DuplicateKeyException(ConstraintException):
    '''a duplicate key value violated a unique constraint/index'''
78 13 aaronmk
79 2306 aaronmk
class NullValueException(ConstraintException):
    '''a NULL value violated a NOT NULL constraint'''
80 13 aaronmk
81 3346 aaronmk
class CheckException(ConstraintException):
    '''a new row violated a CHECK constraint'''
82
83 3109 aaronmk
class InvalidValueException(ExceptionWithValue):
    '''a value had invalid input syntax or was out of range for its type'''
84 2239 aaronmk
85 2945 aaronmk
class DuplicateException(ExceptionWithNameType):
    '''a database object (table, constraint, function, ...) already exists'''
86 2143 aaronmk
87 3419 aaronmk
class DoesNotExistException(ExceptionWithNameType):
    '''a referenced database object does not exist'''
88
89 89 aaronmk
class EmptyRowException(DbException):
    '''a needed row was empty/missing
    NOTE(review): not raised anywhere in this module -- confirm callers'''
90
91 865 aaronmk
##### Warnings
92
93
class DbWarning(UserWarning):
    '''base class for warnings emitted by this module'''
94
95 1930 aaronmk
##### Result retrieval
96
97
def col_names(cur):
    '''Generates the column names of a cursor's result set'''
    return (col_desc[0] for col_desc in cur.description)
98
99
def rows(cur):
    '''Iterates over a cursor's rows, stopping when fetchone() returns None'''
    def fetch(): return cur.fetchone()
    return iter(fetch, None)
100
101
def consume_rows(cur):
    '''Used to fetch all rows so result will be cached'''
    iters.consume_iter(rows(cur))
104
105
def next_row(cur):
    '''Fetches the next row; raises StopIteration when exhausted'''
    row_iter = rows(cur)
    return row_iter.next()
106
107
def row(cur):
    '''Returns the next row, then consumes the remaining rows so the result
    will be cached'''
    row_ = next_row(cur)
    consume_rows(cur)
    return row_
111
112
def next_value(cur):
    '''Returns the first column of the next row'''
    row_ = next_row(cur)
    return row_[0]
113
114
def value(cur):
    '''Returns the first column of the next row, consuming the rest'''
    row_ = row(cur)
    return row_[0]
115
116
def values(cur):
    '''Iterates over the first column of each row'''
    def get_value(): return next_value(cur)
    return iters.func_iter(get_value)
117
118
def value_or_none(cur):
    '''Like value(), but returns None instead of raising StopIteration when
    there are no rows'''
    try: return value(cur)
    except StopIteration: return None
121
122 2762 aaronmk
##### Escaping
123 2101 aaronmk
124 2573 aaronmk
def esc_name_by_module(module, name):
    '''Quotes an identifier using the given DB-API module's quote character:
    `"` for psycopg2 (or None), backquote for MySQLdb.
    @raises NotImplementedError for any other module
    '''
    if module == 'psycopg2' or module == None: quote = '"'
    elif module == 'MySQLdb': quote = '`'
    else: raise NotImplementedError("Can't escape name for "+module+' database')
    return sql_gen.esc_name(name, quote)
129 2101 aaronmk
130
def esc_name_by_engine(engine, name, **kw_args):
    '''Escapes a name using the quoting rules of the given engine (a key of
    db_engines)'''
    return esc_name_by_module(db_engines[engine][0], name, **kw_args)
132
133
def esc_name(db, name, **kw_args):
    '''Escapes a name using the quoting rules of the connection's driver'''
    return esc_name_by_module(util.root_module(db.db), name, **kw_args)
135
136
def qual_name(db, schema, table):
    '''Returns the escaped, schema-qualified name of a table.
    @param schema may be None for an unqualified name
    '''
    def esc_name_(name): return esc_name(db, name)
    table = esc_name_(table)
    if schema != None: return esc_name_(schema)+'.'+table
    else: return table
141
142 1869 aaronmk
##### Database connections
143 1849 aaronmk
144 2097 aaronmk
# the standard connection-config keys; engine-specific renames are applied via
# the mappings in db_engines (e.g. password->passwd for MySQLdb)
db_config_names = ['engine', 'host', 'user', 'password', 'database', 'schemas']
145 1926 aaronmk
146 1869 aaronmk
# engine name -> (DB-API module name, renames of standard config keys)
db_engines = {
    'MySQL': ('MySQLdb', {'password': 'passwd', 'database': 'db'}),
    'PostgreSQL': ('psycopg2', {}),
}
150
151
# all known driver-level error classes; extended per-driver by _add_module()
DatabaseErrors_set = set([DbException])
DatabaseErrors = tuple(DatabaseErrors_set) # usable in an `except` clause
153
154
def _add_module(module):
    '''Registers a DB-API module's DatabaseError class in DatabaseErrors'''
    DatabaseErrors_set.add(module.DatabaseError)
    global DatabaseErrors
    DatabaseErrors = tuple(DatabaseErrors_set)
158
159
def db_config_str(db_config):
    '''Formats a db_config dict for display, e.g. "PostgreSQL database x"'''
    return db_config['engine']+' database '+db_config['database']
161
162 2448 aaronmk
def log_debug_none(msg, level=2):
    '''default debug logger: discards all messages (DbConn compares against
    this by value to detect whether debugging is enabled)'''
163 1901 aaronmk
164 1849 aaronmk
class DbConn:
    '''A database connection with query caching, profiling, and savepoint
    support.

    Wraps a DB-API connection (psycopg2 or MySQLdb, per db_engines). The
    underlying connection is opened lazily on first access to self.db.
    '''
    def __init__(self, db_config, autocommit=True, caching=True,
        log_debug=log_debug_none, debug_temp=False, src=None):
        '''
        @param db_config dict with keys from db_config_names
        @param autocommit Whether to commit after each top-level query
        @param caching Whether to cache query results (see run_query())
        @param log_debug Debug-message callback: f(msg, level=2)
        @param debug_temp Whether temporary objects should instead be permanent.
            This assists in debugging the internal objects used by the program.
        @param src In autocommit mode, will be included in a comment in every
            query, to help identify the data source in pg_stat_activity.
        '''
        self.db_config = db_config
        self.autocommit = autocommit
        self.caching = caching
        self.log_debug = log_debug
        self.debug = log_debug != log_debug_none
        self.debug_temp = debug_temp
        self.src = src
        self.autoanalyze = False
        self.autoexplain = False
        self.profile_row_ct = None # if set, passed to the profiler's stop()

        self._savepoint = 0 # nesting level of open transactions/savepoints
        self._reset()

    def __getattr__(self, name):
        if name == '__dict__': raise Exception('getting __dict__')
        if name == 'db': return self._db() # lazily opens the connection
        else: raise AttributeError()

    def __getstate__(self):
        state = copy.copy(self.__dict__) # shallow copy
        state['log_debug'] = None # don't pickle the debug callback
        state['_DbConn__db'] = None # don't pickle the connection
        return state

    def clear_cache(self): self.query_results = {}

    def _reset(self):
        '''Returns the wrapper to its never-connected state'''
        self.clear_cache()
        assert self._savepoint == 0
        self._notices_seen = set()
        self.__db = None

    def connected(self): return self.__db != None

    def close(self):
        if not self.connected(): return

        # Record that the automatic transaction is now closed
        self._savepoint -= 1

        self.db.close()
        self._reset()

    def reconnect(self):
        # Do not do this in test mode as it would roll back everything
        if self.autocommit: self.close()
        # Connection will be reopened automatically on first query

    def _db(self):
        '''Returns the underlying DB-API connection, opening it if needed'''
        if self.__db == None:
            # Process db_config
            db_config = self.db_config.copy() # don't modify input!
            schemas = db_config.pop('schemas', None)
            module_name, mappings = db_engines[db_config.pop('engine')]
            module = __import__(module_name)
            _add_module(module)
            # rename standard config keys to the driver's own param names
            for orig, new in mappings.items():
                try: util.rename_key(db_config, orig, new)
                except KeyError: pass

            # Connect
            self.__db = module.connect(**db_config)

            # Record that a transaction is already open
            self._savepoint += 1

            # Configure connection
            if hasattr(self.db, 'set_isolation_level'):
                import psycopg2.extensions
                self.db.set_isolation_level(
                    psycopg2.extensions.ISOLATION_LEVEL_READ_COMMITTED)
            if schemas != None:
                search_path = [self.esc_name(s) for s in schemas.split(',')]
                search_path.append(value(run_query(self, 'SHOW search_path',
                    log_level=4)))
                run_query(self, 'SET search_path TO '+(','.join(search_path)),
                    log_level=3)

        return self.__db

    class DbCursor(Proxy):
        '''A cursor that records its result so it can be cached'''
        def __init__(self, outer):
            Proxy.__init__(self, outer.db.cursor())
            self.outer = outer
            self.query_results = outer.query_results
            self.query_lookup = None
            self.result = []

        def execute(self, query):
            self._is_insert = query.startswith('INSERT')
            self.query_lookup = query
            try:
                try: cur = self.inner.execute(query)
                finally: self.query = get_cur_query(self.inner, query)
            except Exception as e:
                self.result = e # cache the exception as the result
                self._cache_result()
                raise

            # Always cache certain queries
            query = sql_gen.lstrip(query)
            if query.startswith('CREATE') or query.startswith('ALTER'):
                # structural changes
                # Rest of query must be unique in the face of name collisions,
                # so don't cache ADD COLUMN unless it has distinguishing comment
                if query.find('ADD COLUMN') < 0 or query.endswith('*/'):
                    self._cache_result()
            elif self.rowcount == 0 and query.startswith('SELECT'): # empty
                consume_rows(self) # fetch all rows so result will be cached

            return cur

        def fetchone(self):
            row = self.inner.fetchone()
            if row != None: self.result.append(row)
            # otherwise, fetched all rows
            else: self._cache_result()
            return row

        def _cache_result(self):
            # For inserts that return a result set, don't cache result set since
            # inserts are not idempotent. Other non-SELECT queries don't have
            # their result set read, so only exceptions will be cached (an
            # invalid query will always be invalid).
            if self.query_results != None and (not self._is_insert
                or isinstance(self.result, Exception)):

                assert self.query_lookup != None
                self.query_results[self.query_lookup] = self.CacheCursor(
                    util.dict_subset(dicts.AttrsDictView(self),
                    ['query', 'result', 'rowcount', 'description']))

        class CacheCursor:
            '''Replays a cached result set (or re-raises a cached exception)'''
            def __init__(self, cached_result): self.__dict__ = cached_result

            def execute(self, *args, **kw_args):
                if isinstance(self.result, Exception): raise self.result
                # otherwise, result is a rows list
                self.iter = iter(self.result)

            def fetchone(self):
                try: return next(self.iter)
                except StopIteration: return None

    def esc_value(self, value):
        '''Escapes a value as an SQL literal'''
        try: str_ = self.mogrify('%s', [value])
        except NotImplementedError as e:
            module = util.root_module(self.db)
            if module == 'MySQLdb':
                import _mysql
                str_ = _mysql.escape_string(value)
            else: raise e
        return strings.to_unicode(str_)

    def esc_name(self, name): return esc_name(self, name) # calls global func

    def std_code(self, str_):
        '''Standardizes SQL code.
        * Ensures that string literals are prefixed by `E`
        '''
        if str_.startswith("'"): str_ = 'E'+str_
        return str_

    def can_mogrify(self):
        # only psycopg2 cursors provide mogrify()
        module = util.root_module(self.db)
        return module == 'psycopg2'

    def mogrify(self, query, params=None):
        if self.can_mogrify(): return self.db.cursor().mogrify(query, params)
        else: raise NotImplementedError("Can't mogrify query")

    def set_encoding(self, encoding):
        encoding_str = sql_gen.Literal(encoding)
        run_query(self, 'SET NAMES '+encoding_str.to_str(self))

    def print_notices(self):
        '''Logs each server notice not yet seen on this connection'''
        if hasattr(self.db, 'notices'):
            for msg in self.db.notices:
                if msg not in self._notices_seen:
                    self._notices_seen.add(msg)
                    self.log_debug(msg, level=2)

    def run_query(self, query, cacheable=False, log_level=2,
        debug_msg_ref=None):
        '''Runs a single query, using/populating the query cache if cacheable.
        @param debug_msg_ref If specified, the log message will be returned in
            this instead of being output. This allows you to filter log messages
            depending on the result of the query.
        @return the cursor (a DbCursor/CacheCursor when cacheable)
        '''
        assert query != None

        if self.autocommit and self.src != None:
            query = sql_gen.esc_comment(self.src)+'\t'+query

        if not self.caching: cacheable = False
        used_cache = False

        if self.debug:
            profiler = profiling.ItersProfiler(start_now=True, iter_text='row')
        try:
            # Get cursor
            if cacheable:
                try: cur = self.query_results[query]
                except KeyError: cur = self.DbCursor(self)
                else: used_cache = True
            else: cur = self.db.cursor()

            # Run query
            try: cur.execute(query)
            except Exception as e:
                _add_cursor_info(e, self, query)
                raise
            else: self.do_autocommit()
        finally:
            if self.debug:
                profiler.stop(self.profile_row_ct)

                ## Log or return query

                query = strings.ustr(get_cur_query(cur, query))
                # Put the src comment on a separate line in the log file
                query = query.replace('\t', '\n', 1)

                msg = 'DB query: '

                if used_cache: msg += 'cache hit'
                elif cacheable: msg += 'cache miss'
                else: msg += 'non-cacheable'

                msg += ':\n'+profiler.msg()+'\n'+strings.as_code(query, 'SQL')

                if debug_msg_ref != None: debug_msg_ref[0] = msg
                else: self.log_debug(msg, log_level)

                self.print_notices()

        return cur

    def is_cached(self, query): return query in self.query_results

    def with_autocommit(self, func):
        '''Runs func() at isolation level AUTOCOMMIT, restoring the previous
        level afterwards'''
        import psycopg2.extensions

        prev_isolation_level = self.db.isolation_level
        self.db.set_isolation_level(
            psycopg2.extensions.ISOLATION_LEVEL_AUTOCOMMIT)
        try: return func()
        finally: self.db.set_isolation_level(prev_isolation_level)

    def with_savepoint(self, func):
        '''Runs func() inside a transaction (at the top level) or a savepoint
        (when nested), committing/releasing on success and rolling back on
        error.
        '''
        top = self._savepoint == 0
        savepoint = 'level_'+str(self._savepoint)

        if self.debug:
            self.log_debug('Begin transaction', level=4)
            profiler = profiling.ItersProfiler(start_now=True, iter_text='row')

        # Must happen before running queries so they don't get autocommitted
        self._savepoint += 1

        if top: query = 'START TRANSACTION ISOLATION LEVEL READ COMMITTED'
        else: query = 'SAVEPOINT '+savepoint
        self.run_query(query, log_level=4)
        try:
            return_val = func()
            # bug fix: this COMMIT previously followed a `return` and was
            # unreachable, so the top-level transaction was never committed
            if top: self.run_query('COMMIT', log_level=4)
            return return_val
        except:
            if top: query = 'ROLLBACK'
            else: query = 'ROLLBACK TO SAVEPOINT '+savepoint
            self.run_query(query, log_level=4)

            raise
        finally:
            # Always release savepoint, because after ROLLBACK TO SAVEPOINT,
            # "The savepoint remains valid and can be rolled back to again"
            # (http://www.postgresql.org/docs/8.3/static/sql-rollback-to.html).
            if not top:
                self.run_query('RELEASE SAVEPOINT '+savepoint, log_level=4)

            self._savepoint -= 1
            assert self._savepoint >= 0

            if self.debug:
                profiler.stop(self.profile_row_ct)
                self.log_debug('End transaction\n'+profiler.msg(), level=4)

            self.do_autocommit() # OK to do this after ROLLBACK TO SAVEPOINT

    def do_autocommit(self):
        '''Autocommits if outside savepoint'''
        assert self._savepoint >= 1
        if self.autocommit and self._savepoint == 1:
            self.log_debug('Autocommitting', level=4)
            self.db.commit()

    def col_info(self, col, cacheable=True):
        '''Looks up a column's type info in information_schema.columns.
        @return sql_gen.TypedCol with the column's type, default, and
            nullability
        @raises sql_gen.NoUnderlyingTableException if the column's table does
            not exist
        '''
        table = sql_gen.Table('columns', 'information_schema')
        cols = [sql_gen.Col('data_type'), sql_gen.Col('udt_name'),
            'column_default', sql_gen.Cast('boolean',
            sql_gen.Col('is_nullable'))]

        conds = [('table_name', col.table.name),
            ('column_name', strings.ustr(col.name))]
        schema = col.table.schema
        if schema != None: conds.append(('table_schema', schema))

        cur = select(self, table, cols, conds, order_by='table_schema', limit=1,
            cacheable=cacheable, log_level=4) # TODO: order by search_path order
        try: type_, extra_type, default, nullable = row(cur)
        except StopIteration: raise sql_gen.NoUnderlyingTableException(col)
        default = sql_gen.as_Code(default, self)
        if type_ == 'USER-DEFINED': type_ = extra_type
        elif type_ == 'ARRAY':
            # udt_name for arrays is the element type prefixed with "_"
            type_ = sql_gen.ArrayType(strings.remove_prefix('_', extra_type,
                require=True))

        return sql_gen.TypedCol(col.name, type_, default, nullable)

    def TempFunction(self, name):
        '''Returns a Function in the temp schema (permanent in debug_temp
        mode)'''
        if self.debug_temp: schema = None
        else: schema = 'pg_temp'
        return sql_gen.Function(name, schema)
498 1849 aaronmk
499 1869 aaronmk
connect = DbConn # alias of the connection class
500
501 832 aaronmk
##### Recoverable querying
502 15 aaronmk
503 5577 aaronmk
def parse_exception(db, e, recover=False):
    '''Parses a DB error message and re-raises it as the corresponding typed
    exception (EncodingException, DuplicateKeyException, NullValueException,
    CheckException, InvalidValueException, MissingCastException,
    DuplicateException, DoesNotExistException).
    @param recover Whether auto-rollback is available; required for the
        lookups (index_cols(), constraint_cond()) that run their own queries
    @raises the original exception if no pattern matched
    '''
    msg = strings.ustr(e.args[0])
    msg = re.sub(r'^(?:PL/Python: )?ValueError: ', r'', msg)

    match = re.match(r'^invalid byte sequence for encoding "(.+?)":', msg)
    if match:
        encoding, = match.groups()
        raise EncodingException(encoding, e)

    def make_DuplicateKeyException(constraint, e):
        cols = []
        cond = None
        if recover: # need auto-rollback to run index_cols()
            try:
                cols = index_cols(db, constraint)
                cond = index_cond(db, constraint)
            except NotImplementedError: pass
        return DuplicateKeyException(constraint, cond, cols, e)

    match = re.match(r'^duplicate key value violates unique constraint "(.+?)"',
        msg)
    if match:
        constraint, = match.groups()
        raise make_DuplicateKeyException(constraint, e)

    match = re.match(r'^could not create unique index "(.+?)"\n'
        r'DETAIL:  Key .+? is duplicated', msg)
    if match:
        constraint, = match.groups()
        raise DuplicateKeyException(constraint, None, [], e)

    match = re.match(r'^null value in column "(.+?)" violates not-null'
        r' constraint', msg)
    if match:
        col, = match.groups()
        raise NullValueException('NOT NULL', None, [col], e)

    match = re.match(r'^new row for relation "(.+?)" violates check '
        r'constraint "(.+?)"', msg)
    if match:
        table, constraint = match.groups()
        constraint = sql_gen.Col(constraint, table)
        cond = None
        if recover: # need auto-rollback to run constraint_cond()
            try: cond = constraint_cond(db, constraint)
            except NotImplementedError: pass
        raise CheckException(constraint.to_str(db), cond, [], e)

    match = re.match(r'^(?:invalid input (?:syntax|value)\b[^:]*'
        r'|.+? out of range)(?:: "(.+?)")?', msg)
    if match:
        value, = match.groups()
        value = util.do_ignore_none(strings.to_unicode, value)
        raise InvalidValueException(value, e)

    match = re.match(r'^column "(.+?)" is of type (.+?) but expression '
        r'is of type', msg)
    if match:
        col, type_ = match.groups()
        raise MissingCastException(type_, col, e)

    match = re.match(r'^could not determine polymorphic type because '
        r'input has type "unknown"', msg)
    if match: raise MissingCastException('text', None, e)

    match = re.match(r'^.+? types (.+?) and (.+?) cannot be matched', msg)
    if match:
        type0, type1 = match.groups()
        raise MissingCastException(type0, None, e)

    typed_name_re = r'^(\S+) "?(.+?)"?(?: of relation ".+?")?'

    match = re.match(typed_name_re+r'.*? already exists', msg)
    if match:
        type_, name = match.groups()
        raise DuplicateException(type_, name, e)

    match = re.match(r'more than one (\S+) named ""(.+?)""', msg)
    if match:
        type_, name = match.groups()
        raise DuplicateException(type_, name, e)

    match = re.match(typed_name_re+r' does not exist', msg)
    if match:
        type_, name = match.groups()
        if type_ == 'function':
            match = re.match(r'^(.+?)\(.*\)$', name)
            if match: # includes params, so is call rather than cast to regproc
                function_name, = match.groups()
                func = sql_gen.Function(function_name)
                if function_exists(db, func) and msg.find('CAST') < 0:
                    # not found only because of a missing cast
                    type_ = function_param0_type(db, func)
                    if type_ == 'anyelement': type_ = 'text'
                    raise MissingCastException(type_, None, e)
        raise DoesNotExistException(type_, name, e)

    raise # no specific exception raised
601
602 2139 aaronmk
def with_savepoint(db, func):
    '''Module-level convenience wrapper for DbConn.with_savepoint()'''
    return db.with_savepoint(func)
603 11 aaronmk
604 2791 aaronmk
def run_query(db, query, recover=None, cacheable=False, log_level=2,
    log_ignore_excs=None, **kw_args):
    '''Runs a query, optionally inside a savepoint for error recovery.
    For other params, see DbConn.run_query().
    @param recover Whether to run the query in a savepoint so an error rolls
        back only this query
    @param log_ignore_excs The log_level will be increased by 2 if the query
        throws one of these exceptions.
    '''
    if recover == None: recover = False
    if log_ignore_excs == None: log_ignore_excs = ()
    log_ignore_excs = tuple(log_ignore_excs)
    debug_msg_ref = [None] # defers logging until the final log_level is known

    query = with_explain_comment(db, query)

    try:
        try:
            def run(): return db.run_query(query, cacheable, log_level,
                debug_msg_ref, **kw_args)
            if recover and not db.is_cached(query):
                return with_savepoint(db, run)
            else: return run() # don't need savepoint if cached
        except Exception as e: parse_exception(db, e, recover)
    except log_ignore_excs:
        log_level += 2
        raise
    finally:
        if debug_msg_ref[0] != None: db.log_debug(debug_msg_ref[0], log_level)
627 830 aaronmk
628 832 aaronmk
##### Basic queries
629
630 3256 aaronmk
def is_explainable(query):
    '''Whether the query is one of the statement types EXPLAIN accepts'''
    # See <http://www.postgresql.org/docs/8.3/static/sql-explain.html#AEN57749>
    return re.match(r'^(?:SELECT|INSERT|UPDATE|DELETE|VALUES|EXECUTE|DECLARE)\b'
        , query)
634 3256 aaronmk
635 3263 aaronmk
def explain(db, query, **kw_args):
    '''Runs EXPLAIN on a query and returns the plan as a single string.
    For params, see run_query().
    '''
    kw_args.setdefault('log_level', 4)

    return strings.ustr(strings.join_lines(values(run_query(db,
        'EXPLAIN '+query, recover=True, cacheable=True, **kw_args))))
        # not a higher log_level because it's useful to see what query is being
        # run before it's executed, which EXPLAIN effectively provides
645
646 3265 aaronmk
def has_comment(query):
    '''Whether the query already ends in a comment'''
    return query[-2:] == '*/'
647
648
def with_explain_comment(db, query, **kw_args):
    '''If db.autoexplain is set, appends the query's EXPLAIN plan to it as a
    comment (skipping queries that already have one or can't be EXPLAINed)'''
    if db.autoexplain and not has_comment(query) and is_explainable(query):
        query += '\n'+sql_gen.esc_comment(' EXPLAIN:\n'
            +explain(db, query, **kw_args))
    return query
653
654 2153 aaronmk
def next_version(name):
    '''Increments a name's "#<n>" version suffix, e.g.
    "t" -> "t#1" -> "t#2" (used to resolve name collisions)'''
    version = 1 # first existing name was version 0
    match = re.match(r'^(.*)#(\d+)$', name)
    if match:
        name, version = match.groups()
        version = int(version)+1
    return sql_gen.concat(name, '#'+str(version))
661 2153 aaronmk
662 2899 aaronmk
def lock_table(db, table, mode):
    '''Runs LOCK TABLE on the given table.
    @param mode a PostgreSQL lock mode, e.g. "EXCLUSIVE"
    '''
    table = sql_gen.as_Table(table)
    run_query(db, 'LOCK TABLE '+table.to_str(db)+' IN '+mode+' MODE')
665
666 3303 aaronmk
def run_query_into(db, query, into=None, add_pkey_=False, **kw_args):
    '''Outputs a query to a temp table.
    For params, see run_query().
    @param into the sql_gen.Table to create; its name is versioned via
        next_version() on collision
    @param add_pkey_ whether to add a pkey to the created table
    @return the cursor of the CREATE TABLE AS query (rowcount = # rows)
    '''
    if into == None: return run_query(db, query, **kw_args)

    assert isinstance(into, sql_gen.Table)

    into.is_temp = True
    # "temporary tables cannot specify a schema name", so remove schema
    into.schema = None

    kw_args['recover'] = True
    kw_args.setdefault('log_ignore_excs', (DuplicateException,))

    temp = not db.debug_temp # tables are permanent in debug_temp mode

    # Create table
    while True:
        create_query = 'CREATE'
        if temp: create_query += ' TEMP'
        create_query += ' TABLE '+into.to_str(db)+' AS\n'+query

        try:
            cur = run_query(db, create_query, **kw_args)
                # CREATE TABLE AS sets rowcount to # rows in query
            break
        except DuplicateException:
            into.name = next_version(into.name)
            # try again with next version of name

    if add_pkey_: add_pkey(db, into)

    # According to the PostgreSQL doc, "The autovacuum daemon cannot access and
    # therefore cannot vacuum or analyze temporary tables. [...] if a temporary
    # table is going to be used in complex queries, it is wise to run ANALYZE on
    # the temporary table after it is populated."
    # (http://www.postgresql.org/docs/9.1/static/sql-createtable.html)
    # If into is not a temp table, ANALYZE is useful but not required.
    analyze(db, into)

    return cur
708 2085 aaronmk
709 2120 aaronmk
# sentinel (compared by identity in mk_select()): order by the pkey
order_by_pkey = object()
710
711 2199 aaronmk
# sentinel: tells mk_select() to SELECT DISTINCT ON all columns
distinct_on_all = object()
712
713 3420 aaronmk
def mk_select(db, tables=None, fields=None, conds=None, distinct_on=[],
    limit=None, start=None, order_by=order_by_pkey, default_table=None,
    explain=True):
    '''Builds a SELECT query string.
    @param tables The single table to select from, or a list of tables to join
        together, with tables after the first being sql_gen.Join objects
    @param fields Use None to select all fields in the table
    @param conds WHERE conditions: [(compare_left_side, compare_right_side),...]
        * container can be any iterable type
        * compare_left_side: sql_gen.Code|str (for col name)
        * compare_right_side: sql_gen.ValueCond|literal value
    @param distinct_on The columns to SELECT DISTINCT ON, or distinct_on_all to
        use all columns
    @param limit int|None LIMIT value
    @param start int|None OFFSET value (0 emits no OFFSET clause)
    @param order_by order_by_pkey (the default) picks the first DISTINCT ON col
        if one was given, else the table's clustering order; None disables
        ordering
    @param default_table Table to attach to unqualified column names
    @param explain If set, prepends the EXPLAIN output as a comment
    @return query
    '''
    # Parse tables param
    tables = lists.mk_seq(tables)
    tables = list(tables) # don't modify input! (list() copies input)
    table0 = sql_gen.as_Table(tables.pop(0)) # first table is separate
    
    # Parse other params
    if conds == None: conds = []
    elif dicts.is_dict(conds): conds = conds.items()
    conds = list(conds) # don't modify input! (list() copies input)
    assert limit == None or isinstance(limit, (int, long))
    assert start == None or isinstance(start, (int, long))
    if limit == 0: order_by = None # ordering is pointless with no rows
    if order_by is order_by_pkey:
        if lists.is_seq(distinct_on) and distinct_on: order_by = distinct_on[0]
        elif table0 != None: order_by = table_order_by(db, table0, recover=True)
        else: order_by = None
    
    query = 'SELECT'
    
    def parse_col(col): return sql_gen.as_Col(col, default_table).to_str(db)
    
    # DISTINCT ON columns
    if distinct_on != []:
        query += '\nDISTINCT'
        if distinct_on is not distinct_on_all:
            query += ' ON ('+(', '.join(map(parse_col, distinct_on)))+')'
    
    # Columns
    # (multi-line queries use newline separators for readability in logs)
    if query.find('\n') >= 0: whitespace = '\n'
    else: whitespace = ' '
    if fields == None: query += whitespace+'*'
    else:
        assert fields != []
        if len(fields) > 1: whitespace = '\n'
        query += whitespace+('\n, '.join(map(parse_col, fields)))
    
    # Main table
    if query.find('\n') >= 0 or len(tables) > 0: whitespace = '\n'
    else: whitespace = ' '
    if table0 != None: query += whitespace+'FROM '+table0.to_str(db)
    
    # Add joins
    left_table = table0
    for join_ in tables:
        table = join_.table
        
        # Parse special values
        if join_.type_ is sql_gen.filter_out: # filter no match
            conds.append((sql_gen.Col(table_not_null_col(db, table), table),
                sql_gen.CompareCond(None, '~=')))
        
        query += '\n'+join_.to_str(db, left_table)
        
        left_table = table
    
    missing = True # NOTE(review): appears unused — leftover from older version?
    if conds != []:
        # NOTE(review): this whitespace value is computed but never used below
        if len(conds) == 1: whitespace = ' '
        else: whitespace = '\n'
        query += '\n'+sql_gen.combine_conds([sql_gen.ColValueCond(l, r)
            .to_str(db) for l, r in conds], 'WHERE')
    if order_by != None:
        query += '\nORDER BY '+sql_gen.as_Col(order_by).to_str(db)
    if limit != None: query += '\nLIMIT '+str(limit)
    if start != None:
        if start != 0: query += '\nOFFSET '+str(start)
    
    if explain: query = with_explain_comment(db, query)
    
    return query
798 11 aaronmk
799 2054 aaronmk
def select(db, *args, **kw_args):
    '''Runs a SELECT query.
    For params, see mk_select() and run_query().
    '''
    # Split off the options consumed by run_query() before building the query
    recover = kw_args.pop('recover', None)
    cacheable = kw_args.pop('cacheable', True) # SELECTs cache by default
    log_level = kw_args.pop('log_level', 2)
    
    query = mk_select(db, *args, **kw_args)
    return run_query(db, query, recover, cacheable, log_level=log_level)
807 2054 aaronmk
808 2788 aaronmk
def mk_insert_select(db, table, cols=None, select_query=None, returning=None,
    embeddable=False, ignore=False, src=None):
    '''Builds an INSERT ... SELECT query (or a function wrapping one).
    @param cols [col,...]|None The columns to insert into; None/[] means the
        col names are unknown (all columns take their defaults)
    @param select_query str|None The SELECT (or VALUES) providing the rows;
        defaults to 'DEFAULT VALUES'
    @param returning str|None An inserted column (such as pkey) to return
    @param embeddable Whether the query should be embeddable as a nested SELECT.
        Warning: If you set this and cacheable=True when the query is run, the
        query will be fully cached, not just if it raises an exception.
    @param ignore Whether to ignore duplicate keys.
    @param src Will be included in the name of any created function, to help
        identify the data source in pg_stat_activity.
    '''
    table = sql_gen.remove_table_rename(sql_gen.as_Table(table))
    if cols == []: cols = None # no cols (all defaults) = unknown col names
    if cols != None: cols = [sql_gen.to_name_only_col(c, table) for c in cols]
    if select_query == None: select_query = 'DEFAULT VALUES'
    if returning != None: returning = sql_gen.as_Col(returning, table)
    
    first_line = 'INSERT INTO '+table.to_str(db)
    
    def mk_insert(select_query):
        '''Assembles the plain INSERT statement for the given rows source.'''
        query = first_line
        if cols != None:
            query += '\n('+(', '.join((c.to_str(db) for c in cols)))+')'
        query += '\n'+select_query
        
        if returning != None:
            returning_name_col = sql_gen.to_name_only_col(returning)
            query += '\nRETURNING '+returning_name_col.to_str(db)
        
        return query
    
    return_type = sql_gen.CustomCode('unknown')
    if returning != None: return_type = sql_gen.ColType(returning)
    
    if ignore:
        # Always return something to set the correct rowcount
        if returning == None: returning = sql_gen.NamedCol('NULL', None)
        
        embeddable = True # must use function
        
        # Per-row insert loop so each duplicate-key error can be caught
        if cols == None: row = [sql_gen.Col(sql_gen.all_cols, 'row')]
        else: row = [sql_gen.Col(c.name, 'row') for c in cols]
        
        query = sql_gen.RowExcIgnore(sql_gen.RowType(table), select_query,
            sql_gen.ReturnQuery(mk_insert(sql_gen.Values(row).to_str(db))),
            cols)
    else: query = mk_insert(select_query)
    
    if embeddable:
        # Create function (versioning the name until it doesn't collide)
        function_name = sql_gen.clean_name(first_line)
        if src != None: function_name = src+': '+function_name
        while True:
            try:
                func = db.TempFunction(function_name)
                def_ = sql_gen.FunctionDef(func, sql_gen.SetOf(return_type),
                    query)
                
                run_query(db, def_.to_str(db), recover=True, cacheable=True,
                    log_ignore_excs=(DuplicateException,))
                break # this version was successful
            except DuplicateException, e: # NOTE(review): e is unused
                function_name = next_version(function_name)
                # try again with next version of name
        
        # Return query that uses function
        cols = None
        if returning != None: cols = [returning]
        func_table = sql_gen.NamedTable('f', sql_gen.FunctionCall(func), cols)
            # AS clause requires function alias
        return mk_select(db, func_table, order_by=None)
    
    return query
881 2066 aaronmk
882 3074 aaronmk
def insert_select(db, table, *args, **kw_args):
    '''For params, see mk_insert_select() and run_query_into()
    @param into sql_gen.Table with suggested name of temp table to put RETURNING
        values in
    '''
    # Peek (without consuming) at options that also affect how the query runs
    returning = kw_args.get('returning', None)
    ignore = kw_args.get('ignore', False)
    
    # Options consumed here rather than by mk_insert_select()
    into = kw_args.pop('into', None)
    if into != None: kw_args['embeddable'] = True # into requires nested SELECT
    recover = kw_args.pop('recover', None)
    if ignore: recover = True # expected duplicates must not abort the txn
    cacheable = kw_args.pop('cacheable', True)
    log_level = kw_args.pop('log_level', 2)
    
    # ignore with no RETURNING col still produces NULL placeholder rows; park
    # them in a server-side temp table just to get the rowcount
    rowcount_only = ignore and returning == None
    if rowcount_only: into = sql_gen.Table('rowcount')
    
    query = mk_insert_select(db, table, *args, **kw_args)
    cur = run_query_into(db, query, into, recover=recover, cacheable=cacheable,
        log_level=log_level)
    if rowcount_only: empty_temp(db, into) # placeholder rows no longer needed
    autoanalyze(db, table)
    return cur
905 2063 aaronmk
906 2738 aaronmk
# Sentinel re-exported from sql_gen; place it in a row to request the column's
# DB-side DEFAULT value.
default = sql_gen.default # tells insert() to use the default value for a column
907 2066 aaronmk
908 2063 aaronmk
def insert(db, table, row, *args, **kw_args):
    '''Inserts a single row. For params, see insert_select().
    @param row dict (col name -> value) or sequence (values in table col order)
    @param ignore If set, returns None instead of raising on duplicate keys
    '''
    ignore = kw_args.pop('ignore', False)
    if ignore: kw_args.setdefault('recover', True)
    
    # A dict row supplies its own col names; a sequence uses the table's order
    cols = None
    if not lists.is_seq(row):
        cols = row.keys()
        row = row.values()
    row = list(row) # normalize so the emptiness test below always works
    
    query = None # empty row -> every column takes its default
    if row: query = sql_gen.Values(row).to_str(db)
    
    try: return insert_select(db, table, cols, query, *args, **kw_args)
    except (DuplicateKeyException, NullValueException):
        if not ignore: raise
        return None
926 11 aaronmk
927 3152 aaronmk
def mk_update(db, table, changes=None, cond=None, in_place=False,
    cacheable_=True):
    '''Builds an UPDATE (or in-place ALTER TABLE) query.
    @param changes [(col, new_value),...]
        * container can be any iterable type
        * col: sql_gen.Code|str (for col name)
        * new_value: sql_gen.Code|literal value
    @param cond sql_gen.Code WHERE condition. e.g. use sql_gen.*Cond objects.
    @param in_place If set, locks the table and updates rows in place.
        This avoids creating dead rows in PostgreSQL.
        * cond must be None
    @param cacheable_ Whether column structure information used to generate the
        query can be cached
    @return str query
    '''
    table = sql_gen.as_Table(table)
    changes = [(sql_gen.to_name_only_col(c, table), sql_gen.as_Value(v))
        for c, v in changes]
    
    if in_place:
        assert cond == None
        
        def col_type(col):
            '''Looks up col's canonical type, for the TYPE ... USING clause.'''
            # Fixed: use the col parameter instead of the enclosing loop var
            # `c` (the old code only worked by accident of Python 2
            # list-comprehension variable leakage)
            return sql_gen.canon_type(db.col_info(
                sql_gen.with_default_table(col, table), cacheable_).type)
        changes = [(c, v, col_type(c)) for c, v in changes]
        
        # ALTER COLUMN TYPE ... USING rewrites the rows during the table
        # rewrite, so no dead rows are left behind as a plain UPDATE would
        query = 'ALTER TABLE '+table.to_str(db)+'\n'
        query += ',\n'.join(('ALTER COLUMN '+c.to_str(db)+' TYPE '+t+'\nUSING '
            +v.to_str(db) for c, v, t in changes))
    else:
        query = 'UPDATE '+table.to_str(db)+'\nSET\n'
        query += ',\n'.join((c.to_str(db)+' = '+v.to_str(db)
            for c, v in changes))
        if cond != None: query += '\nWHERE\n'+cond.to_str(db)
    
    query = with_explain_comment(db, query)
    
    return query
965
966 3074 aaronmk
def update(db, table, *args, **kw_args):
    '''Runs an UPDATE query. For params, see mk_update() and run_query().'''
    recover = kw_args.pop('recover', None)
    cacheable = kw_args.pop('cacheable', False) # updates aren't cacheable
    log_level = kw_args.pop('log_level', 2)
    
    query = mk_update(db, table, *args, **kw_args)
    cur = run_query(db, query, recover, cacheable, log_level=log_level)
    autoanalyze(db, table) # keep planner stats fresh after bulk changes
    return cur
976 2402 aaronmk
977 3286 aaronmk
def mk_delete(db, table, cond=None):
    '''Builds a DELETE query.
    @param cond sql_gen.Code WHERE condition. e.g. use sql_gen.*Cond objects.
    @return str query
    '''
    parts = ['DELETE FROM '+table.to_str(db)]
    if cond != None: parts.append('WHERE '+cond.to_str(db))
    return with_explain_comment(db, '\n'.join(parts))
988
989
def delete(db, table, *args, **kw_args):
    '''Runs a DELETE query. For params, see mk_delete() and run_query().'''
    recover = kw_args.pop('recover', None)
    cacheable = kw_args.pop('cacheable', True)
    log_level = kw_args.pop('log_level', 2)
    
    query = mk_delete(db, table, *args, **kw_args)
    cur = run_query(db, query, recover, cacheable, log_level=log_level)
    autoanalyze(db, table) # deletes change the row-count statistics
    return cur
999
1000 135 aaronmk
def last_insert_id(db):
    '''Returns the value generated for the most recently inserted row, or None
    when the underlying driver is not supported.'''
    driver = util.root_module(db.db)
    if driver == 'psycopg2': return value(run_query(db, 'SELECT lastval()'))
    if driver == 'MySQLdb': return db.insert_id()
    return None
1005 13 aaronmk
1006 3490 aaronmk
def define_func(db, def_):
    '''Creates a function, versioning its name until it doesn't collide with an
    existing one.
    @param def_ sql_gen.FunctionDef; the chosen name is propagated back to the
        caller through def_.function.name
    '''
    func = def_.function
    while True:
        try:
            run_query(db, def_.to_str(db), recover=True, cacheable=True,
                log_ignore_excs=(DuplicateException,))
            break # successful
        except DuplicateException:
            func.name = next_version(func.name)
            # try again with next version of name
1016
1017 2394 aaronmk
def mk_flatten_mapping(db, into, cols, preserve=[], as_items=False):
    '''Creates a mapping from original column names (which may have collisions)
    to names that will be distinct among the columns' tables.
    This is meant to be used for several tables that are being joined together.
    @param cols The columns to combine. Duplicates will be removed.
    @param into The table for the new columns.
    @param preserve [sql_gen.Col...] Columns not to rename. Note that these
        columns will be included in the mapping even if they are not in cols.
        The tables of the provided Col objects will be changed to into, so make
        copies of them if you want to keep the original tables.
    @param as_items Whether to return a list of dict items instead of a dict
    @return dict(orig_col=new_col, ...)
        * orig_col: sql_gen.Col(orig_col_name, orig_table)
        * new_col: sql_gen.Col(orig_col_name, into)
        * All mappings use the into table so its name can easily be
          changed for all columns at once
    '''
    cols = lists.uniqify(cols)
    
    items = []
    for col in preserve:
        orig_col = copy.copy(col) # snapshot before mutating (see docstring)
        col.table = into # deliberately mutates the caller's Col
        items.append((orig_col, col))
    # NOTE(review): membership below compares against the already-mutated
    # preserve Cols (table == into) — confirm cols can still match them
    preserve = set(preserve)
    for col in cols:
        if col not in preserve:
            items.append((col, sql_gen.Col(strings.ustr(col), into, col.srcs)))
    
    if not as_items: items = dict(items)
    return items
1048 2383 aaronmk
1049 2393 aaronmk
def flatten(db, into, joins, cols, limit=None, start=None, **kw_args):
    '''For params, see mk_flatten_mapping()
    @return See return value of mk_flatten_mapping()
    '''
    items = mk_flatten_mapping(db, into, cols, as_items=True, **kw_args)
    renamed = [sql_gen.NamedCol(new.name, old) for old, new in items]
    query = mk_select(db, joins, renamed, limit=limit, start=start)
    run_query_into(db, query, into=into, add_pkey_=True)
        # don't cache because the temp table will usually be truncated after use
    return dict(items)
1059 2391 aaronmk
1060 3079 aaronmk
##### Database structure introspection
1061 2414 aaronmk
1062 3079 aaronmk
#### Tables
1063
1064 4555 aaronmk
def tables(db, schema_like='public', table_like='%', exact=False,
    cacheable=True):
    '''Lists tables matching the given schema and name patterns.
    @param exact If set, match with = instead of LIKE
    @param cacheable Whether the catalog query may be cached
    @return iter of table names, ordered by name (PostgreSQL)
    '''
    if exact: compare = '='
    else: compare = 'LIKE'
    
    module = util.root_module(db.db)
    if module == 'psycopg2':
        conds = [('schemaname', sql_gen.CompareCond(schema_like, compare)),
            ('tablename', sql_gen.CompareCond(table_like, compare))]
        return values(select(db, 'pg_tables', ['tablename'], conds,
            order_by='tablename', cacheable=cacheable, log_level=4))
    elif module == 'MySQLdb':
        # SHOW TABLES only supports LIKE, so schema_like/exact don't apply here.
        # Fixed: honor the cacheable param (was hard-coded to True, unlike the
        # psycopg2 branch)
        return values(run_query(db, 'SHOW TABLES LIKE '+db.esc_value(table_like)
            , cacheable=cacheable, log_level=4))
    else: raise NotImplementedError("Can't list tables for "+module+' database')
1079
1080 4556 aaronmk
def table_exists(db, table, cacheable=True):
    '''Whether table exists (exact match on its schema and name).'''
    table = sql_gen.as_Table(table)
    matches = tables(db, table.schema, table.name, True, cacheable)
    return list(matches) != []
1083 3079 aaronmk
1084 2426 aaronmk
def table_row_count(db, table, recover=None):
    '''Returns the number of rows in table.'''
    count_query = mk_select(db, table, [sql_gen.row_count], order_by=None)
    return value(run_query(db, count_query, recover=recover, log_level=3))
1087 2426 aaronmk
1088 5337 aaronmk
def table_col_names(db, table, recover=None):
    '''Returns the table's column names in order, using a LIMIT 0 SELECT so no
    rows are fetched.'''
    cur = select(db, table, limit=0, recover=recover, log_level=4)
    return list(col_names(cur))
1091 2414 aaronmk
1092 5383 aaronmk
def table_cols(db, table, *args, **kw_args):
    '''Returns the table's columns as sql_gen.Col objects bound to table.
    For extra params, see table_col_names().'''
    names = table_col_names(db, table, *args, **kw_args)
    return [sql_gen.as_Col(strings.ustr(name), table) for name in names]
1095
1096 5521 aaronmk
def table_pkey_index(db, table, recover=None):
    '''Looks up the index that implements table's primary key.
    @return sql_gen.Table the pkey index, placed in table's schema
    @raise DoesNotExistException if the table has no primary key
    '''
    table_str = sql_gen.Literal(table.to_str(db))
    try:
        # "index" here is a SQL alias for pg_class, not a keyword
        return sql_gen.Table(value(run_query(db, '''\
SELECT relname
FROM pg_index
JOIN pg_class index ON index.oid = indexrelid
WHERE
indrelid = '''+table_str.to_str(db)+'''::regclass
AND indisprimary
'''
            , recover, cacheable=True, log_level=4)), table.schema)
    except StopIteration: raise DoesNotExistException('primary key', '')
1109
1110 5389 aaronmk
def table_pkey_col(db, table, recover=None):
    '''Looks up table's primary key column via information_schema.
    @param recover Passed through to the catalog query
    @return sql_gen.Col the pkey column, qualified with table
    @raise DoesNotExistException if the table has no primary key
    '''
    table = sql_gen.as_Table(table)
    
    join_cols = ['table_schema', 'table_name', 'constraint_schema',
        'constraint_name']
    tables = [sql_gen.Table('key_column_usage', 'information_schema'),
        sql_gen.Join(sql_gen.Table('table_constraints', 'information_schema'),
            dict(((c, sql_gen.join_same_not_null) for c in join_cols)))]
    cols = [sql_gen.Col('column_name')]
    
    conds = [('constraint_type', 'PRIMARY KEY'), ('table_name', table.name)]
    schema = table.schema
    if schema != None: conds.append(('table_schema', schema))
    order_by = 'position_in_unique_constraint'
    
    # Fixed: pass recover through to the query (the param was previously
    # accepted but silently ignored)
    try: return sql_gen.Col(value(select(db, tables, cols, conds,
        order_by=order_by, limit=1, recover=recover, log_level=4)), table)
    except StopIteration: raise DoesNotExistException('primary key', '')
1128 5389 aaronmk
1129
def pkey_name(db, table, recover=None):
    '''Returns the name of the table's pkey column.
    If no pkey, returns the first column in the table.'''
    col = pkey_col(db, table, recover)
    return col.name
1132 832 aaronmk
1133 5390 aaronmk
def pkey_col(db, table, recover=None):
    '''Returns the table's pkey column.
    If no pkey, returns the first column in the table.'''
    try: return table_pkey_col(db, table, recover)
    except DoesNotExistException:
        return table_cols(db, table, recover)[0]
1137 5128 aaronmk
1138 2559 aaronmk
# Conventional name of a guaranteed-NOT-NULL column; see table_not_null_col()
not_null_col = 'not_null_col'
1139 2340 aaronmk
1140
def table_not_null_col(db, table, recover=None):
    '''Name assumed to be the value of not_null_col. If not found, uses pkey.'''
    names = table_col_names(db, table, recover)
    if not_null_col in names: return not_null_col
    return pkey_name(db, table, recover)
1144 2340 aaronmk
1145 3348 aaronmk
def constraint_cond(db, constraint):
    '''Returns the constraint's condition expression as a SQL string (from
    pg_constraint.consrc).
    @param constraint object with .table and .name attributes
    '''
    module = util.root_module(db.db)
    if module == 'psycopg2':
        table_str = sql_gen.Literal(constraint.table.to_str(db))
        name_str = sql_gen.Literal(constraint.name)
        return value(run_query(db, '''\
SELECT consrc
FROM pg_constraint
WHERE
conrelid = '''+table_str.to_str(db)+'''::regclass
AND conname = '''+name_str.to_str(db)+'''
'''
            , cacheable=True, log_level=4))
    else: raise NotImplementedError("Can't get constraint condition for "
        +module+' database')
1160 3348 aaronmk
1161 5520 aaronmk
def index_exprs(db, index):
    '''Returns the column/expression definitions of index, one SQL string per
    indexed attribute (via pg_get_indexdef()).'''
    index = sql_gen.as_Table(index)
    module = util.root_module(db.db)
    if module == 'psycopg2':
        qual_index = sql_gen.Literal(index.to_str(db))
        return list(values(run_query(db, '''\
SELECT pg_get_indexdef(indexrelid, generate_series(1, indnatts), true)
FROM pg_index
WHERE indexrelid = '''+qual_index.to_str(db)+'''::regclass
'''
            , cacheable=True, log_level=4)))
    else: raise NotImplementedError()
1173 853 aaronmk
1174 5520 aaronmk
def index_cols(db, index):
    '''Can also use this for UNIQUE constraints, because a UNIQUE index is
    automatically created. When you don't know whether something is a UNIQUE
    constraint or a UNIQUE index, use this function.'''
    return [sql_gen.parse_expr_col(e) for e in index_exprs(db, index)]
1179
1180 5445 aaronmk
def index_cond(db, index):
    '''Returns index's predicate expression (pg_get_expr of indpred), as a SQL
    string. Presumably None for a non-partial index — TODO confirm.'''
    index = sql_gen.as_Table(index)
    module = util.root_module(db.db)
    if module == 'psycopg2':
        qual_index = sql_gen.Literal(index.to_str(db))
        return value(run_query(db, '''\
SELECT pg_get_expr(indpred, indrelid, true)
FROM pg_index
WHERE indexrelid = '''+qual_index.to_str(db)+'''::regclass
'''
            , cacheable=True, log_level=4))
    else: raise NotImplementedError()
1192
1193 5521 aaronmk
def index_order_by(db, index):
    '''Returns an ORDER BY expression listing index's columns/expressions.'''
    exprs = index_exprs(db, index)
    return sql_gen.CustomCode(', '.join(exprs))
1195
1196
def table_cluster_on(db, table, recover=None):
    '''
    @return The table's cluster index, or its pkey if none is set
    '''
    table_str = sql_gen.Literal(table.to_str(db))
    try:
        # "index" here is a SQL alias for pg_class, not a keyword
        return sql_gen.Table(value(run_query(db, '''\
SELECT relname
FROM pg_index
JOIN pg_class index ON index.oid = indexrelid
WHERE
indrelid = '''+table_str.to_str(db)+'''::regclass
AND indisclustered
'''
            , recover, cacheable=True, log_level=4)), table.schema)
    except StopIteration: return table_pkey_index(db, table, recover)
1212
1213
def table_order_by(db, table, recover=None):
    '''Returns the table's natural ordering (cached on table.order_by),
    computed from its cluster index (or pkey) on first use. Returns None if
    neither can be determined.'''
    if table.order_by != None: return table.order_by
    try:
        cluster_index = table_cluster_on(db, table, recover)
        table.order_by = index_order_by(db, cluster_index)
    except DoesNotExistException: pass
    return table.order_by
1219 5521 aaronmk
1220 3079 aaronmk
#### Functions
1221
1222
def function_exists(db, function):
    '''Whether function exists, checked by casting its name to regproc.'''
    qual_function = sql_gen.Literal(function.to_str(db))
    try:
        select(db, fields=[sql_gen.Cast('regproc', qual_function)],
            recover=True, cacheable=True, log_level=4)
    except DoesNotExistException: return False
    except DuplicateException: return True # overloaded function
    else: return True
1230 3079 aaronmk
1231 5713 aaronmk
def function_param0_type(db, function):
    '''Returns the SQL type name of function's first parameter (from
    pg_proc.proargtypes).'''
    qual_function = sql_gen.Literal(function.to_str(db))
    return value(run_query(db, '''\
SELECT proargtypes[0]::regtype
FROM pg_proc
WHERE oid = '''+qual_function.to_str(db)+'''::regproc
'''
        , cacheable=True, log_level=4))
1239
1240 3079 aaronmk
##### Structural changes
1241
1242
#### Columns
1243
1244 5020 aaronmk
def add_col(db, table, col, comment=None, if_not_exists=False, **kw_args):
    '''
    @param col TypedCol Name may be versioned, so be sure to propagate any
        renaming back to any source column for the TypedCol.
    @param comment None|str SQL comment used to distinguish columns of the same
        name from each other when they contain different data, to allow the
        ADD COLUMN query to be cached. If not set, query will not be cached.
    @param if_not_exists If set, a duplicate column re-raises instead of
        retrying under a versioned name.
        NOTE(review): the param name reads oddly for this behavior — confirm
        intended semantics with callers (add_row_num() passes True).
    '''
    assert isinstance(col, sql_gen.TypedCol)
    
    while True:
        str_ = 'ALTER TABLE '+table.to_str(db)+' ADD COLUMN '+col.to_str(db)
        if comment != None: str_ += ' '+sql_gen.esc_comment(comment)
        
        try:
            run_query(db, str_, recover=True, cacheable=True, **kw_args)
            break
        except DuplicateException:
            if if_not_exists: raise
            col.name = next_version(col.name)
            # try again with next version of name
            # try again with next version of name
1265
1266
def add_not_null(db, col):
    '''Adds a NOT NULL constraint to col.'''
    table = col.table
    col = sql_gen.to_name_only_col(col)
    query = ('ALTER TABLE '+table.to_str(db)+' ALTER COLUMN '+col.to_str(db)
        +' SET NOT NULL')
    run_query(db, query, cacheable=True, log_level=3)
1271
1272 4443 aaronmk
def drop_not_null(db, col):
    '''Removes the NOT NULL constraint from col.'''
    table = col.table
    col = sql_gen.to_name_only_col(col)
    query = ('ALTER TABLE '+table.to_str(db)+' ALTER COLUMN '+col.to_str(db)
        +' DROP NOT NULL')
    run_query(db, query, cacheable=True, log_level=3)
1277
1278 2096 aaronmk
# Default name for the row number column added by add_row_num()
row_num_col = '_row_num'

# Shared template for the row number column; add_row_num() copies it and fills
# in the name, so its own name is left blank here
row_num_col_def = sql_gen.TypedCol('', 'serial', nullable=False,
    constraints='PRIMARY KEY')
1282
1283 4997 aaronmk
def add_row_num(db, table, name=row_num_col):
    '''Adds a row number column to a table. Its definition is in
    row_num_col_def. It will be the primary key.'''
    # copy the shared template so setting the name doesn't mutate it
    the_col = copy.copy(row_num_col_def)
    the_col.name = name
    add_col(db, table, the_col, comment='', if_not_exists=True, log_level=3)
1289 3079 aaronmk
1290
#### Indexes
1291
1292
def add_pkey(db, table, cols=None, recover=None):
    '''Adds a primary key.
    @param cols [sql_gen.Col,...] The columns in the primary key.
        Defaults to the first column in the table.
    @pre The table must not already have a primary key.
    '''
    table = sql_gen.as_Table(table)
    if cols == None: cols = [pkey_name(db, table, recover)]
    col_strs = [sql_gen.to_name_only_col(c).to_str(db) for c in cols]
    
    query = ('ALTER TABLE '+table.to_str(db)+' ADD PRIMARY KEY ('
        +', '.join(col_strs)+')')
    run_query(db, query, recover=True, cacheable=True, log_level=3,
        log_ignore_excs=(DuplicateException,))
1305
1306 2998 aaronmk
def add_index(db, exprs, table=None, unique=False, ensure_not_null_=True):
    '''Adds an index on column(s) or expression(s) if it doesn't already exist.
    Currently, only function calls and literal values are supported expressions.
    @param exprs col|expr or list thereof
    @param table Table to index; if None, taken from the first table-qualified
        col in exprs
    @param unique If set, creates a UNIQUE index
    @param ensure_not_null_ If set, translates NULL values to sentinel values.
        This allows indexes to be used for comparisons where NULLs are equal.
    '''
    exprs = lists.mk_seq(exprs)
    
    # Parse exprs
    old_exprs = exprs[:]
    exprs = []
    cols = []
    for i, expr in enumerate(old_exprs): # NOTE(review): i appears unused
        expr = sql_gen.as_Col(expr, table)
        
        # Handle nullable columns
        if ensure_not_null_:
            try: expr = sql_gen.ensure_not_null(db, expr)
            except KeyError: pass # unknown type, so just create plain index
        
        # Extract col
        expr = copy.deepcopy(expr) # don't modify input!
        col = expr
        if isinstance(expr, sql_gen.FunctionCall): col = expr.args[0]
        expr = sql_gen.cast_literal(expr)
        if not isinstance(expr, (sql_gen.Expr, sql_gen.Col)):
            expr = sql_gen.Expr(expr)
        
        
        # Extract table
        if table == None:
            assert sql_gen.is_table_col(col)
            table = col.table
        
        # table is named in the ON clause, so strip it from the col itself
        if isinstance(col, sql_gen.Col): col.table = None
        
        exprs.append(expr)
        cols.append(col)
    
    table = sql_gen.as_Table(table)
    
    # Add index
    str_ = 'CREATE'
    if unique: str_ += ' UNIQUE'
    str_ += ' INDEX ON '+table.to_str(db)+' ('+(
        ', '.join((v.to_str(db) for v in exprs)))+')'
    run_query(db, str_, recover=True, cacheable=True, log_level=3)
1353 2408 aaronmk
1354 5765 aaronmk
def add_pkey_or_index(db, table, cols=None, recover=None, warn=False):
    '''Tries to add a primary key; if one already exists, falls back to adding
    a plain index on the pkey column instead.
    @param warn If set, emits a warning when falling back
    '''
    try: add_pkey(db, table, cols, recover)
    except DuplicateKeyException, e:
        if warn: warnings.warn(UserWarning(exc.str_(e)))
        add_index(db, pkey_col(db, table), table)
1359
1360 3083 aaronmk
# Sentinel compared by identity (`is`) in add_indexes()
already_indexed = object() # tells add_indexes() the pkey has already been added
1361
1362
def add_indexes(db, table, has_pkey=True):
    '''Adds an index on all columns in a table.
    @param has_pkey bool|already_indexed Whether a pkey instead of a regular
        index should be added on the first column.
        * If already_indexed, the pkey is assumed to have already been added
    '''
    names = table_col_names(db, table)
    if has_pkey:
        if has_pkey is not already_indexed: add_pkey(db, table)
        names = names[1:] # first column is covered by the pkey
    for name in names: add_index(db, name, table)
1373
1374 3079 aaronmk
#### Tables
1375 2772 aaronmk
1376 3079 aaronmk
### Maintenance
1377 2772 aaronmk
1378 3079 aaronmk
def analyze(db, table):
    '''Refreshes the query planner's statistics for a table.'''
    table_sql = sql_gen.as_Table(table).to_str(db)
    run_query(db, 'ANALYZE '+table_sql, log_level=3)
1381 2934 aaronmk
1382 3079 aaronmk
def autoanalyze(db, table):
    '''Runs analyze() on the table, but only if enabled on this connection.'''
    if not db.autoanalyze: return
    analyze(db, table)
1384 2935 aaronmk
1385 3079 aaronmk
def vacuum(db, table):
    '''VACUUM ANALYZEs a table. Runs in autocommit mode because VACUUM cannot
    execute inside a transaction block.'''
    table_sql = sql_gen.as_Table(table).to_str(db)
    def run_vacuum(): return run_query(db, 'VACUUM ANALYZE '+table_sql,
        log_level=3)
    db.with_autocommit(run_vacuum)
1389 2086 aaronmk
1390 3079 aaronmk
### Lifecycle
1391
1392 3247 aaronmk
def drop(db, type_, name):
    '''Drops a database object if it exists, cascading to dependent objects.
    @param type_ The SQL object type, e.g. 'TABLE' or 'VIEW'
    '''
    name_sql = sql_gen.as_Name(name).to_str(db)
    run_query(db, 'DROP '+type_+' IF EXISTS '+name_sql+' CASCADE')
1395 2889 aaronmk
1396 3247 aaronmk
def drop_table(db, table):
    '''Drops a table if it exists, cascading to dependent objects.'''
    drop(db, 'TABLE', table)
1397
1398 3082 aaronmk
def create_table(db, table, cols=[], has_pkey=True, col_indexes=True,
    like=None):
    '''Creates a table.
    @param cols [sql_gen.TypedCol,...] The column names and types
    @param has_pkey If set, the first column becomes the primary key.
    @param col_indexes bool|[ref]
        * If True, indexes will be added on all non-pkey columns.
        * If a list reference, [0] will be set to a function to do this.
          This can be used to delay index creation until the table is populated.
    @param like sql_gen.Table|None If set, the new table copies this table's
        structure (LIKE ... INCLUDING ALL) in addition to any cols given.
    '''
    table = sql_gen.as_Table(table)
    cols = list(cols) # don't modify input! (copy.copy below only protects the
        # TypedCol element; without this, cols[0] = ... would mutate the
        # caller's list and the shared default [])
    
    if like != None:
        cols = [sql_gen.CustomCode('LIKE '+like.to_str(db)+' INCLUDING ALL')
            ]+cols
        table.order_by = like.order_by
    if has_pkey:
        cols[0] = pkey = copy.copy(cols[0]) # don't modify input!
        pkey.constraints = 'PRIMARY KEY'
    
    temp = table.is_temp and not db.debug_temp
        # temp tables permanent in debug_temp mode
    
    # Create table
    def create():
        str_ = 'CREATE'
        if temp: str_ += ' TEMP'
        str_ += ' TABLE '+table.to_str(db)+' (\n'
        str_ += '\n, '.join(c.to_str(db) for c in cols)
        str_ += '\n);'
        
        run_query(db, str_, recover=True, cacheable=True, log_level=2,
            log_ignore_excs=(DuplicateException,))
    if table.is_temp:
        # temp table names may collide with an existing table in this session;
        # keep bumping the version suffix until creation succeeds
        while True:
            try:
                create()
                break
            except DuplicateException:
                table.name = next_version(table.name)
                # try again with next version of name
    else: create()
    
    # Add indexes
    if has_pkey: has_pkey = already_indexed # pkey was created by the DDL above
    def add_indexes_(): add_indexes(db, table, has_pkey)
    if isinstance(col_indexes, list): col_indexes[0] = add_indexes_ # defer
    elif col_indexes: add_indexes_() # add now
1446 2675 aaronmk
1447 3084 aaronmk
def copy_table_struct(db, src, dest):
    '''Clones a table's structure (columns, constraints, etc.) without copying
    any of its rows.'''
    create_table(db, dest, like=src, has_pkey=False, col_indexes=False)
1450 3084 aaronmk
1451 5529 aaronmk
def copy_table(db, src, dest):
    '''Duplicates a table: structure first, then all of its data.'''
    copy_table_struct(db, src, dest) # clone the schema
    insert_select(db, dest, None, mk_select(db, src)) # then copy the rows
1455
1456 3079 aaronmk
### Data
1457 2684 aaronmk
1458 2970 aaronmk
def truncate(db, table, schema='public', **kw_args):
    '''Empties a table, cascading to tables that reference it.
    For params, see run_query()
    '''
    table = sql_gen.as_Table(table, schema)
    sql = 'TRUNCATE '+table.to_str(db)+' CASCADE'
    return run_query(db, sql, **kw_args)
1462 2732 aaronmk
1463 2965 aaronmk
def empty_temp(db, tables):
    '''Truncates the given temp table(s), logging quietly (log_level=3).
    @param tables A single table or a sequence of tables.
    '''
    for table in lists.mk_seq(tables): truncate(db, table, log_level=3)
1466 2965 aaronmk
1467 1968 aaronmk
def empty_db(db, schema='public', **kw_args):
    '''Truncates every table in a schema.
    For kw_args, see tables()
    '''
    for table_ in tables(db, schema, **kw_args): truncate(db, table_, schema)
1470 3094 aaronmk
1471
def distinct_table(db, table, distinct_on):
    '''Creates a copy of a temp table which is distinct on the given columns.
    The old and new tables will both get an index on these columns, to
    facilitate merge joins.
    @param distinct_on If empty, creates a table with one row. This is useful if
        your distinct_on columns are all literal values.
    @return The new table.
    '''
    new_table = sql_gen.suffixed_table(table, '_distinct')
    # only actual table columns can be deduplicated/indexed on
    distinct_on = [c for c in distinct_on if sql_gen.is_table_col(c)]
    
    copy_table_struct(db, table, new_table)
    
    if not distinct_on:
        limit = 1 # no distinct columns, so keep just one sample row
    else:
        limit = None
        add_index(db, distinct_on, new_table, unique=True)
        add_index(db, distinct_on, table) # for join optimization
    
    insert_select(db, new_table, None, mk_select(db, table, order_by=None,
        limit=limit), ignore=True)
    analyze(db, new_table)
    
    return new_table