# Database access

import copy
import operator
import re
import warnings

import exc
import dicts
import iters
import lists
from Proxy import Proxy
import rand
import sql_gen
import strings
import util

##### Exceptions

def get_cur_query(cur, input_query=None):
    raw_query = None
    if hasattr(cur, 'query'): raw_query = cur.query
    elif hasattr(cur, '_last_executed'): raw_query = cur._last_executed

    if raw_query != None: return raw_query
    else: return '[input] '+strings.ustr(input_query)

def _add_cursor_info(e, *args, **kw_args):
    '''For params, see get_cur_query()'''
    exc.add_msg(e, 'query: '+strings.ustr(get_cur_query(*args, **kw_args)))

class DbException(exc.ExceptionWithCause):
    def __init__(self, msg, cause=None, cur=None):
        exc.ExceptionWithCause.__init__(self, msg, cause, cause_newline=True)
        if cur != None: _add_cursor_info(self, cur)

class ExceptionWithName(DbException):
    def __init__(self, name, cause=None):
        DbException.__init__(self, 'for name: '+strings.as_tt(str(name)), cause)
        self.name = name

class ExceptionWithNameValue(DbException):
    def __init__(self, name, value, cause=None):
        DbException.__init__(self, 'for name: '+strings.as_tt(str(name))
            +'; value: '+strings.as_tt(repr(value)), cause)
        self.name = name
        self.value = value

class ExceptionWithNameType(DbException):
    def __init__(self, type_, name, cause=None):
        DbException.__init__(self, 'for type: '+strings.as_tt(str(type_))
            +'; name: '+strings.as_tt(name), cause)
        self.type = type_
        self.name = name

class ConstraintException(DbException):
    def __init__(self, name, cols, cause=None):
        DbException.__init__(self, 'Violated '+strings.as_tt(name)
            +' constraint on columns: '+strings.as_tt(', '.join(cols)), cause)
        self.name = name
        self.cols = cols

class MissingCastException(DbException):
    def __init__(self, type_, col, cause=None):
        DbException.__init__(self, 'Missing cast to type '+strings.as_tt(type_)
            +' on column: '+strings.as_tt(col), cause)
        self.type = type_
        self.col = col

class NameException(DbException): pass

class DuplicateKeyException(ConstraintException): pass

class NullValueException(ConstraintException): pass

class FunctionValueException(ExceptionWithNameValue): pass

class DuplicateException(ExceptionWithNameType): pass

class EmptyRowException(DbException): pass

##### Warnings

class DbWarning(UserWarning): pass

##### Result retrieval

def col_names(cur): return (col[0] for col in cur.description)

def rows(cur): return iter(lambda: cur.fetchone(), None)

def consume_rows(cur):
    '''Used to fetch all rows so result will be cached'''
    iters.consume_iter(rows(cur))

def next_row(cur): return rows(cur).next()

def row(cur):
    row_ = next_row(cur)
    consume_rows(cur)
    return row_

def next_value(cur): return next_row(cur)[0]

def value(cur): return row(cur)[0]

def values(cur): return iters.func_iter(lambda: next_value(cur))

def value_or_none(cur):
    try: return value(cur)
    except StopIteration: return None
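
# Usage sketch (assumes `cur` is a DB-API cursor, e.g. one returned by
# DbConn.run_query(); names below are hypothetical):
#     for row_ in rows(cur): print row_                 # iterate over all rows
#     count = value(run_query(db, 'SELECT count(*) FROM t'))  # single value
# value_or_none() behaves like value() but returns None on an empty result.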

##### Escaping

def esc_name_by_module(module, name):
    if module == 'psycopg2' or module == None: quote = '"'
    elif module == 'MySQLdb': quote = '`'
    else: raise NotImplementedError("Can't escape name for "+module+' database')
    return sql_gen.esc_name(name, quote)

def esc_name_by_engine(engine, name, **kw_args):
    return esc_name_by_module(db_engines[engine][0], name, **kw_args)

def esc_name(db, name, **kw_args):
    return esc_name_by_module(util.root_module(db.db), name, **kw_args)

def qual_name(db, schema, table):
    def esc_name_(name): return esc_name(db, name)
    table = esc_name_(table)
    if schema != None: return esc_name_(schema)+'.'+table
    else: return table
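
# Example (sketch): esc_name_by_module('psycopg2', 'my col') quotes with '"',
# while esc_name_by_module('MySQLdb', 'my col') quotes with '`'; the actual
# escaping is delegated to sql_gen.esc_name() with the chosen quote character.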

##### Database connections

db_config_names = ['engine', 'host', 'user', 'password', 'database', 'schemas']

db_engines = {
    'MySQL': ('MySQLdb', {'password': 'passwd', 'database': 'db'}),
    'PostgreSQL': ('psycopg2', {}),
}

DatabaseErrors_set = set([DbException])
DatabaseErrors = tuple(DatabaseErrors_set)

def _add_module(module):
    DatabaseErrors_set.add(module.DatabaseError)
    global DatabaseErrors
    DatabaseErrors = tuple(DatabaseErrors_set)

def db_config_str(db_config):
    return db_config['engine']+' database '+db_config['database']

log_debug_none = lambda msg, level=2: None

class DbConn:
    def __init__(self, db_config, autocommit=True, caching=True,
        log_debug=log_debug_none, debug_temp=False):
        '''
        @param debug_temp Whether temporary objects should instead be permanent.
            This assists in debugging the internal objects used by the program.
        '''
        self.db_config = db_config
        self.autocommit = autocommit
        self.caching = caching
        self.log_debug = log_debug
        self.debug = log_debug != log_debug_none
        self.debug_temp = debug_temp
        self.autoanalyze = False

        self.__db = None
        self.query_results = {}
        self._savepoint = 0
        self._notices_seen = set()

    def __getattr__(self, name):
        if name == '__dict__': raise Exception('getting __dict__')
        if name == 'db': return self._db()
        else: raise AttributeError()

    def __getstate__(self):
        state = copy.copy(self.__dict__) # shallow copy
        state['log_debug'] = None # don't pickle the debug callback
        state['_DbConn__db'] = None # don't pickle the connection
        return state

    def connected(self): return self.__db != None

    def _db(self):
        if self.__db == None:
            # Process db_config
            db_config = self.db_config.copy() # don't modify input!
            schemas = db_config.pop('schemas', None)
            module_name, mappings = db_engines[db_config.pop('engine')]
            module = __import__(module_name)
            _add_module(module)
            for orig, new in mappings.iteritems():
                try: util.rename_key(db_config, orig, new)
                except KeyError: pass

            # Connect
            self.__db = module.connect(**db_config)

            # Configure connection
            if hasattr(self.db, 'set_isolation_level'):
                import psycopg2.extensions
                self.db.set_isolation_level(
                    psycopg2.extensions.ISOLATION_LEVEL_READ_COMMITTED)
            if schemas != None:
                search_path = [self.esc_name(s) for s in schemas.split(',')]
                search_path.append(value(run_query(self, 'SHOW search_path',
                    log_level=4)))
                run_query(self, 'SET search_path TO '+(','.join(search_path)),
                    log_level=3)

        return self.__db

    class DbCursor(Proxy):
        def __init__(self, outer):
            Proxy.__init__(self, outer.db.cursor())
            self.outer = outer
            self.query_results = outer.query_results
            self.query_lookup = None
            self.result = []

        def execute(self, query):
            self._is_insert = query.startswith('INSERT')
            self.query_lookup = query
            try:
                try:
                    cur = self.inner.execute(query)
                    self.outer.do_autocommit()
                finally: self.query = get_cur_query(self.inner, query)
            except Exception, e:
                _add_cursor_info(e, self, query)
                self.result = e # cache the exception as the result
                self._cache_result()
                raise

            # Always cache certain queries
            if query.startswith('CREATE') or query.startswith('ALTER'):
                # structural changes
                # Rest of query must be unique in the face of name collisions,
                # so don't cache ADD COLUMN unless it has distinguishing comment
                if query.find('ADD COLUMN') < 0 or query.endswith('*/'):
                    self._cache_result()
            elif self.rowcount == 0 and query.startswith('SELECT'): # empty
                consume_rows(self) # fetch all rows so result will be cached

            return cur

        def fetchone(self):
            row = self.inner.fetchone()
            if row != None: self.result.append(row)
            # otherwise, fetched all rows
            else: self._cache_result()
            return row

        def _cache_result(self):
            # For inserts that return a result set, don't cache result set since
            # inserts are not idempotent. Other non-SELECT queries don't have
            # their result set read, so only exceptions will be cached (an
            # invalid query will always be invalid).
            if self.query_results != None and (not self._is_insert
                or isinstance(self.result, Exception)):

                assert self.query_lookup != None
                self.query_results[self.query_lookup] = self.CacheCursor(
                    util.dict_subset(dicts.AttrsDictView(self),
                    ['query', 'result', 'rowcount', 'description']))

        class CacheCursor:
            def __init__(self, cached_result): self.__dict__ = cached_result

            def execute(self, *args, **kw_args):
                if isinstance(self.result, Exception): raise self.result
                # otherwise, result is a rows list
                self.iter = iter(self.result)

            def fetchone(self):
                try: return self.iter.next()
                except StopIteration: return None

    def esc_value(self, value):
        try: str_ = self.mogrify('%s', [value])
        except NotImplementedError, e:
            module = util.root_module(self.db)
            if module == 'MySQLdb':
                import _mysql
                str_ = _mysql.escape_string(value)
            else: raise e
        return strings.to_unicode(str_)

    def esc_name(self, name): return esc_name(self, name) # calls global func

    def std_code(self, str_):
        '''Standardizes SQL code.
        * Ensures that string literals are prefixed by `E`
        '''
        if str_.startswith("'"): str_ = 'E'+str_
        return str_

    def can_mogrify(self):
        module = util.root_module(self.db)
        return module == 'psycopg2'

    def mogrify(self, query, params=None):
        if self.can_mogrify(): return self.db.cursor().mogrify(query, params)
        else: raise NotImplementedError("Can't mogrify query")

    def print_notices(self):
        if hasattr(self.db, 'notices'):
            for msg in self.db.notices:
                if msg not in self._notices_seen:
                    self._notices_seen.add(msg)
                    self.log_debug(msg, level=2)

    def run_query(self, query, cacheable=False, log_level=2,
        debug_msg_ref=None):
        '''
        @param log_ignore_excs The log_level will be increased by 2 if the query
            throws one of these exceptions.
        @param debug_msg_ref If specified, the log message will be returned in
            this instead of being output. This allows you to filter log messages
            depending on the result of the query.
        '''
        assert query != None

        if not self.caching: cacheable = False
        used_cache = False

        def log_msg(query):
            if used_cache: cache_status = 'cache hit'
            elif cacheable: cache_status = 'cache miss'
            else: cache_status = 'non-cacheable'
            return 'DB query: '+cache_status+':\n'+strings.as_code(query, 'SQL')

        try:
            # Get cursor
            if cacheable:
                try:
                    cur = self.query_results[query]
                    used_cache = True
                except KeyError: cur = self.DbCursor(self)
            else: cur = self.db.cursor()

            # Log query
            if self.debug and debug_msg_ref == None: # log before running
                self.log_debug(log_msg(query), log_level)

            # Run query
            cur.execute(query)
        finally:
            self.print_notices()
            if self.debug and debug_msg_ref != None: # return after running
                debug_msg_ref[0] = log_msg(str(get_cur_query(cur, query)))

        return cur

    def is_cached(self, query): return query in self.query_results

    def with_autocommit(self, func):
        import psycopg2.extensions

        prev_isolation_level = self.db.isolation_level
        self.db.set_isolation_level(
            psycopg2.extensions.ISOLATION_LEVEL_AUTOCOMMIT)
        try: return func()
        finally: self.db.set_isolation_level(prev_isolation_level)

    def with_savepoint(self, func):
        savepoint = 'level_'+str(self._savepoint)
        self.run_query('SAVEPOINT '+savepoint, log_level=4)
        self._savepoint += 1
        try: return func()
        except:
            self.run_query('ROLLBACK TO SAVEPOINT '+savepoint, log_level=4)
            raise
        finally:
            # Always release savepoint, because after ROLLBACK TO SAVEPOINT,
            # "The savepoint remains valid and can be rolled back to again"
            # (http://www.postgresql.org/docs/8.3/static/sql-rollback-to.html).
            self.run_query('RELEASE SAVEPOINT '+savepoint, log_level=4)

            self._savepoint -= 1
            assert self._savepoint >= 0

            self.do_autocommit() # OK to do this after ROLLBACK TO SAVEPOINT

    def do_autocommit(self):
        '''Autocommits if outside savepoint'''
        assert self._savepoint >= 0
        if self.autocommit and self._savepoint == 0:
            self.log_debug('Autocommitting', level=4)
            self.db.commit()

    def col_info(self, col):
        table = sql_gen.Table('columns', 'information_schema')
        type_ = sql_gen.Coalesce(sql_gen.Nullif(sql_gen.Col('data_type'),
            'USER-DEFINED'), sql_gen.Col('udt_name'))
        cols = [type_, 'column_default', cast(self, 'boolean', 'is_nullable')]

        conds = [('table_name', col.table.name), ('column_name', col.name)]
        schema = col.table.schema
        if schema != None: conds.append(('table_schema', schema))

        type_, default, nullable = row(select(self, table, cols, conds,
            order_by='table_schema', limit=1, cacheable=False, log_level=4))
            # TODO: order_by search_path schema order
        default = sql_gen.as_Code(default, self)

        return sql_gen.TypedCol(col.name, type_, default, nullable)

    def TempFunction(self, name):
        if self.debug_temp: schema = None
        else: schema = 'pg_temp'
        return sql_gen.Function(name, schema)

connect = DbConn
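
# Usage sketch (the connection values below are hypothetical):
#     db = connect(dict(engine='PostgreSQL', host='localhost', user='user',
#         password='password', database='dbname', schemas='public'))
#     cur = db.run_query('SELECT 1')
# db_config keys follow db_config_names; 'engine' must be a key of db_engines.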

##### Recoverable querying

def with_savepoint(db, func): return db.with_savepoint(func)

def run_query(db, query, recover=None, cacheable=False, log_level=2,
    log_ignore_excs=None, **kw_args):
    '''For params, see DbConn.run_query()'''
    if recover == None: recover = False
    if log_ignore_excs == None: log_ignore_excs = ()
    log_ignore_excs = tuple(log_ignore_excs)

    debug_msg_ref = None # usually, db.run_query() logs query before running it
    # But if filtering with log_ignore_excs, wait until after exception parsing
    if log_ignore_excs != () or not db.can_mogrify(): debug_msg_ref = [None]

    try:
        try:
            def run(): return db.run_query(query, cacheable, log_level,
                debug_msg_ref, **kw_args)
            if recover and not db.is_cached(query):
                return with_savepoint(db, run)
            else: return run() # don't need savepoint if cached
        except Exception, e:
            msg = exc.str_(e)

            match = re.search(r'duplicate key value violates unique constraint '
                r'"((_?[^\W_]+)_.+?)"', msg)
            if match:
                constraint, table = match.groups()
                cols = []
                if recover: # need auto-rollback to run index_cols()
                    try: cols = index_cols(db, table, constraint)
                    except NotImplementedError: pass
                raise DuplicateKeyException(constraint, cols, e)

            match = re.search(r'null value in column "(.+?)" violates not-null'
                r' constraint', msg)
            if match: raise NullValueException('NOT NULL', [match.group(1)], e)

            match = re.search(r'\b(?:invalid input (?:syntax|value)\b.*?'
                r'|date/time field value out of range): "(.+?)"\n'
                r'(?:(?s).*?)\bfunction "(.+?)"', msg)
            if match:
                value, name = match.groups()
                raise FunctionValueException(name, strings.to_unicode(value), e)

            match = re.search(r'column "(.+?)" is of type (.+?) but expression '
                r'is of type', msg)
            if match:
                col, type_ = match.groups()
                raise MissingCastException(type_, col, e)

            match = re.search(r'\b(\S+) "(.+?)".*? already exists', msg)
            if match:
                type_, name = match.groups()
                raise DuplicateException(type_, name, e)

            raise # no specific exception raised
    except log_ignore_excs:
        log_level += 2
        raise
    finally:
        if debug_msg_ref != None and debug_msg_ref[0] != None:
            db.log_debug(debug_msg_ref[0], log_level)
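
# Usage sketch (table and column names are hypothetical):
#     try: run_query(db, 'INSERT INTO t (id) VALUES (1)', recover=True)
#     except DuplicateKeyException, e: print e.cols
# With recover=True the query runs inside a savepoint, so the error parsing
# above can translate raw driver errors into the typed exceptions defined
# earlier in this module without aborting the enclosing transaction.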

##### Basic queries

def next_version(name):
    version = 1 # first existing name was version 0
    match = re.match(r'^(.*)#(\d+)$', name)
    if match:
        name, version = match.groups()
        version = int(version)+1
    return sql_gen.concat(name, '#'+str(version))
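
# Worked example: next_version('table') -> 'table#1' and
# next_version('table#1') -> 'table#2' (the final string is assembled by
# sql_gen.concat(), which may shorten the base name to fit identifier limits).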

def lock_table(db, table, mode):
    table = sql_gen.as_Table(table)
    run_query(db, 'LOCK TABLE '+table.to_str(db)+' IN '+mode+' MODE')

def run_query_into(db, query, into=None, add_indexes_=False, **kw_args):
    '''Outputs a query to a temp table.
    For params, see run_query().
    '''
    if into == None: return run_query(db, query, **kw_args)

    assert isinstance(into, sql_gen.Table)

    into.is_temp = True
    # "temporary tables cannot specify a schema name", so remove schema
    into.schema = None

    kw_args['recover'] = True
    kw_args.setdefault('log_ignore_excs', (DuplicateException,))

    temp = not db.debug_temp # tables are permanent in debug_temp mode

    # Create table
    while True:
        create_query = 'CREATE'
        if temp: create_query += ' TEMP'
        create_query += ' TABLE '+into.to_str(db)+' AS\n'+query

        try:
            cur = run_query(db, create_query, **kw_args)
                # CREATE TABLE AS sets rowcount to # rows in query
            break
        except DuplicateException, e:
            into.name = next_version(into.name)
            # try again with next version of name

    if add_indexes_: add_indexes(db, into)

    # According to the PostgreSQL doc, "The autovacuum daemon cannot access and
    # therefore cannot vacuum or analyze temporary tables. [...] if a temporary
    # table is going to be used in complex queries, it is wise to run ANALYZE on
    # the temporary table after it is populated."
    # (http://www.postgresql.org/docs/9.1/static/sql-createtable.html)
    # If into is not a temp table, ANALYZE is useful but not required.
    analyze(db, into)

    return cur
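
# Usage sketch (the temp-table name is hypothetical):
#     cur = run_query_into(db, mk_select(db, 'src_table', order_by=None),
#         into=sql_gen.Table('tmp_results'))
# If the name collides, the CREATE is retried under next_version() of the name.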

order_by_pkey = object() # tells mk_select() to order by the pkey

distinct_on_all = object() # tells mk_select() to SELECT DISTINCT ON all columns

def mk_select(db, tables, fields=None, conds=None, distinct_on=[], limit=None,
    start=None, order_by=order_by_pkey, default_table=None):
    '''
    @param tables The single table to select from, or a list of tables to join
        together, with tables after the first being sql_gen.Join objects
    @param fields Use None to select all fields in the table
    @param conds WHERE conditions: [(compare_left_side, compare_right_side),...]
        * container can be any iterable type
        * compare_left_side: sql_gen.Code|str (for col name)
        * compare_right_side: sql_gen.ValueCond|literal value
    @param distinct_on The columns to SELECT DISTINCT ON, or distinct_on_all to
        use all columns
    @return query
    '''
    # Parse tables param
    tables = lists.mk_seq(tables)
    tables = list(tables) # don't modify input! (list() copies input)
    table0 = sql_gen.as_Table(tables.pop(0)) # first table is separate

    # Parse other params
    if conds == None: conds = []
    elif dicts.is_dict(conds): conds = conds.items()
    conds = list(conds) # don't modify input! (list() copies input)
    assert limit == None or type(limit) == int
    assert start == None or type(start) == int
    if order_by is order_by_pkey:
        if distinct_on != []: order_by = None
        else: order_by = pkey(db, table0, recover=True)

    query = 'SELECT'

    def parse_col(col): return sql_gen.as_Col(col, default_table).to_str(db)

    # DISTINCT ON columns
    if distinct_on != []:
        query += '\nDISTINCT'
        if distinct_on is not distinct_on_all:
            query += ' ON ('+(', '.join(map(parse_col, distinct_on)))+')'

    # Columns
    if fields == None:
        if query.find('\n') >= 0: whitespace = '\n'
        else: whitespace = ' '
        query += whitespace+'*'
    else:
        assert fields != []
        query += '\n'+('\n, '.join(map(parse_col, fields)))

    # Main table
    query += '\nFROM '+table0.to_str(db)

    # Add joins
    left_table = table0
    for join_ in tables:
        table = join_.table

        # Parse special values
        if join_.type_ is sql_gen.filter_out: # filter no match
            conds.append((sql_gen.Col(table_not_null_col(db, table), table),
                sql_gen.CompareCond(None, '~=')))

        query += '\n'+join_.to_str(db, left_table)

        left_table = table

    missing = True
    if conds != []:
        if len(conds) == 1: whitespace = ' '
        else: whitespace = '\n'
        query += '\n'+sql_gen.combine_conds([sql_gen.ColValueCond(l, r)
            .to_str(db) for l, r in conds], 'WHERE')
        missing = False
    if order_by != None:
        query += '\nORDER BY '+sql_gen.as_Col(order_by, table0).to_str(db)
    if limit != None: query += '\nLIMIT '+str(limit); missing = False
    if start != None:
        if start != 0: query += '\nOFFSET '+str(start)
        missing = False
    if missing: warnings.warn(DbWarning(
        'SELECT statement missing a WHERE, LIMIT, or OFFSET clause: '+query))

    return query
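
# Usage sketch (table and column names are hypothetical):
#     query = mk_select(db, 'plot', fields=['id'], conds={'site_id': 7},
#         limit=10, order_by=None)
#     cur = run_query(db, query)
# conds may also be a list of (left_side, right_side) pairs, as documented above.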

def select(db, *args, **kw_args):
    '''For params, see mk_select() and run_query()'''
    recover = kw_args.pop('recover', None)
    cacheable = kw_args.pop('cacheable', True)
    log_level = kw_args.pop('log_level', 2)

    return run_query(db, mk_select(db, *args, **kw_args), recover, cacheable,
        log_level=log_level)

def mk_insert_select(db, table, cols=None, select_query=None, returning=None,
    embeddable=False, ignore=False):
    '''
    @param returning str|None An inserted column (such as pkey) to return
    @param embeddable Whether the query should be embeddable as a nested SELECT.
        Warning: If you set this and cacheable=True when the query is run, the
        query will be fully cached, not just if it raises an exception.
    @param ignore Whether to ignore duplicate keys.
    '''
    table = sql_gen.remove_table_rename(sql_gen.as_Table(table))
    if cols == []: cols = None # no cols (all defaults) = unknown col names
    if cols != None: cols = [sql_gen.to_name_only_col(c, table) for c in cols]
    if select_query == None: select_query = 'DEFAULT VALUES'
    if returning != None: returning = sql_gen.as_Col(returning, table)

    first_line = 'INSERT INTO '+table.to_str(db)

    def mk_insert(select_query):
        query = first_line
        if cols != None:
            query += '\n('+(', '.join((c.to_str(db) for c in cols)))+')'
        query += '\n'+select_query

        if returning != None:
            returning_name_col = sql_gen.to_name_only_col(returning)
            query += '\nRETURNING '+returning_name_col.to_str(db)

        return query

    return_type = 'unknown'
    if returning != None: return_type = returning.to_str(db)+'%TYPE'

    lang = 'sql'
    if ignore:
        assert cols != None
        # Always return something to set the correct rowcount
        if returning == None: returning = sql_gen.NamedCol('NULL', None)

        embeddable = True # must use function
        lang = 'plpgsql'
        row = [sql_gen.Col(c.name, 'row') for c in cols]

        query = '''\
DECLARE
    row '''+table.to_str(db)+'''%ROWTYPE;
BEGIN
    /* Need an EXCEPTION block for each individual row because "When an error is
    caught by an EXCEPTION clause, [...] all changes to persistent database
    state within the block are rolled back."
    This is unfortunate because "A block containing an EXCEPTION clause is
    significantly more expensive to enter and exit than a block without one."
    (http://www.postgresql.org/docs/8.3/static/plpgsql-control-structures.html\
#PLPGSQL-ERROR-TRAPPING)
    */
    FOR '''+(', '.join((c.to_str(db) for c in row)))+''' IN
'''+select_query+'''
    LOOP
        BEGIN
            RETURN QUERY
'''+mk_insert(sql_gen.Values(row).to_str(db))+'''
;
        EXCEPTION
            WHEN unique_violation THEN NULL; -- continue to next row
        END;
    END LOOP;
END;\
'''
    else: query = mk_insert(select_query)

    if embeddable:
        # Create function
        function_name = sql_gen.clean_name(first_line)
        while True:
            try:
                function = db.TempFunction(function_name)

                function_query = '''\
CREATE FUNCTION '''+function.to_str(db)+'''()
RETURNS SETOF '''+return_type+'''
LANGUAGE '''+lang+'''
AS $$
'''+query+'''
$$;
'''
                run_query(db, function_query, recover=True, cacheable=True,
                    log_ignore_excs=(DuplicateException,))
                break # this version was successful
            except DuplicateException, e:
                function_name = next_version(function_name)
                # try again with next version of name

        # Return query that uses function
        cols = None
        if returning != None: cols = [returning]
        func_table = sql_gen.NamedTable('f', sql_gen.FunctionCall(function),
            cols) # AS clause requires function alias
        return mk_select(db, func_table, start=0, order_by=None)

    return query

def insert_select(db, table, *args, **kw_args):
    '''For params, see mk_insert_select() and run_query_into()
    @param into sql_gen.Table with suggested name of temp table to put RETURNING
        values in
    '''
    into = kw_args.pop('into', None)
    if into != None: kw_args['embeddable'] = True
    recover = kw_args.pop('recover', None)
    if kw_args.get('ignore', False): recover = True
    cacheable = kw_args.pop('cacheable', True)
    log_level = kw_args.pop('log_level', 2)

    cur = run_query_into(db, mk_insert_select(db, table, *args, **kw_args),
        into, recover=recover, cacheable=cacheable, log_level=log_level)
    autoanalyze(db, table)
    return cur

default = sql_gen.default # tells insert() to use the default value for a column

def insert(db, table, row, *args, **kw_args):
    '''For params, see insert_select()'''
    if lists.is_seq(row): cols = None
    else:
        cols = row.keys()
        row = row.values()
    row = list(row) # ensure that "== []" works

    if row == []: query = None
    else: query = sql_gen.Values(row).to_str(db)

    return insert_select(db, table, cols, query, *args, **kw_args)
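
# Usage sketch (table and column names are hypothetical):
#     insert(db, 'plot', dict(site_id=7, name='A'), recover=True)
# A dict row supplies the column names; a sequence row inserts positionally.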

def mk_update(db, table, changes=None, cond=None, in_place=False):
    '''
    @param changes [(col, new_value),...]
        * container can be any iterable type
        * col: sql_gen.Code|str (for col name)
        * new_value: sql_gen.Code|literal value
    @param cond sql_gen.Code WHERE condition. e.g. use sql_gen.*Cond objects.
    @param in_place If set, locks the table and updates rows in place.
        This avoids creating dead rows in PostgreSQL.
        * cond must be None
    @return str query
    '''
    table = sql_gen.as_Table(table)
    changes = [(sql_gen.to_name_only_col(c, table), sql_gen.as_Value(v))
        for c, v in changes]

    if in_place:
        assert cond == None

        query = 'ALTER TABLE '+table.to_str(db)+'\n'
        query += ',\n'.join(('ALTER COLUMN '+c.to_str(db)+' TYPE '
            +db.col_info(sql_gen.with_default_table(c, table)).type
            +'\nUSING '+v.to_str(db) for c, v in changes))
    else:
        query = 'UPDATE '+table.to_str(db)+'\nSET\n'
        query += ',\n'.join((c.to_str(db)+' = '+v.to_str(db)
            for c, v in changes))
        if cond != None: query += '\nWHERE\n'+cond.to_str(db)

    return query
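
# Usage sketch (names and the expression are hypothetical): a plain UPDATE,
#     update(db, 'plot', [('name', 'B')])
# versus an in-place rewrite that avoids dead rows via ALTER COLUMN ... USING:
#     update(db, 'plot', [('name', new_value_expr)], in_place=True)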

def update(db, table, *args, **kw_args):
    '''For params, see mk_update() and run_query()'''
    recover = kw_args.pop('recover', None)
    cacheable = kw_args.pop('cacheable', False)
    log_level = kw_args.pop('log_level', 2)

    cur = run_query(db, mk_update(db, table, *args, **kw_args), recover,
        cacheable, log_level=log_level)
    autoanalyze(db, table)
    return cur

def last_insert_id(db):
    module = util.root_module(db.db)
    if module == 'psycopg2': return value(run_query(db, 'SELECT lastval()'))
    elif module == 'MySQLdb': return db.insert_id()
    else: return None

def mk_flatten_mapping(db, into, cols, preserve=[], as_items=False):
    '''Creates a mapping from original column names (which may have collisions)
    to names that will be distinct among the columns' tables.
    This is meant to be used for several tables that are being joined together.
    @param cols The columns to combine. Duplicates will be removed.
    @param into The table for the new columns.
    @param preserve [sql_gen.Col...] Columns not to rename. Note that these
        columns will be included in the mapping even if they are not in cols.
        The tables of the provided Col objects will be changed to into, so make
        copies of them if you want to keep the original tables.
    @param as_items Whether to return a list of dict items instead of a dict
    @return dict(orig_col=new_col, ...)
        * orig_col: sql_gen.Col(orig_col_name, orig_table)
        * new_col: sql_gen.Col(orig_col_name, into)
        * All mappings use the into table so its name can easily be
          changed for all columns at once
    '''
    cols = lists.uniqify(cols)

    items = []
    for col in preserve:
        orig_col = copy.copy(col)
        col.table = into
        items.append((orig_col, col))
    preserve = set(preserve)
    for col in cols:
        if col not in preserve:
            items.append((col, sql_gen.Col(str(col), into, col.srcs)))

    if not as_items: items = dict(items)
    return items

def flatten(db, into, joins, cols, limit=None, start=None, **kw_args):
    '''For params, see mk_flatten_mapping()
    @return See return value of mk_flatten_mapping()
    '''
    items = mk_flatten_mapping(db, into, cols, as_items=True, **kw_args)
    cols = [sql_gen.NamedCol(new.name, old) for old, new in items]
    run_query_into(db, mk_select(db, joins, cols, limit=limit, start=start),
        into=into, add_indexes_=True)
    return dict(items)

def track_data_error(db, errors_table, cols, value, error_code, error):
    '''
    @param errors_table If None, does nothing.
    '''
    if errors_table == None or cols == (): return

    for col in cols:
        try:
            insert(db, errors_table, dict(column=col.name, value=value,
                error_code=error_code, error=error), recover=True,
                cacheable=True, log_level=4)
        except DuplicateKeyException: pass

def cast(db, type_, col, errors_table=None):
    '''Casts an (unrenamed) column or value.
    If errors_table set and col has srcs, saves errors in errors_table (using
    col's srcs attr as the source columns) and converts errors to warnings.
    @param col str|sql_gen.Col|sql_gen.Literal
    @param errors_table None|sql_gen.Table|str
    '''
    col = sql_gen.as_Col(col)
    save_errors = (errors_table != None and isinstance(col, sql_gen.Col)
        and col.srcs != ())
    if not save_errors: return sql_gen.Cast(type_, col) # can't save errors

    assert not isinstance(col, sql_gen.NamedCol)

    errors_table = sql_gen.as_Table(errors_table)
    srcs = map(sql_gen.to_name_only_col, col.srcs)
    function_name = str(sql_gen.FunctionCall(type_, *srcs))
    function = db.TempFunction(function_name)

    while True:
        # Create function definition
        errors_table_cols = map(sql_gen.Col,
            ['column', 'value', 'error_code', 'error'])
        query = '''\
CREATE FUNCTION '''+function.to_str(db)+'''(value text)
RETURNS '''+type_+'''
LANGUAGE plpgsql
STRICT
AS $$
BEGIN
    /* The explicit cast to the return type is needed to make the cast happen
    inside the try block. (Implicit casts to the return type happen at the end
    of the function, outside any block.) */
    RETURN value::'''+type_+''';
EXCEPTION
    WHEN data_exception THEN
        -- Save error in errors table.
        DECLARE
            error_code text := SQLSTATE;
            error text := SQLERRM;
            "column" text;
        BEGIN
            -- Insert the value and error for *each* source column.
            FOR "column" IN
'''+mk_select(db, sql_gen.NamedValues('c', None, [[c.name] for c in srcs]),
    order_by=None, start=0)+'''
            LOOP
                BEGIN
'''+mk_insert_select(db, errors_table, errors_table_cols,
    sql_gen.Values(errors_table_cols).to_str(db))+''';
                EXCEPTION
                    WHEN unique_violation THEN NULL; -- continue to next row
                END;
            END LOOP;
        END;

        RAISE WARNING '%', SQLERRM;
        RETURN NULL;
END;
$$;
'''

        # Create function
        try:
            run_query(db, query, recover=True, cacheable=True,
                log_ignore_excs=(DuplicateException,))
            break # successful
        except DuplicateException:
            function.name = next_version(function.name)
            # try again with next version of name

    return sql_gen.FunctionCall(function, col)

##### Database structure queries

def table_row_count(db, table, recover=None):
    return value(run_query(db, mk_select(db, table, [sql_gen.row_count],
        order_by=None, start=0), recover=recover, log_level=3))

def table_cols(db, table, recover=None):
    return list(col_names(select(db, table, limit=0, order_by=None,
        recover=recover, log_level=4)))

def pkey(db, table, recover=None):
    '''Assumed to be first column in table'''
    return table_cols(db, table, recover)[0]

not_null_col = 'not_null_col'

def table_not_null_col(db, table, recover=None):
    '''Name assumed to be the value of not_null_col. If not found, uses pkey.'''
    if not_null_col in table_cols(db, table, recover): return not_null_col
    else: return pkey(db, table, recover)

def index_cols(db, table, index):
    '''Can also use this for UNIQUE constraints, because a UNIQUE index is
    automatically created. When you don't know whether something is a UNIQUE
    constraint or a UNIQUE index, use this function.'''
    module = util.root_module(db.db)
    if module == 'psycopg2':
        return list(values(run_query(db, '''\
SELECT attname
FROM
(
        SELECT attnum, attname
        FROM pg_index
        JOIN pg_class index ON index.oid = indexrelid
        JOIN pg_class table_ ON table_.oid = indrelid
        JOIN pg_attribute ON attrelid = indrelid AND attnum = ANY (indkey)
        WHERE
            table_.relname = '''+db.esc_value(table)+'''
            AND index.relname = '''+db.esc_value(index)+'''
    UNION
        SELECT attnum, attname
        FROM
        (
            SELECT
                indrelid
                , (regexp_matches(indexprs, E':varattno (\\\\d+)', 'g'))[1]::int
                    AS indkey
            FROM pg_index
            JOIN pg_class index ON index.oid = indexrelid
            JOIN pg_class table_ ON table_.oid = indrelid
            WHERE
                table_.relname = '''+db.esc_value(table)+'''
                AND index.relname = '''+db.esc_value(index)+'''
        ) s
        JOIN pg_attribute ON attrelid = indrelid AND attnum = indkey
) s
ORDER BY attnum
'''
            , cacheable=True, log_level=4)))
    else: raise NotImplementedError("Can't list index columns for "+module+
        ' database')

def constraint_cols(db, table, constraint):
    module = util.root_module(db.db)
    if module == 'psycopg2':
        return list(values(run_query(db, '''\
SELECT attname
FROM pg_constraint
JOIN pg_class ON pg_class.oid = conrelid
JOIN pg_attribute ON attrelid = conrelid AND attnum = ANY (conkey)
WHERE
    relname = '''+db.esc_value(table)+'''
    AND conname = '''+db.esc_value(constraint)+'''
ORDER BY attnum
'''
            )))
    else: raise NotImplementedError("Can't list constraint columns for "+module+
        ' database')

row_num_col = '_row_num'

def add_index(db, exprs, table=None, unique=False, ensure_not_null_=True):
    '''Adds an index on column(s) or expression(s) if it doesn't already exist.
    Currently, only function calls are supported as expressions.
    @param ensure_not_null_ If set, translates NULL values to sentinel values.
        This allows indexes to be used for comparisons where NULLs are equal.
    '''
    exprs = lists.mk_seq(exprs)

    # Parse exprs
    old_exprs = exprs[:]
    exprs = []
    cols = []
    for i, expr in enumerate(old_exprs):
        expr = sql_gen.as_Col(expr, table)

        # Handle nullable columns
        if ensure_not_null_:
            try: expr = ensure_not_null(db, expr)
            except KeyError: pass # unknown type, so just create plain index

        # Extract col
        expr = copy.deepcopy(expr) # don't modify input!
        if isinstance(expr, sql_gen.FunctionCall):
            col = expr.args[0]
            expr = sql_gen.Expr(expr)
        else: col = expr
        assert isinstance(col, sql_gen.Col)

        # Extract table
        if table == None:
            assert sql_gen.is_table_col(col)
            table = col.table

        col.table = None

        exprs.append(expr)
        cols.append(col)

    table = sql_gen.as_Table(table)
    index = sql_gen.Table(str(sql_gen.Col(','.join(map(str, cols)), table)))

    # Add index
    while True:
        str_ = 'CREATE'
        if unique: str_ += ' UNIQUE'
        str_ += ' INDEX '+index.to_str(db)+' ON '+table.to_str(db)+' ('+(
            ', '.join((v.to_str(db) for v in exprs)))+')'

        try:
            run_query(db, str_, recover=True, cacheable=True, log_level=3,
                log_ignore_excs=(DuplicateException,))
            break
        except DuplicateException:
            index.name = next_version(index.name)
            # try again with next version of name
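
# Usage sketch (table and column names are hypothetical):
#     add_index(db, 'site_id', 'plot')                  # plain column index
#     add_index(db, 'site_id', 'plot', unique=True)     # unique index
# Nullable columns are first wrapped by ensure_not_null() so NULLs compare equal.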

def add_pkey(db, table, cols=None, recover=None):
    '''Adds a primary key.
    @param cols [sql_gen.Col,...] The columns in the primary key.
        Defaults to the first column in the table.
    @pre The table must not already have a primary key.
    '''
    table = sql_gen.as_Table(table)
    if cols == None: cols = [pkey(db, table, recover)]
    col_strs = [sql_gen.to_name_only_col(v).to_str(db) for v in cols]

    run_query(db, 'ALTER TABLE '+table.to_str(db)+' ADD PRIMARY KEY ('
        +(', '.join(col_strs))+')', recover=True, cacheable=True, log_level=3,
        log_ignore_excs=(DuplicateException,))

def add_not_null(db, col):
    table = col.table
    col = sql_gen.to_name_only_col(col)
    run_query(db, 'ALTER TABLE '+table.to_str(db)+' ALTER COLUMN '
        +col.to_str(db)+' SET NOT NULL', cacheable=True, log_level=3)

def add_index_col(db, col, suffix, expr, nullable=True):
    if sql_gen.index_col(col) != None: return # already has index col

    new_col = sql_gen.suffixed_col(col, suffix)

    # Add column
    new_typed_col = sql_gen.TypedCol(new_col.name, db.col_info(col).type)
    add_col(db, col.table, new_typed_col, comment='src: '+repr(col),
        log_level=3)
    new_col.name = new_typed_col.name # propagate any renaming

    update(db, col.table, [(new_col, expr)], in_place=True, cacheable=True,
        log_level=3)
    if not nullable: add_not_null(db, new_col)
    add_index(db, new_col)

    col.table.index_cols[col.name] = new_col

# Controls when ensure_not_null() will use index columns
not_null_index_cols_min_rows = 0 # rows; initially always use index columns

def ensure_not_null(db, col):
    '''For params, see sql_gen.ensure_not_null()'''
    expr = sql_gen.ensure_not_null(db, col)

    # If a nullable column in a temp table, add separate index column instead.
    # Note that for small datasources, this adds 6-25% to the total import time.
    if (sql_gen.is_temp_col(col) and isinstance(expr, sql_gen.EnsureNotNull)
        and table_row_count(db, col.table) >= not_null_index_cols_min_rows):
        add_index_col(db, col, '::NOT NULL', expr, nullable=False)
        expr = sql_gen.index_col(col)

    return expr

already_indexed = object() # tells add_indexes() the pkey has already been added

def add_indexes(db, table, has_pkey=True):
    '''Adds an index on all columns in a table.
    @param has_pkey bool|already_indexed Whether a pkey instead of a regular
        index should be added on the first column.
        * If already_indexed, the pkey is assumed to have already been added
    '''
    cols = table_cols(db, table)
    if has_pkey:
        if has_pkey is not already_indexed: add_pkey(db, table)
        cols = cols[1:]
    for col in cols: add_index(db, col, table)

def add_col(db, table, col, comment=None, **kw_args):
    '''
    @param col TypedCol Name may be versioned, so be sure to propagate any
        renaming back to any source column for the TypedCol.
    @param comment None|str SQL comment used to distinguish columns of the same
        name from each other when they contain different data, to allow the
        ADD COLUMN query to be cached. If not set, query will not be cached.
    '''
    assert isinstance(col, sql_gen.TypedCol)

    while True:
        str_ = 'ALTER TABLE '+table.to_str(db)+' ADD COLUMN '+col.to_str(db)
        if comment != None: str_ += ' '+sql_gen.esc_comment(comment)

        try:
            run_query(db, str_, recover=True, cacheable=True, **kw_args)
            break
        except DuplicateException:
            col.name = next_version(col.name)
            # try again with next version of name

row_num_typed_col = sql_gen.TypedCol(row_num_col, 'serial', nullable=False,
    constraints='PRIMARY KEY')

def add_row_num(db, table):
    '''Adds a row number column to a table. Its name is in row_num_col. It will
    be the primary key.'''
    add_col(db, table, row_num_typed_col, log_level=3)

def cast_temp_col(db, type_, col, errors_table=None):
    '''Like cast(), but creates a new column with the cast values if the input
    is a column.
    @return The new column or cast value
    '''
    def cast_(col): return cast(db, type_, col, errors_table)

    try: col = sql_gen.underlying_col(col)
    except sql_gen.NoUnderlyingTableException: return sql_gen.wrap(cast_, col)

    table = col.table
    new_col = sql_gen.Col(sql_gen.concat(col.name, '::'+type_), table, col.srcs)
    expr = cast_(col)

    # Add column
    new_typed_col = sql_gen.TypedCol(new_col.name, type_)
    add_col(db, table, new_typed_col, comment='src: '+repr(col))
    new_col.name = new_typed_col.name # propagate any renaming

    update(db, table, [(new_col, expr)], in_place=True, cacheable=True)
    add_index(db, new_col)

    return new_col

def drop_table(db, table):
    table = sql_gen.as_Table(table)
    return run_query(db, 'DROP TABLE IF EXISTS '+table.to_str(db)+' CASCADE')

def create_table(db, table, cols, has_pkey=True, col_indexes=True):
    '''Creates a table.
    @param cols [sql_gen.TypedCol,...] The column names and types
    @param has_pkey If set, the first column becomes the primary key.
    @param col_indexes bool|[ref]
        * If True, indexes will be added on all non-pkey columns.
        * If a list reference, [0] will be set to a function to do this.
          This can be used to delay index creation until the table is populated.
    '''
    table = sql_gen.as_Table(table)

    if has_pkey:
        cols[0] = pkey = copy.copy(cols[0]) # don't modify input!
        pkey.constraints = 'PRIMARY KEY'

    str_ = 'CREATE TABLE '+table.to_str(db)+' (\n'
    str_ += '\n, '.join(v.to_str(db) for v in cols)
    str_ += '\n);\n'
    run_query(db, str_, cacheable=True, log_level=2)

    # Add indexes
    if has_pkey: has_pkey = already_indexed
    def add_indexes_(): add_indexes(db, table, has_pkey)
    if isinstance(col_indexes, list): col_indexes[0] = add_indexes_ # defer
    elif col_indexes: add_indexes_() # add now
1233 2675 aaronmk
1234 3068 aaronmk
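
# Usage sketch for create_table() above (hypothetical table/columns): create a
# table with a serial pkey and defer the per-column indexes until after a bulk
# load, using the list-ref form of col_indexes:
#     add_indexes_ref = [None]
#     create_table(db, 'samples', [sql_gen.TypedCol('sample_id', 'serial'),
#         sql_gen.TypedCol('value', 'text')], col_indexes=add_indexes_ref)
#     # ... bulk-load rows here ...
#     add_indexes_ref[0]() # now add the deferred indexes
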
def analyze(db, table):
    table = sql_gen.as_Table(table)
    run_query(db, 'ANALYZE '+table.to_str(db), log_level=3)

def autoanalyze(db, table):
    if db.autoanalyze: analyze(db, table)

def vacuum(db, table):
    table = sql_gen.as_Table(table)
    db.with_autocommit(lambda: run_query(db, 'VACUUM ANALYZE '+table.to_str(db),
        log_level=3))

def truncate(db, table, schema='public', **kw_args):
    '''For params, see run_query()'''
    table = sql_gen.as_Table(table, schema)
    return run_query(db, 'TRUNCATE '+table.to_str(db)+' CASCADE', **kw_args)

def empty_temp(db, tables):
    if db.debug_temp: return # leave temp tables there for debugging
    tables = lists.mk_seq(tables)
    for table in tables: truncate(db, table, log_level=3)

def tables(db, schema_like='public', table_like='%', exact=False):
    if exact: compare = '='
    else: compare = 'LIKE'

    module = util.root_module(db.db)
    if module == 'psycopg2':
        conds = [('schemaname', sql_gen.CompareCond(schema_like, compare)),
            ('tablename', sql_gen.CompareCond(table_like, compare))]
        return values(select(db, 'pg_tables', ['tablename'], conds,
            order_by='tablename', log_level=4))
    elif module == 'MySQLdb':
        return values(run_query(db, 'SHOW TABLES LIKE '+db.esc_value(table_like)
            , cacheable=True, log_level=4))
    else: raise NotImplementedError("Can't list tables for "+module+' database')

def table_exists(db, table):
    table = sql_gen.as_Table(table)
    return list(tables(db, table.schema, table.name, exact=True)) != []
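
# Usage sketch for tables()/table_exists() above (hypothetical names): list the
# public tables matching a LIKE pattern, and test for one exact table:
#     for name in tables(db, 'public', 'taxon%'): print name
#     if table_exists(db, sql_gen.Table('samples', 'public')):
#         truncate(db, 'samples')
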
def function_exists(db, function):
    function = sql_gen.as_Function(function)

    info_table = sql_gen.Table('routines', 'information_schema')
    conds = [('routine_name', function.name)]
    schema = function.schema
    if schema != None: conds.append(('routine_schema', schema))
    # Exclude trigger functions, since they cannot be called directly
    conds.append(('data_type', sql_gen.CompareCond('trigger', '!=')))

    return list(values(select(db, info_table, ['routine_name'], conds,
        order_by='routine_schema', limit=1, log_level=4))) != []
        # TODO: order_by search_path schema order

def errors_table(db, table, if_exists=True):
    '''
    @param if_exists If set, returns None if the errors table doesn't exist
    @return None|sql_gen.Table
    '''
    table = sql_gen.as_Table(table)
    if table.srcs != (): table = table.srcs[0]

    errors_table = sql_gen.suffixed_table(table, '.errors')
    if if_exists and not table_exists(db, errors_table): return None
    return errors_table
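
# Usage sketch for errors_table() above (hypothetical table name): look up the
# ".errors" companion table of a staging table, getting None if it hasn't been
# created yet:
#     errs = errors_table(db, 'samples') # sql_gen.Table or None
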
##### Database management

def empty_db(db, schema='public', **kw_args):
    '''For kw_args, see tables()'''
    for table in tables(db, schema, **kw_args): truncate(db, table, schema)

##### Heuristic queries

def put(db, table, row, pkey_=None, row_ct_ref=None):
    '''Recovers from errors.
    Only works under PostgreSQL (uses INSERT RETURNING).
    '''
    row = sql_gen.ColDict(db, table, row)
    if pkey_ == None: pkey_ = pkey(db, table, recover=True)

    try:
        cur = insert(db, table, row, pkey_, recover=True)
        if row_ct_ref != None and cur.rowcount >= 0:
            row_ct_ref[0] += cur.rowcount
        return value(cur)
    except DuplicateKeyException, e:
        row = sql_gen.ColDict(db, table,
            util.dict_subset_right_join(row, e.cols))
        return value(select(db, table, [pkey_], row, recover=True))
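
# Usage sketch for put() above (hypothetical table/columns): insert a row and
# get its pkey back, or, on a duplicate-key error, select the pkey of the
# matching existing row instead; row_ct_ref accumulates rows actually inserted:
#     row_ct = [0]
#     party_id = put(db, 'party', {'organizationname': 'NYBG'},
#         row_ct_ref=row_ct)
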
def get(db, table, row, pkey, row_ct_ref=None, create=False):
    '''Recovers from errors'''
    try: return value(select(db, table, [pkey], row, limit=1, recover=True))
    except StopIteration:
        if not create: raise
        return put(db, table, row, pkey, row_ct_ref) # insert new row
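
# Usage sketch for get() above (hypothetical table/columns): fetch a row's pkey
# by value, inserting the row first if it doesn't exist:
#     party_id = get(db, 'party', {'organizationname': 'NYBG'}, 'party_id',
#         create=True)
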
def is_func_result(col):
    return col.table.name.find('(') >= 0 and col.name == 'result'

def into_table_name(out_table, in_tables0, mapping, is_func):
    def in_col_str(in_col):
        in_col = sql_gen.remove_col_rename(in_col)
        if isinstance(in_col, sql_gen.Col):
            table = in_col.table
            if table == in_tables0:
                in_col = sql_gen.to_name_only_col(in_col)
            elif is_func_result(in_col): in_col = table # omit col name
        return str(in_col)

    str_ = str(out_table)
    if is_func:
        str_ += '('

        try: value_in_col = mapping['value']
        except KeyError:
            str_ += ', '.join((str(k)+'='+in_col_str(v)
                for k, v in mapping.iteritems()))
        else: str_ += in_col_str(value_in_col)

        str_ += ')'
    else:
        out_col = 'rank'
        try: in_col = mapping[out_col]
        except KeyError: str_ += '_pkeys'
        else: # has a rank column, so hierarchical
            str_ += '['+str(out_col)+'='+in_col_str(in_col)+']'
    return str_
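
# Worked examples of the names produced by into_table_name() above
# (illustrative table/column names): for a plain output table it yields
# 'taxonconcept_pkeys', or 'taxonconcept[rank=rank_verbatim]' when the mapping
# has a 'rank' column; for a SQL function it yields roughly
# '_lowercase(family_verbatim)' when the mapping has a 'value' column, or
# '_dateadd(start=datestart)' otherwise.
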
def put_table(db, out_table, in_tables, mapping, row_ct_ref=None, into=None,
    default=None, is_func=False, on_error=exc.raise_):
    '''Recovers from errors.
    Only works under PostgreSQL (uses INSERT RETURNING).
    IMPORTANT: Must be run at the *beginning* of a transaction.
    @param in_tables The main input table to select from, followed by a list of
        tables to join with it using the main input table's pkey
    @param mapping dict(out_table_col=in_table_col, ...)
        * out_table_col: str (*not* sql_gen.Col)
        * in_table_col: sql_gen.Col|literal-value
    @param into The table to contain the output and input pkeys.
        Defaults to `out_table.name+'_pkeys'`.
    @param default The *output* column to use as the pkey for missing rows.
        If this output column does not exist in the mapping, uses None.
    @param is_func Whether out_table is the name of a SQL function, not a table
    @return sql_gen.Col Where the output pkeys are made available
    '''
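    # Usage sketch for put_table() (hypothetical tables/columns): map staging
    # columns onto an output table and get back a sql_gen.Col of output pkeys,
    # one per input row, stored here in a new 'taxonconcept_pkeys' table:
    #     in_table = sql_gen.Table('staging')
    #     pkeys = put_table(db, 'taxonconcept', [in_table], {
    #         'taxonname': sql_gen.Col('name_verbatim', in_table),
    #         'author': sql_gen.Col('author_verbatim', in_table),
    #     })
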
    out_table = sql_gen.as_Table(out_table)

    def log_debug(msg): db.log_debug(msg, level=1.5)
    def col_ustr(str_):
        return strings.repr_no_u(sql_gen.remove_col_rename(str_))

    log_debug('********** New iteration **********')
    log_debug('Inserting these input columns into '+strings.as_tt(
        out_table.to_str(db))+':\n'+strings.as_table(mapping, ustr=col_ustr))

    is_function = function_exists(db, out_table)

    if is_function: out_pkey = 'result'
    else: out_pkey = pkey(db, out_table, recover=True)
    out_pkey_col = sql_gen.as_Col(out_pkey, out_table)

    if mapping == {}: # need at least one column for INSERT SELECT
        mapping = {out_pkey: None} # ColDict will replace with default value

    # Create input joins from list of input tables
    in_tables_ = in_tables[:] # don't modify input!
    in_tables0 = in_tables_.pop(0) # first table is separate
    errors_table_ = errors_table(db, in_tables0)
    in_pkey = pkey(db, in_tables0, recover=True)
    in_pkey_col = sql_gen.as_Col(in_pkey, in_tables0)
    input_joins = [in_tables0]+[sql_gen.Join(v,
        {in_pkey: sql_gen.join_same_not_null}) for v in in_tables_]

    if into == None:
        into = into_table_name(out_table, in_tables0, mapping, is_func)
    into = sql_gen.as_Table(into)

    # Set column sources
    in_cols = filter(sql_gen.is_table_col, mapping.values())
    for col in in_cols:
        if col.table == in_tables0: col.set_srcs(sql_gen.src_self)

    log_debug('Joining together input tables into temp table')
    # Place in a new table for speed, and so the input isn't modified if values
    # are edited
    in_table = sql_gen.Table('in')
    mapping = dicts.join(mapping, flatten(db, in_table, input_joins, in_cols,
        preserve=[in_pkey_col], start=0))
    input_joins = [in_table]
    db.log_debug('Temp table: '+strings.as_tt(in_table.to_str(db)), level=2)

    mapping = sql_gen.ColDict(db, out_table, mapping)
        # after applying dicts.join() because that returns a plain dict

    # Resolve default value column
    if default != None:
        try: default = mapping[default]
        except KeyError:
            db.log_debug('Default value column '
                +strings.as_tt(strings.repr_no_u(default))
                +' does not exist in mapping, falling back to None', level=2.1)
            default = None

    pkeys_names = [in_pkey, out_pkey]
    pkeys_cols = [in_pkey_col, out_pkey_col]

    pkeys_table_exists_ref = [False]
    def insert_into_pkeys(joins, cols, distinct=False):
        kw_args = {}
        if distinct: kw_args.update(dict(distinct_on=[in_pkey_col]))
        query = mk_select(db, joins, cols, order_by=None, start=0, **kw_args)

        if pkeys_table_exists_ref[0]:
            insert_select(db, into, pkeys_names, query)
        else:
            run_query_into(db, query, into=into)
            pkeys_table_exists_ref[0] = True

    limit_ref = [None]
    conds = set()
    distinct_on = sql_gen.ColDict(db, out_table)
    def mk_main_select(joins, cols):
        distinct_on_cols = [c.to_Col() for c in distinct_on.values()]
        return mk_select(db, joins, cols, conds, distinct_on_cols,
            limit=limit_ref[0], start=0)

    exc_strs = set()
    def log_exc(e):
        e_str = exc.str_(e, first_line_only=True)
        log_debug('Caught exception: '+e_str)
        assert e_str not in exc_strs # avoid infinite loops
        exc_strs.add(e_str)

    def remove_all_rows():
        log_debug('Ignoring all rows')
        limit_ref[0] = 0 # just create an empty pkeys table

    def ignore(in_col, value, e):
        track_data_error(db, errors_table_, in_col.srcs, value, e.cause.pgcode,
            e.cause.pgerror)
        log_debug('Ignoring rows with '+strings.as_tt(repr(in_col))+' = '
            +strings.as_tt(repr(value)))

    def remove_rows(in_col, value, e):
        ignore(in_col, value, e)
        cond = (in_col, sql_gen.CompareCond(value, '!='))
        assert cond not in conds # avoid infinite loops
        conds.add(cond)

    def invalid2null(in_col, value, e):
        ignore(in_col, value, e)
        update(db, in_table, [(in_col, None)],
            sql_gen.ColValueCond(in_col, value))

    def insert_pkeys_table(which):
        return sql_gen.Table(sql_gen.concat(in_table.name,
            '_insert_'+which+'_pkeys'))
    insert_out_pkeys = insert_pkeys_table('out')
    insert_in_pkeys = insert_pkeys_table('in')

    # Do inserts and selects
    join_cols = sql_gen.ColDict(db, out_table)
    while True:
        if limit_ref[0] == 0: # special case
            log_debug('Creating an empty pkeys table')
            cur = run_query_into(db, mk_select(db, out_table, [out_pkey],
                limit=limit_ref[0]), into=insert_out_pkeys)
            break # don't do main case

        has_joins = join_cols != {}

        log_debug('Trying to insert new rows')

        # Prepare to insert new rows
        insert_joins = input_joins[:] # don't modify original!
        insert_args = dict(recover=True, cacheable=False)
        if has_joins:
            insert_args.update(dict(ignore=True))
        else:
            insert_args.update(dict(returning=out_pkey, into=insert_out_pkeys))
        main_select = mk_main_select(insert_joins, mapping.values())

        def main_insert():
            if is_function:
                log_debug('Calling function on input rows')
                args = dict(((k.name, v) for k, v in mapping.iteritems()))
                func_call = sql_gen.NamedCol(out_pkey,
                    sql_gen.FunctionCall(out_table, **args))
                insert_into_pkeys(input_joins, [in_pkey_col, func_call])
                return None
            else:
                return insert_select(db, out_table, mapping.keys(), main_select,
                    **insert_args)

        try:
            cur = with_savepoint(db, main_insert)
            break # insert successful
        except MissingCastException, e:
            log_exc(e)

            out_col = e.col
            type_ = e.type

            log_debug('Casting '+strings.as_tt(out_col)+' input to '
                +strings.as_tt(type_))
            mapping[out_col] = cast_temp_col(db, type_, mapping[out_col],
                errors_table_)
        except DuplicateKeyException, e:
            log_exc(e)

            old_join_cols = join_cols.copy()
            distinct_on.update(util.dict_subset(mapping, e.cols))
            join_cols.update(util.dict_subset_right_join(mapping, e.cols))
            log_debug('Ignoring existing rows, comparing on these columns:\n'
                +strings.as_inline_table(join_cols, ustr=col_ustr))
            assert join_cols != old_join_cols # avoid infinite loops
        except NullValueException, e:
            log_exc(e)

            out_col, = e.cols
            try: in_col = mapping[out_col]
            except KeyError:
                log_debug('Missing mapping for NOT NULL column '+out_col)
                remove_all_rows()
            else: remove_rows(in_col, None, e)
        except FunctionValueException, e:
            log_exc(e)

            func_name = e.name
            value = e.value
            for out_col, in_col in mapping.iteritems():
                in_col = sql_gen.unwrap_func_call(in_col, func_name)
                invalid2null(in_col, value, e)
        except DatabaseErrors, e:
            log_exc(e)

            log_debug('No handler for exception')
            on_error(e)
            remove_all_rows()
        # after exception handled, rerun loop with additional constraints

    if cur != None and row_ct_ref != None and cur.rowcount >= 0:
        row_ct_ref[0] += cur.rowcount

    if is_function: pass # pkeys table already created
    elif has_joins:
        select_joins = input_joins+[sql_gen.Join(out_table, join_cols)]
        log_debug('Getting output table pkeys of existing/inserted rows')
        insert_into_pkeys(select_joins, pkeys_cols, distinct=True)
    else:
        add_row_num(db, insert_out_pkeys) # for joining with input pkeys

        log_debug('Getting input table pkeys of inserted rows')
        run_query_into(db, mk_main_select(input_joins, [in_pkey]),
            into=insert_in_pkeys)
        add_row_num(db, insert_in_pkeys) # for joining with output pkeys

        assert table_row_count(db, insert_out_pkeys) == table_row_count(db,
            insert_in_pkeys)

        log_debug('Combining output and input pkeys in inserted order')
        pkey_joins = [insert_in_pkeys, sql_gen.Join(insert_out_pkeys,
            {row_num_col: sql_gen.join_same_not_null})]
        insert_into_pkeys(pkey_joins, pkeys_names)

        empty_temp(db, [insert_out_pkeys, insert_in_pkeys])

    db.log_debug('Adding pkey on pkeys table to enable fast joins', level=2.5)
    add_pkey(db, into)

    log_debug('Setting pkeys of missing rows to '+strings.as_tt(repr(default)))
    missing_rows_joins = input_joins+[sql_gen.Join(into,
        {in_pkey: sql_gen.join_same_not_null}, sql_gen.filter_out)]
        # must use join_same_not_null or query will take forever
    insert_into_pkeys(missing_rows_joins,
        [in_pkey_col, sql_gen.NamedCol(out_pkey, default)])

    assert table_row_count(db, into) == table_row_count(db, in_table)

    empty_temp(db, in_table)

    srcs = []
    if is_func: srcs = sql_gen.cols_srcs(in_cols)
    return sql_gen.Col(out_pkey, into, srcs)

##### Data cleanup

def cleanup_table(db, table, cols):
    table = sql_gen.as_Table(table)
    cols = map(sql_gen.as_Col, cols)

    expr = ('nullif(nullif(trim(both from %s), '+db.esc_value('')+'), '
        +db.esc_value(r'\N')+')')
    changes = [(v, sql_gen.CustomCode(expr % v.to_str(db)))
        for v in cols]

    update(db, table, changes, in_place=True)
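
# Usage sketch for cleanup_table() above (hypothetical table/columns): trim
# whitespace and convert empty strings and the '\N' NULL placeholder to real
# NULLs, in place:
#     cleanup_table(db, 'samples', ['family_verbatim', 'author_verbatim'])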