Project

General

Profile

1
# Database access
2

    
3
import copy
4
import operator
5
import re
6
import warnings
7

    
8
import exc
9
import dicts
10
import iters
11
import lists
12
from Proxy import Proxy
13
import rand
14
import sql_gen
15
import strings
16
import util
17

    
18
##### Exceptions
19

    
20
def get_cur_query(cur, input_query=None, input_params=None):
    '''Returns the raw query text recorded on a DB-API cursor.
    Checks the driver-specific attributes 'query' (psycopg2) and
    '_last_executed' (MySQLdb); if neither holds a value, falls back to a
    label built from the caller-supplied query and params.'''
    recorded = None
    for attr in ('query', '_last_executed'):
        if hasattr(cur, attr):
            recorded = getattr(cur, attr)
            break
    if recorded == None:
        return '[input] '+strings.ustr(input_query)+' % '+repr(input_params)
    return recorded
27

    
28
def _add_cursor_info(e, *args, **kw_args):
    '''For params, see get_cur_query()
    Appends the cursor's query text to the exception's message via
    exc.add_msg(), so the failing SQL is visible in the traceback.'''
    exc.add_msg(e, 'query: '+strings.ustr(get_cur_query(*args, **kw_args)))
31

    
32
class DbException(exc.ExceptionWithCause):
    '''Base class for all database errors raised by this module.
    If a cursor is provided, the query it ran is appended to the message.'''
    def __init__(self, msg, cause=None, cur=None):
        exc.ExceptionWithCause.__init__(self, msg, cause, cause_newline=True)
        if cur != None: _add_cursor_info(self, cur)
36

    
37
class ExceptionWithName(DbException):
    '''A database error associated with a named object (table, function, ...).'''
    def __init__(self, name, cause=None):
        DbException.__init__(self, 'for name: '+strings.as_tt(str(name)), cause)
        self.name = name  # the offending object's name
41

    
42
class ExceptionWithNameValue(DbException):
    '''A database error associated with a named object and an offending value.'''
    def __init__(self, name, value, cause=None):
        DbException.__init__(self, 'for name: '+strings.as_tt(str(name))
            +'; value: '+strings.as_tt(repr(value)), cause)
        self.name = name    # the offending object's name
        self.value = value  # the offending value
48

    
49
class ConstraintException(DbException):
    '''Raised when a constraint (unique, NOT NULL, ...) is violated.'''
    def __init__(self, name, cols, cause=None):
        DbException.__init__(self, 'Violated '+strings.as_tt(name)
            +' constraint on columns: '+strings.as_tt(', '.join(cols)), cause)
        self.name = name  # the constraint's name
        self.cols = cols  # the columns the constraint covers
55

    
56
class MissingCastException(DbException):
    '''Raised when a value needs an explicit cast to a column's type.'''
    def __init__(self, type_, col, cause=None):
        DbException.__init__(self, 'Missing cast to type '+strings.as_tt(type_)
            +' on column: '+strings.as_tt(col), cause)
        self.type = type_  # target SQL type
        self.col = col     # column that needs the cast
62

    
63
# Raised for problems with identifier names
class NameException(DbException): pass
64

    
65
# Unique-constraint violation (see run_query()'s error parsing)
class DuplicateKeyException(ConstraintException): pass
66

    
67
# NOT NULL constraint violation
class NullValueException(ConstraintException): pass
68

    
69
# Invalid input value passed to a database function
class FunctionValueException(ExceptionWithNameValue): pass
70

    
71
# "relation ... already exists" — used by run_query_into() to retry with a new name
class DuplicateTableException(ExceptionWithName): pass
72

    
73
# "function ... already exists" — used by mk_insert_select() to retry with a new name
class DuplicateFunctionException(ExceptionWithName): pass
74

    
75
# Raised when a required row contains no data
class EmptyRowException(DbException): pass
76

    
77
##### Warnings
78

    
79
# Warning category for non-fatal database issues (see warnings.warn in mk_select)
class DbWarning(UserWarning): pass
80

    
81
##### Result retrieval
82

    
83
def col_names(cur):
    '''Returns a generator of the column names in the cursor's result set,
    taken from the first element of each cursor.description entry.'''
    description = cur.description
    return (col_info[0] for col_info in description)
84

    
85
def rows(cur):
    '''Returns an iterator over the cursor's rows, stopping when fetchone()
    returns None (the DB-API end-of-results sentinel).
    Uses the two-argument iter(callable, sentinel) form directly on the bound
    method instead of wrapping it in a redundant lambda.'''
    return iter(cur.fetchone, None)
86

    
87
def consume_rows(cur):
    '''Used to fetch all rows so result will be cached
    (DbCursor.fetchone() caches once the last row has been read).'''
    iters.consume_iter(rows(cur))
90

    
91
# Returns the next row; raises StopIteration if there are none (Python 2 .next())
def next_row(cur): return rows(cur).next()
92

    
93
def row(cur):
    '''Returns the first row, then drains the cursor so the result is cached.'''
    row_ = next_row(cur)
    consume_rows(cur)
    return row_
97

    
98
# First column of the next row; raises StopIteration when exhausted
def next_value(cur): return next_row(cur)[0]
99

    
100
# First column of the first row (drains the cursor via row())
def value(cur): return row(cur)[0]
101

    
102
# Iterator over the first column of every row
def values(cur): return iters.func_iter(lambda: next_value(cur))
103

    
104
def value_or_none(cur):
    '''Like value(), but returns None instead of raising StopIteration
    when the result set is empty.'''
    try:
        return value(cur)
    except StopIteration:
        return None
107

    
108
##### Escaping
109

    
110
def esc_name_by_module(module, name):
    '''Escapes an identifier using the quote character of the given DB-API
    module: backticks for MySQLdb, double quotes for psycopg2 (or an
    unspecified module). Raises NotImplementedError for anything else.'''
    if module == 'MySQLdb': quote = '`'
    elif module == 'psycopg2' or module == None: quote = '"'
    else: raise NotImplementedError("Can't escape name for "+module+' database')
    return sql_gen.esc_name(name, quote)
115

    
116
def esc_name_by_engine(engine, name, **kw_args):
    '''Escapes an identifier for a db_engines key ('MySQL' or 'PostgreSQL').'''
    return esc_name_by_module(db_engines[engine][0], name, **kw_args)
118

    
119
def esc_name(db, name, **kw_args):
    '''Escapes an identifier for the module backing an open connection.'''
    return esc_name_by_module(util.root_module(db.db), name, **kw_args)
121

    
122
def qual_name(db, schema, table):
    '''Returns the escaped, optionally schema-qualified name of a table.'''
    table = esc_name(db, table)
    if schema == None: return table
    return esc_name(db, schema)+'.'+table
127

    
128
##### Database connections
129

    
130
# Keys recognized in a db_config dict (see DbConn and db_config_str())
db_config_names = ['engine', 'host', 'user', 'password', 'database', 'schemas']
131

    
132
# engine name -> (DB-API module name, db_config key renames for that module)
db_engines = {
    'MySQL': ('MySQLdb', {'password': 'passwd', 'database': 'db'}),
    'PostgreSQL': ('psycopg2', {}),
}
136

    
137
DatabaseErrors_set = set([DbException])
138
DatabaseErrors = tuple(DatabaseErrors_set)
139

    
140
def _add_module(module):
    '''Registers a DB-API module's DatabaseError in the module-level
    DatabaseErrors tuple so callers can catch driver errors generically.'''
    DatabaseErrors_set.add(module.DatabaseError)
    global DatabaseErrors
    DatabaseErrors = tuple(DatabaseErrors_set)
144

    
145
def db_config_str(db_config):
    '''Returns a human-readable "<engine> database <name>" label for a config.'''
    return '{0} database {1}'.format(db_config['engine'], db_config['database'])
147

    
148
# Cache key for a (query, params) pair; params made hashable for dict use
def _query_lookup(query, params): return (query, dicts.make_hashable(params))
149

    
150
# No-op debug logger; DbConn compares against this to detect debug mode
log_debug_none = lambda msg, level=2: None
151

    
152
class DbConn:
    '''A lazily-connecting database connection wrapper with query caching,
    savepoint management, and optional autocommit.
    The underlying DB-API connection is opened on first access of self.db.
    '''
    def __init__(self, db_config, serializable=True, autocommit=False,
        caching=True, log_debug=log_debug_none):
        '''
        @param db_config dict with keys from db_config_names, including
            'engine' (a db_engines key) and optionally 'schemas'
            (comma-separated search_path prefix)
        @param serializable Whether to use SERIALIZABLE isolation
            (only applied when not autocommitting)
        @param autocommit Whether to commit after each statement run outside
            a savepoint (see do_autocommit())
        @param caching Whether to cache query results (see run_query())
        @param log_debug Callback for debug messages; debug mode is enabled
            iff this differs from log_debug_none
        '''
        self.db_config = db_config
        self.serializable = serializable
        self.autocommit = autocommit
        self.caching = caching
        self.log_debug = log_debug
        self.debug = log_debug != log_debug_none
        
        self.__db = None            # lazily-opened DB-API connection
        self.query_results = {}     # _query_lookup() key -> CacheCursor
        self._savepoint = 0         # current savepoint nesting depth
        self._notices_seen = set()  # server notices already logged
    
    def __getattr__(self, name):
        # Only called for attributes not found normally; exposes 'db' as a
        # lazy property that connects on first use
        if name == '__dict__': raise Exception('getting __dict__')
        if name == 'db': return self._db()
        else: raise AttributeError()
    
    def __getstate__(self):
        state = copy.copy(self.__dict__) # shallow copy
        state['log_debug'] = None # don't pickle the debug callback
        state['_DbConn__db'] = None # don't pickle the connection
        return state
    
    def connected(self): return self.__db != None
    
    def _db(self):
        '''Returns the DB-API connection, opening and configuring it on
        first call (isolation level, search_path schemas).'''
        if self.__db == None:
            # Process db_config
            db_config = self.db_config.copy() # don't modify input!
            schemas = db_config.pop('schemas', None)
            module_name, mappings = db_engines[db_config.pop('engine')]
            module = __import__(module_name)
            _add_module(module)
            # Rename config keys to the names this driver expects
            for orig, new in mappings.iteritems():
                try: util.rename_key(db_config, orig, new)
                except KeyError: pass
            
            # Connect
            self.__db = module.connect(**db_config)
            
            # Configure connection
            if self.serializable and not self.autocommit: self.run_query(
                'SET TRANSACTION ISOLATION LEVEL SERIALIZABLE', log_level=4)
            if schemas != None:
                # Prepend the requested schemas to the search_path
                schemas_ = ''.join((esc_name(self, s)+', '
                    for s in schemas.split(',')))
                self.run_query("SELECT set_config('search_path', "
                    +self.esc_value(schemas_)
                    +" || current_setting('search_path'), false)", log_level=3)
        
        return self.__db
    
    class DbCursor(Proxy):
        '''A cursor proxy that records the rows (or the exception) a query
        produces and stores them in the connection's query_results cache.'''
        def __init__(self, outer):
            Proxy.__init__(self, outer.db.cursor())
            self.outer = outer
            self.query_results = outer.query_results
            self.query_lookup = None  # cache key, set by execute()
            self.result = []          # accumulated rows, or the exception
        
        def execute(self, query, params=None):
            # Params are not supported (see run_query()); an empty sequence
            # is normalized to None
            assert params == None or params == [] or params == ()
            if params == [] or params == (): params = None
                # None turns off the mogrifier
            
            self._is_insert = query.startswith('INSERT')
            self.query_lookup = _query_lookup(query, params)
            try:
                try:
                    cur = self.inner.execute(query, params)
                    self.outer.do_autocommit()
                finally: self.query = get_cur_query(self.inner, query, params)
            except Exception, e:
                _add_cursor_info(e, self, query, params)
                self.result = e # cache the exception as the result
                self._cache_result()
                raise
            # Fetch all rows so result will be cached
            if self.rowcount == 0 and not self._is_insert: consume_rows(self)
            return cur
        
        def fetchone(self):
            row = self.inner.fetchone()
            if row != None: self.result.append(row)
            # otherwise, fetched all rows
            else: self._cache_result()
            return row
        
        def _cache_result(self):
            # For inserts, only cache exceptions since inserts are not
            # idempotent, but an invalid insert will always be invalid
            if self.query_results != None and (not self._is_insert
                or isinstance(self.result, Exception)):
                
                assert self.query_lookup != None
                self.query_results[self.query_lookup] = self.CacheCursor(
                    util.dict_subset(dicts.AttrsDictView(self),
                    ['query', 'result', 'rowcount', 'description']))
        
        class CacheCursor:
            '''Replays a cached result (rows list or exception) with the
            subset of the cursor interface used by this module.'''
            def __init__(self, cached_result): self.__dict__ = cached_result
            
            def execute(self, *args, **kw_args):
                if isinstance(self.result, Exception): raise self.result
                # otherwise, result is a rows list
                self.iter = iter(self.result)
            
            def fetchone(self):
                try: return self.iter.next()
                except StopIteration: return None
    
    def esc_value(self, value):
        '''Escapes a literal value for embedding in SQL, via the driver's
        mogrifier (psycopg2) or _mysql.escape_string (MySQLdb).'''
        try: str_ = self.mogrify('%s', [value])
        except NotImplementedError, e:
            module = util.root_module(self.db)
            if module == 'MySQLdb':
                import _mysql
                str_ = _mysql.escape_string(value)
            else: raise e
        return strings.to_unicode(str_)
    
    def esc_name(self, name): return esc_name(self, name) # calls global func
    
    def can_mogrify(self):
        # Only psycopg2 exposes cursor.mogrify()
        module = util.root_module(self.db)
        return module == 'psycopg2'
    
    def mogrify(self, query, params=None):
        if self.can_mogrify(): return self.db.cursor().mogrify(query, params)
        else: raise NotImplementedError("Can't mogrify query")
    
    def print_notices(self):
        '''Logs any server notices not already seen (psycopg2 only exposes
        a notices attribute; others are skipped).'''
        if hasattr(self.db, 'notices'):
            for msg in self.db.notices:
                if msg not in self._notices_seen:
                    self._notices_seen.add(msg)
                    self.log_debug(msg, level=2)
    
    def run_query(self, query, params=None, cacheable=False, log_level=2,
        debug_msg_ref=None):
        '''Runs a query, consulting/populating the result cache when
        cacheable is set, and returns the cursor.
        @param params Not supported. Use self.esc_value() instead.
        @param debug_msg_ref If specified, the log message will be returned in
            this instead of being output. This allows you to filter log messages
            depending on the result of the query.
        NOTE(review): the original docstring also described a log_ignore_excs
        param, which belongs to the module-level run_query() wrapper, not to
        this method.
        '''
        assert query != None
        
        if not self.caching: cacheable = False
        used_cache = False
        
        def log_msg(query):
            if used_cache: cache_status = 'cache hit'
            elif cacheable: cache_status = 'cache miss'
            else: cache_status = 'non-cacheable'
            return 'DB query: '+cache_status+':\n'+strings.as_code(query, 'SQL')
        
        try:
            # Get cursor
            if cacheable:
                query_lookup = _query_lookup(query, params)
                try:
                    cur = self.query_results[query_lookup]
                    used_cache = True
                except KeyError: cur = self.DbCursor(self)
            else: cur = self.db.cursor()
            
            # Log query
            if self.debug and debug_msg_ref == None: # log before running
                self.log_debug(log_msg(query), log_level)
            
            # Run query
            cur.execute(query, params)
        finally:
            self.print_notices()
            if self.debug and debug_msg_ref != None: # return after running
                debug_msg_ref[0] = log_msg(str(get_cur_query(cur, query,
                    params)))
        
        return cur
    
    def is_cached(self, query, params=None):
        return _query_lookup(query, params) in self.query_results
    
    def with_autocommit(self, func, autocommit=True):
        '''Runs func() with the connection's autocommit temporarily set,
        restoring the previous setting afterwards.'''
        prev_autocommit = self.db.autocommit
        self.db.autocommit = autocommit
        try: return func()
        finally: self.db.autocommit = prev_autocommit
    
    def with_savepoint(self, func):
        '''Runs func() inside a savepoint named by nesting depth: rolls back
        to it on any exception, releases it (and autocommits) on success.'''
        savepoint = 'level_'+str(self._savepoint)
        self.run_query('SAVEPOINT '+savepoint, log_level=4)
        self._savepoint += 1
        try:
            try: return_val = func()
            finally:
                # Restore depth whether or not func() raised
                self._savepoint -= 1
                assert self._savepoint >= 0
        except:
            self.run_query('ROLLBACK TO SAVEPOINT '+savepoint, log_level=4)
            raise
        else:
            self.run_query('RELEASE SAVEPOINT '+savepoint, log_level=4)
            self.do_autocommit()
            return return_val
    
    def do_autocommit(self):
        '''Autocommits if outside savepoint'''
        assert self._savepoint >= 0
        if self.autocommit and self._savepoint == 0:
            self.log_debug('Autocommitting')
            self.db.commit()
    
    def col_default(self, col):
        '''Returns the column's default value (as SQL code) from
        information_schema.columns.'''
        table = sql_gen.Table('columns', 'information_schema')
        
        conds = [('table_name', col.table.name), ('column_name', col.name)]
        schema = col.table.schema
        if schema != None: conds.append(('table_schema', schema))
        
        return sql_gen.as_Code(value(select(self, table, ['column_default'],
            conds, order_by='table_schema', limit=1, log_level=3)))
            # TODO: order_by search_path schema order
381

    
382
# Alias so callers can write sql.connect(db_config, ...)
connect = DbConn
383

    
384
##### Querying
385

    
386
def run_raw_query(db, *args, **kw_args):
    '''For params, see DbConn.run_query()
    Thin wrapper that forwards directly to the connection's run_query().'''
    return db.run_query(*args, **kw_args)
389

    
390
def mogrify(db, query, params):
    '''Binds params into a query string using the driver's mogrifier.
    Only supported for psycopg2 connections.'''
    module = util.root_module(db.db)
    if module == 'psycopg2': return db.db.cursor().mogrify(query, params)
    else: raise NotImplementedError("Can't mogrify query for "+module+
        ' database')
395

    
396
##### Recoverable querying
397

    
398
# Module-level convenience wrapper for DbConn.with_savepoint()
def with_savepoint(db, func): return db.with_savepoint(func)
399

    
400
def run_query(db, query, recover=None, cacheable=False, log_level=2,
    log_ignore_excs=None, **kw_args):
    '''For params, see run_raw_query()
    Runs a query, optionally inside a savepoint (recover=True), and parses
    driver error messages into this module's typed exceptions
    (DuplicateKeyException, NullValueException, FunctionValueException,
    MissingCastException, DuplicateTableException,
    DuplicateFunctionException).
    @param recover Whether to wrap the query in a savepoint so the
        transaction survives an error
    @param log_ignore_excs Exception types whose log messages are demoted
        (log_level increased by 2)
    '''
    if recover == None: recover = False
    if log_ignore_excs == None: log_ignore_excs = ()
    log_ignore_excs = tuple(log_ignore_excs)
    
    debug_msg_ref = None # usually, db.run_query() logs query before running it
    # But if filtering with log_ignore_excs, wait until after exception parsing
    if log_ignore_excs != () or not db.can_mogrify(): debug_msg_ref = [None] 
    
    try:
        try:
            def run(): return run_raw_query(db, query, [], cacheable,
                log_level, debug_msg_ref, **kw_args)
            if recover and not db.is_cached(query, []):
                return with_savepoint(db, run)
            else: return run() # don't need savepoint if cached
        except Exception, e:
            if not recover: raise # need savepoint to run index_cols()
            msg = exc.str_(e)
            
            # Unique-constraint violation -> DuplicateKeyException.
            # The constraint name is assumed to start with the table name.
            match = re.search(r'duplicate key value violates unique constraint '
                r'"((_?[^\W_]+)_.+?)"', msg)
            if match:
                constraint, table = match.groups()
                try: cols = index_cols(db, table, constraint)
                except NotImplementedError: raise e
                else: raise DuplicateKeyException(constraint, cols, e)
            
            # NOT NULL violation -> NullValueException
            match = re.search(r'null value in column "(.+?)" violates not-null'
                r' constraint', msg)
            if match: raise NullValueException('NOT NULL', [match.group(1)], e)
            
            # Bad input value inside a function -> FunctionValueException
            match = re.search(r'\b(?:invalid input (?:syntax|value)\b.*?'
                r'|date/time field value out of range): "(.+?)"\n'
                r'(?:(?s).*?)\bfunction "(.+?)"', msg)
            if match:
                value, name = match.groups()
                raise FunctionValueException(name, strings.to_unicode(value), e)
            
            # Type mismatch on assignment -> MissingCastException
            match = re.search(r'column "(.+?)" is of type (.+?) but expression '
                r'is of type', msg)
            if match:
                col, type_ = match.groups()
                raise MissingCastException(type_, col, e)
            
            match = re.search(r'relation "(.+?)" already exists', msg)
            if match: raise DuplicateTableException(match.group(1), e)
            
            match = re.search(r'function "(.+?)" already exists', msg)
            if match: raise DuplicateFunctionException(match.group(1), e)
            
            raise # no specific exception raised
    except log_ignore_excs:
        log_level += 2
        raise
    finally:
        # Emit the deferred log message (if logging was deferred) at the
        # possibly-demoted log level
        if debug_msg_ref != None and debug_msg_ref[0] != None:
            db.log_debug(debug_msg_ref[0], log_level)
460

    
461
##### Basic queries
462

    
463
def next_version(name):
    '''Returns the name with its '#<version>' suffix incremented.
    A bare name counts as version 0, so its next version is '<name>#1'.'''
    base = name
    version = 1 # first existing name was version 0
    parsed = re.match(r'^(.*)#(\d+)$', name)
    if parsed:
        base = parsed.group(1)
        version = int(parsed.group(2))+1
    return sql_gen.add_suffix(base, '#'+str(version))
470

    
471
def run_query_into(db, query, into=None, add_indexes_=False, **kw_args):
    '''Outputs a query to a temp table.
    For params, see run_query().
    If the table name is taken, retries with successive versioned names
    (see next_version()), mutating into.name in place.
    @param into None|sql_gen.Table Table to create; None runs the query as-is
    @param add_indexes_ Whether to add indexes on the new table afterwards
    '''
    if into == None: return run_query(db, query, **kw_args)
    
    assert isinstance(into, sql_gen.Table)
    
    kw_args['recover'] = True
    kw_args.setdefault('log_ignore_excs', (DuplicateTableException,))
    
    temp = not db.autocommit # tables are permanent in autocommit mode
    # "temporary tables cannot specify a schema name", so remove schema
    if temp: into.schema = None
    
    # Create table
    while True:
        create_query = 'CREATE'
        if temp: create_query += ' TEMP'
        create_query += ' TABLE '+into.to_str(db)+' AS\n'+query
        
        try:
            cur = run_query(db, create_query, **kw_args)
                # CREATE TABLE AS sets rowcount to # rows in query
            break
        except DuplicateTableException, e:
            into.name = next_version(into.name)
            # try again with next version of name
    
    if add_indexes_: add_indexes(db, into)
    
    return cur
503

    
504
order_by_pkey = object() # tells mk_select() to order by the pkey
505

    
506
distinct_on_all = object() # tells mk_select() to SELECT DISTINCT ON all columns
507

    
508
def mk_select(db, tables, fields=None, conds=None, distinct_on=[], limit=None,
    start=None, order_by=order_by_pkey, default_table=None):
    '''Builds (but does not run) a SELECT statement.
    @param tables The single table to select from, or a list of tables to join
        together, with tables after the first being sql_gen.Join objects
    @param fields Use None to select all fields in the table
    @param conds WHERE conditions: [(compare_left_side, compare_right_side),...]
        * container can be any iterable type
        * compare_left_side: sql_gen.Code|str (for col name)
        * compare_right_side: sql_gen.ValueCond|literal value
    @param distinct_on The columns to SELECT DISTINCT ON, or distinct_on_all to
        use all columns
    @param limit int|None LIMIT clause value
    @param start int|None OFFSET clause value (0 emits no OFFSET)
    @param order_by Column to ORDER BY; order_by_pkey (default) orders by the
        table's pkey (unless DISTINCT ON is used); None omits the clause
    @param default_table Table to attach to unqualified column names
    @return query
    '''
    # Parse tables param
    if not lists.is_seq(tables): tables = [tables]
    tables = list(tables) # don't modify input! (list() copies input)
    table0 = sql_gen.as_Table(tables.pop(0)) # first table is separate
    
    # Parse other params
    if conds == None: conds = []
    elif dicts.is_dict(conds): conds = conds.items()
    conds = list(conds) # don't modify input! (list() copies input)
    assert limit == None or type(limit) == int
    assert start == None or type(start) == int
    if order_by is order_by_pkey:
        # DISTINCT ON and ORDER BY pkey would conflict, so skip the ordering
        if distinct_on != []: order_by = None
        else: order_by = pkey(db, table0, recover=True)
    
    query = 'SELECT'
    
    def parse_col(col): return sql_gen.as_Col(col, default_table).to_str(db)
    
    # DISTINCT ON columns
    if distinct_on != []:
        query += '\nDISTINCT'
        if distinct_on is not distinct_on_all:
            query += ' ON ('+(', '.join(map(parse_col, distinct_on)))+')'
    
    # Columns
    query += '\n'
    if fields == None: query += '*'
    else:
        assert fields != []
        query += '\n, '.join(map(parse_col, fields))
    
    # Main table
    query += '\nFROM '+table0.to_str(db)
    
    # Add joins
    left_table = table0
    for join_ in tables:
        table = join_.table
        
        # Parse special values
        if join_.type_ is sql_gen.filter_out: # filter no match
            conds.append((sql_gen.Col(table_not_null_col(db, table), table),
                None))
        
        query += '\n'+join_.to_str(db, left_table)
        
        left_table = table
    
    # Warn if the statement has no row-limiting clause at all
    missing = True
    if conds != []:
        # (removed an unused `whitespace` local that was computed from
        # len(conds) but never referenced)
        query += '\n'+sql_gen.combine_conds([sql_gen.ColValueCond(l, r)
            .to_str(db) for l, r in conds], 'WHERE')
        missing = False
    if order_by != None:
        query += '\nORDER BY '+sql_gen.as_Col(order_by, table0).to_str(db)
    if limit != None: query += '\nLIMIT '+str(limit); missing = False
    if start != None:
        if start != 0: query += '\nOFFSET '+str(start)
        missing = False
    if missing: warnings.warn(DbWarning(
        'SELECT statement missing a WHERE, LIMIT, or OFFSET clause: '+query))
    
    return query
588

    
589
def select(db, *args, **kw_args):
    '''For params, see mk_select() and run_query()
    Builds the SELECT with mk_select() and runs it; results are cacheable
    by default.'''
    recover = kw_args.pop('recover', None)
    cacheable = kw_args.pop('cacheable', True)
    log_level = kw_args.pop('log_level', 2)
    
    return run_query(db, mk_select(db, *args, **kw_args), recover, cacheable,
        log_level=log_level)
597

    
598
def mk_insert_select(db, table, cols=None, select_query=None, returning=None,
    embeddable=False):
    '''Builds (and for embeddable queries, partially runs) an INSERT ... SELECT.
    @param returning str|None An inserted column (such as pkey) to return
    @param embeddable Whether the query should be embeddable as a nested SELECT.
        Warning: If you set this and cacheable=True when the query is run, the
        query will be fully cached, not just if it raises an exception.
    @return str The INSERT statement, or (when embeddable) a SELECT that calls
        a generated wrapper function
    '''
    table = sql_gen.remove_table_rename(sql_gen.as_Table(table))
    if cols == []: cols = None # no cols (all defaults) = unknown col names
    if cols != None:
        cols = [sql_gen.to_name_only_col(v, table).to_str(db) for v in cols]
    if select_query == None: select_query = 'DEFAULT VALUES'
    if returning != None: returning = sql_gen.as_Col(returning, table)
    
    # Build query
    first_line = 'INSERT INTO '+table.to_str(db)
    query = first_line
    if cols != None: query += '\n('+', '.join(cols)+')'
    query += '\n'+select_query
    
    if returning != None:
        query += '\nRETURNING '+sql_gen.to_name_only_col(returning).to_str(db)
    
    if embeddable:
        # RETURNING can't be nested directly, so wrap the INSERT in a
        # set-returning SQL function and SELECT from that instead
        assert returning != None
        
        # Create function
        function_name = sql_gen.clean_name(first_line)
        return_type = 'SETOF '+returning.to_str(db)+'%TYPE'
        while True:
            try:
                function = sql_gen.TempFunction(function_name, db.autocommit)
                
                function_query = '''\
CREATE FUNCTION '''+function.to_str(db)+'''()
RETURNS '''+return_type+'''
LANGUAGE sql
AS $$
'''+query+''';
$$;
'''
                run_query(db, function_query, recover=True, cacheable=True,
                    log_ignore_excs=(DuplicateFunctionException,))
                break # this version was successful
            except DuplicateFunctionException, e:
                function_name = next_version(function_name)
                # try again with next version of name
        
        # Return query that uses function
        func_table = sql_gen.NamedTable('f', sql_gen.FunctionCall(function),
            [returning]) # AS clause requires function alias
        return mk_select(db, func_table, start=0, order_by=None)
    
    return query
653

    
654
def insert_select(db, *args, **kw_args):
    '''For params, see mk_insert_select() and run_query_into()
    @param into sql_gen.Table with suggested name of temp table to put RETURNING
        values in
    '''
    into = kw_args.pop('into', None)
    # Capturing RETURNING values into a table requires the embeddable form
    if into != None: kw_args['embeddable'] = True
    recover = kw_args.pop('recover', None)
    cacheable = kw_args.pop('cacheable', True)
    log_level = kw_args.pop('log_level', 2)
    
    return run_query_into(db, mk_insert_select(db, *args, **kw_args), into,
        recover=recover, cacheable=cacheable, log_level=log_level)
667

    
668
default = sql_gen.default # tells insert() to use the default value for a column
669

    
670
def insert(db, table, row, *args, **kw_args):
    '''For params, see insert_select()
    @param row A sequence of values (column names unknown) or a dict of
        col -> value
    '''
    if lists.is_seq(row): cols = None
    else:
        cols = row.keys()
        row = row.values()
    row = list(row) # ensure that "== []" works
    
    # An empty row inserts all defaults (mk_insert_select() treats a None
    # select_query as DEFAULT VALUES)
    if row == []: query = None
    else: query = sql_gen.Values(row).to_str(db)
    
    return insert_select(db, table, cols, query, *args, **kw_args)
682

    
683
def mk_update(db, table, changes=None, cond=None):
    '''Builds (but does not run) an UPDATE statement.
    @param changes [(col, new_value),...]
        * container can be any iterable type
        * col: sql_gen.Code|str (for col name)
        * new_value: sql_gen.Code|literal value
    @param cond sql_gen.Code WHERE condition. e.g. use sql_gen.*Cond objects.
    @return str query
    '''
    query = 'UPDATE '+sql_gen.as_Table(table).to_str(db)+'\nSET\n'
    query += ',\n'.join((sql_gen.to_name_only_col(col, table).to_str(db)+' = '
        +sql_gen.as_Value(new_value).to_str(db) for col, new_value in changes))
    if cond != None: query += '\nWHERE\n'+cond.to_str(db)
    
    return query
698

    
699
def update(db, *args, **kw_args):
    '''For params, see mk_update() and run_query()'''
    recover = kw_args.pop('recover', None)
    
    return run_query(db, mk_update(db, *args, **kw_args), recover)
704

    
705
def last_insert_id(db):
    '''Returns the id generated by the last insert, using the driver's
    mechanism (lastval() for psycopg2, insert_id() for MySQLdb);
    None for unsupported drivers.'''
    module = util.root_module(db.db)
    if module == 'psycopg2': return value(run_query(db, 'SELECT lastval()'))
    elif module == 'MySQLdb': return db.insert_id()
    else: return None
710

    
711
def mk_flatten_mapping(db, into, cols, preserve=[], as_items=False):
    '''Creates a mapping from original column names (which may have collisions)
    to names that will be distinct among the columns' tables.
    This is meant to be used for several tables that are being joined together.
    @param cols The columns to combine. Duplicates will be removed.
    @param into The table for the new columns.
    @param preserve [sql_gen.Col...] Columns not to rename. Note that these
        columns will be included in the mapping even if they are not in cols.
        The tables of the provided Col objects will be changed to into, so make
        copies of them if you want to keep the original tables.
    @param as_items Whether to return a list of dict items instead of a dict
    @return dict(orig_col=new_col, ...)
        * orig_col: sql_gen.Col(orig_col_name, orig_table)
        * new_col: sql_gen.Col(orig_col_name, into)
        * All mappings use the into table so its name can easily be
          changed for all columns at once
    '''
    cols = lists.uniqify(cols)
    
    items = []
    # Preserved columns map to themselves (retargeted to into, in place)
    for col in preserve:
        orig_col = copy.copy(col)
        col.table = into
        items.append((orig_col, col))
    preserve = set(preserve)
    # Remaining columns are renamed to their full string form, on into
    for col in cols:
        if col not in preserve:
            items.append((col, sql_gen.Col(str(col), into, col.srcs)))
    
    if not as_items: items = dict(items)
    return items
742

    
743
def flatten(db, into, joins, cols, limit=None, start=None, **kw_args):
    '''For params, see mk_flatten_mapping()
    Materializes the join into the `into` table, with the renamed columns.
    @return See return value of mk_flatten_mapping()
    '''
    items = mk_flatten_mapping(db, into, cols, as_items=True, **kw_args)
    cols = [sql_gen.NamedCol(new.name, old) for old, new in items]
    run_query_into(db, mk_select(db, joins, cols, limit=limit, start=start),
        into=into)
    return dict(items)
752

    
753
def mk_track_data_error(db, errors_table, cols, value, error_code, error):
    '''Builds an INSERT that records (column, value, error_code, error) rows
    in errors_table for each of the given source columns, skipping rows that
    are already present (via a filter_out join against errors_table).'''
    cols = map(sql_gen.to_name_only_col, cols)
    
    # One row per source column name
    columns_cols = ['column']
    columns = sql_gen.NamedValues('columns', columns_cols,
        [[c.name] for c in cols])
    # A single row with the error details, cross-joined onto every column
    values_cols = ['value', 'error_code', 'error']
    values = sql_gen.NamedValues('values', values_cols,
        [value, error_code, error])
    
    select_cols = columns_cols+values_cols
    name_only_cols = map(sql_gen.to_name_only_col, select_cols)
    errors_table = sql_gen.NamedTable('errors', errors_table)
    joins = [columns, sql_gen.Join(values, type_='CROSS'),
        sql_gen.Join(errors_table, dict(zip(name_only_cols, select_cols)),
        sql_gen.filter_out)]
    
    return mk_insert_select(db, errors_table, name_only_cols,
        mk_select(db, joins, select_cols, order_by=None))
772

    
773
def track_data_error(db, errors_table, *args, **kw_args):
    '''Saves a data error in the errors table.
    For the remaining params, see mk_track_data_error().
    @param errors_table If None, does nothing.
    '''
    # `is None`, not `== None`: errors_table may be a sql_gen.Table, whose
    # __eq__ (if any) should not decide this identity check
    if errors_table is None: return
    run_query(db, mk_track_data_error(db, errors_table, *args, **kw_args),
        cacheable=True, log_level=4)
780

    
781
def cast(db, type_, col, errors_table=None):
    '''Casts an (unrenamed) column or value.
    If errors_table set and col has srcs, saves errors in errors_table (using
    col's srcs attr as the source columns) and converts errors to warnings.
    @param type_ The SQL type to cast to
    @param col sql_gen.Col|sql_gen.Literal
    @param errors_table None|sql_gen.Table|str
    '''
    save_errors = (errors_table != None and isinstance(col, sql_gen.Col)
        and col.srcs != ())
    if not save_errors: # can't save errors
        return sql_gen.CustomCode(col.to_str(db)+'::'+type_) # just cast
    
    assert not isinstance(col, sql_gen.NamedCol)
    
    errors_table = sql_gen.as_Table(errors_table)
    srcs = map(sql_gen.to_name_only_col, col.srcs)
    # The function name is derived from the target type and the source columns
    function_name = str(sql_gen.FunctionCall(type_, *srcs))
    function = sql_gen.TempFunction(function_name, db.autocommit)
    
    # Retry with a new function name until creation succeeds
    while True:
        # Create function definition
        query = '''\
CREATE FUNCTION '''+function.to_str(db)+'''(value text)
RETURNS '''+type_+'''
LANGUAGE plpgsql
STRICT
AS $$
BEGIN
    /* The explicit cast to the return type is needed to make the cast happen
    inside the try block. (Implicit casts to the return type happen at the end
    of the function, outside any block.) */
    RETURN value::'''+type_+''';
EXCEPTION
    WHEN data_exception THEN
        -- Save error in errors table.
        -- Insert the value and error for *each* source column.
'''+mk_track_data_error(db, errors_table, srcs,
    *map(sql_gen.CustomCode, ['value', 'SQLSTATE', 'SQLERRM']))+''';
        
        RAISE WARNING '%', SQLERRM;
        RETURN NULL;
END;
$$;
'''
        
        # Create function
        try:
            run_query(db, query, recover=True, cacheable=True,
                log_ignore_excs=(DuplicateFunctionException,))
            break # successful
        except DuplicateFunctionException:
            function.name = next_version(function.name)
            # try again with next version of name
    
    return sql_gen.FunctionCall(function, col)
836

    
837
##### Database structure queries
838

    
839
def table_row_count(db, table, recover=None):
    '''Returns the number of rows in a table.'''
    query = mk_select(db, table, [sql_gen.row_count], order_by=None, start=0)
    return value(run_query(db, query, recover=recover, log_level=3))
842

    
843
def table_cols(db, table, recover=None):
    '''Returns the column names of a table, in order.'''
    cur = select(db, table, limit=0, order_by=None, recover=recover,
        log_level=4)
    return list(col_names(cur))
846

    
847
def pkey(db, table, recover=None):
    '''Returns the table's primary key.
    Assumed to be the first column in the table.
    '''
    cols = table_cols(db, table, recover)
    return cols[0]
850

    
851
# Conventional name of a table's NOT NULL column (see table_not_null_col())
not_null_col = 'not_null_col'
852

    
853
def table_not_null_col(db, table, recover=None):
    '''Returns the table's NOT NULL column.
    Name assumed to be the value of not_null_col. If not found, uses pkey.
    '''
    cols = table_cols(db, table, recover)
    if not_null_col in cols: return not_null_col
    return pkey(db, table, recover)
857

    
858
def index_cols(db, table, index):
    '''Returns the column names covered by an index, in column-number order.
    Can also use this for UNIQUE constraints, because a UNIQUE index is
    automatically created. When you don't know whether something is a UNIQUE
    constraint or a UNIQUE index, use this function.'''
    module = util.root_module(db.db)
    if module == 'psycopg2':
        # The UNION's first branch handles plain column indexes (attnum listed
        # directly in pg_index.indkey); the second branch handles expression
        # indexes by parsing the :varattno column numbers out of
        # pg_index.indexprs
        return list(values(run_query(db, '''\
SELECT attname
FROM
(
        SELECT attnum, attname
        FROM pg_index
        JOIN pg_class index ON index.oid = indexrelid
        JOIN pg_class table_ ON table_.oid = indrelid
        JOIN pg_attribute ON attrelid = indrelid AND attnum = ANY (indkey)
        WHERE
            table_.relname = '''+db.esc_value(table)+'''
            AND index.relname = '''+db.esc_value(index)+'''
    UNION
        SELECT attnum, attname
        FROM
        (
            SELECT
                indrelid
                , (regexp_matches(indexprs, E':varattno (\\\\d+)', 'g'))[1]::int
                    AS indkey
            FROM pg_index
            JOIN pg_class index ON index.oid = indexrelid
            JOIN pg_class table_ ON table_.oid = indrelid
            WHERE
                table_.relname = '''+db.esc_value(table)+'''
                AND index.relname = '''+db.esc_value(index)+'''
        ) s
        JOIN pg_attribute ON attrelid = indrelid AND attnum = indkey
) s
ORDER BY attnum
'''
            , cacheable=True, log_level=4)))
    else: raise NotImplementedError("Can't list index columns for "+module+
        ' database')
898

    
899
def constraint_cols(db, table, constraint):
    '''Returns the column names covered by a constraint, in column-number
    order. For UNIQUE constraints/indexes, use index_cols() instead.'''
    module = util.root_module(db.db)
    if module == 'psycopg2':
        # NOTE(review): unlike index_cols(), this run_query passes no
        # cacheable/log_level args — confirm whether that is intentional
        return list(values(run_query(db, '''\
SELECT attname
FROM pg_constraint
JOIN pg_class ON pg_class.oid = conrelid
JOIN pg_attribute ON attrelid = conrelid AND attnum = ANY (conkey)
WHERE
    relname = '''+db.esc_value(table)+'''
    AND conname = '''+db.esc_value(constraint)+'''
ORDER BY attnum
'''
            )))
    else: raise NotImplementedError("Can't list constraint columns for "+module+
        ' database')
915

    
916
# Name of the serial row number column created by add_row_num()
row_num_col = '_row_num'
917

    
918
def add_index(db, exprs, table=None, unique=False):
    '''Adds an index on column(s) or expression(s) if it doesn't already exist.
    Currently, only function calls are supported as expressions.
    @param exprs A column/expression, or a list of them
    @param table The table to create the index on. If None, it is taken from
        the first column's table.
    @param unique Whether to create a UNIQUE index
    '''
    if not lists.is_seq(exprs): exprs = [exprs]
    
    # Parse exprs
    old_exprs = exprs[:]
    exprs = []
    cols = []
    for i, expr in enumerate(old_exprs):
        expr = copy.deepcopy(expr) # don't modify input!
        expr = sql_gen.as_Col(expr)
        
        # Extract col
        if isinstance(expr, sql_gen.FunctionCall):
            # for a function expression, the index is named after the
            # function's first argument
            col = expr.args[0]
            expr = sql_gen.Expr(expr)
        else: col = expr
        
        # Extract table
        if table == None:
            assert sql_gen.is_table_col(col)
            table = col.table
        
        col.table = None # strip the table qualification from the name part
        
        exprs.append(expr)
        cols.append(col)
    
    table = sql_gen.as_Table(table)
    # Index name combines the indexed column names with the table name
    index = sql_gen.Table(str(sql_gen.Col(','.join(map(str, cols)), table)))
    
    str_ = 'CREATE'
    if unique: str_ += ' UNIQUE'
    str_ += ' INDEX '+index.to_str(db)+' ON '+table.to_str(db)+' ('+(
        ', '.join((v.to_str(db) for v in exprs)))+')'
    
    try: run_query(db, str_, recover=True, cacheable=True, log_level=3)
    except DuplicateTableException: pass # index already existed
958

    
959
def add_pkey(db, table, cols=None, recover=None):
    '''Adds a primary key.
    @param cols [sql_gen.Col,...] The columns in the primary key.
        Defaults to the first column in the table.
    @pre The table must not already have a primary key.
    '''
    table = sql_gen.as_Table(table)
    # `is None`, not `== None`: identity check for the missing-arg sentinel
    if cols is None: cols = [pkey(db, table, recover)]
    col_strs = [sql_gen.to_name_only_col(v).to_str(db) for v in cols]
    
    run_query(db, 'ALTER TABLE '+table.to_str(db)+' ADD PRIMARY KEY ('
        +(', '.join(col_strs))+')', recover=True, cacheable=True, log_level=3,
        log_ignore_excs=(DuplicateTableException,))
972

    
973
# Sentinel value for add_indexes()'s has_pkey param
already_indexed = object() # tells add_indexes() the pkey has already been added
974

    
975
def add_indexes(db, table, has_pkey=True):
    '''Adds an index on all columns in a table.
    @param has_pkey bool|already_indexed Whether a pkey instead of a regular
        index should be added on the first column.
        * If already_indexed, the pkey is assumed to have already been added
    '''
    remaining = table_cols(db, table)
    if has_pkey:
        # first column is the pkey; skip adding it if already done
        if has_pkey is not already_indexed: add_pkey(db, table)
        del remaining[0]
    for col in remaining:
        add_index(db, col, table)
986

    
987
def add_row_num(db, table):
    '''Adds a row number column to a table. Its name is in row_num_col. It will
    be the primary key.'''
    table_str = sql_gen.as_Table(table).to_str(db)
    query = ('ALTER TABLE '+table_str+' ADD COLUMN '+row_num_col
        +' serial NOT NULL PRIMARY KEY')
    run_query(db, query, log_level=3)
993

    
994
def create_table(db, table, cols, has_pkey=True, col_indexes=True):
    '''Creates a table.
    @param cols [sql_gen.TypedCol,...] The column names and types
    @param has_pkey If set, the first column becomes the primary key.
    @param col_indexes bool|[ref]
        * If True, indexes will be added on all non-pkey columns.
        * If a list reference, [0] will be set to a function to do this.
          This can be used to delay index creation until the table is populated.
    '''
    table = sql_gen.as_Table(table)
    
    if has_pkey:
        # Copy the list as well as the element: previously only the element
        # was copied, so `cols[0] = ...` still overwrote the slot in the
        # caller's list despite the "don't modify input" intent.
        cols = list(cols)
        cols[0] = pkey = copy.copy(cols[0]) # don't modify input!
        pkey.type += ' NOT NULL PRIMARY KEY'
    
    str_ = 'CREATE TABLE '+table.to_str(db)+' (\n'
    str_ += '\n, '.join(v.to_str(db) for v in cols)
    str_ += '\n);\n'
    run_query(db, str_, cacheable=True, log_level=2)
    
    # Add indexes
    if has_pkey: has_pkey = already_indexed # pkey was created by the DDL above
    def add_indexes_(): add_indexes(db, table, has_pkey)
    if isinstance(col_indexes, list): col_indexes[0] = add_indexes_ # defer
    elif col_indexes: add_indexes_() # add now
1019

    
1020
def vacuum(db, table):
    '''VACUUM ANALYZEs a table, running the query in autocommit mode.'''
    table_str = sql_gen.as_Table(table).to_str(db)
    def run_vacuum(): run_query(db, 'VACUUM ANALYZE '+table_str, log_level=3)
    db.with_autocommit(run_vacuum)
1024

    
1025
def truncate(db, table, schema='public'):
    '''Empties a table, cascading to tables that reference it.'''
    table = sql_gen.as_Table(table, schema)
    query = 'TRUNCATE '+table.to_str(db)+' CASCADE'
    return run_query(db, query)
1028

    
1029
def tables(db, schema_like='public', table_like='%', exact=False):
    '''Iterates over the names of the tables matching the given patterns.
    @param exact If set, compares with = instead of LIKE
    '''
    compare = '=' if exact else 'LIKE'
    
    module = util.root_module(db.db)
    if module == 'psycopg2':
        conds = [('schemaname', sql_gen.CompareCond(schema_like, compare)),
            ('tablename', sql_gen.CompareCond(table_like, compare))]
        return values(select(db, 'pg_tables', ['tablename'], conds,
            order_by='tablename', log_level=4))
    if module == 'MySQLdb':
        return values(run_query(db,
            'SHOW TABLES LIKE '+db.esc_value(table_like), cacheable=True,
            log_level=4))
    raise NotImplementedError("Can't list tables for "+module+' database')
1043

    
1044
def table_exists(db, table):
    '''Whether the given table exists in its schema.'''
    table = sql_gen.as_Table(table)
    matches = list(tables(db, table.schema, table.name, exact=True))
    return matches != []
1047

    
1048
def errors_table(db, table, if_exists=True):
    '''Returns the errors table associated with a table.
    @param if_exists If set, returns None if the errors table doesn't exist
    @return None|sql_gen.Table
    '''
    table = sql_gen.as_Table(table)
    # use the table's first source table, if it has any
    if table.srcs != (): table = table.srcs[0]
    
    errors_table_ = sql_gen.suffixed_table(table, '.errors')
    if if_exists:
        if not table_exists(db, errors_table_): return None
    return errors_table_
1059

    
1060
##### Database management
1061

    
1062
def empty_db(db, schema='public', **kw_args):
    '''Truncates every table in the given schema.
    For kw_args, see tables()
    '''
    for table_name in tables(db, schema, **kw_args):
        truncate(db, table_name, schema)
1065

    
1066
##### Heuristic queries
1067

    
1068
def put(db, table, row, pkey_=None, row_ct_ref=None):
    '''Upserts a row: tries to insert it, and on a duplicate key looks up the
    existing row instead.
    Recovers from errors.
    Only works under PostgreSQL (uses INSERT RETURNING).
    @param pkey_ The table's pkey column; looked up if not given
    @param row_ct_ref [int] If set, [0] is incremented by the insert row count
    @return The value of the pkey column for the inserted or existing row
    '''
    row = sql_gen.ColDict(db, table, row)
    if pkey_ == None: pkey_ = pkey(db, table, recover=True)
    
    try:
        cur = insert(db, table, row, pkey_, recover=True)
        if row_ct_ref != None and cur.rowcount >= 0:
            row_ct_ref[0] += cur.rowcount
        return value(cur)
    except DuplicateKeyException, e:
        # Row already exists: look up its pkey using the duplicated key columns
        row = sql_gen.ColDict(db, table,
            util.dict_subset_right_join(row, e.cols))
        return value(select(db, table, [pkey_], row, recover=True))
1084

    
1085
def get(db, table, row, pkey, row_ct_ref=None, create=False):
    '''Fetches a row's pkey, optionally inserting the row if it's missing.
    Recovers from errors.
    '''
    try:
        return value(select(db, table, [pkey], row, limit=1, recover=True))
    except StopIteration: # no matching row
        if create: return put(db, table, row, pkey, row_ct_ref) # insert it
        raise
1091

    
1092
def is_func_result(col):
    '''Whether col looks like the `result` column of a function call's output
    table (the table name contains a "(").'''
    return '(' in col.table.name and col.name == 'result'
1094

    
1095
def into_table_name(out_table, in_tables0, mapping, is_func):
    '''Picks the default name for put_table()'s into (pkeys) table.'''
    def in_col_str(in_col):
        in_col = sql_gen.remove_col_rename(in_col)
        if isinstance(in_col, sql_gen.Col):
            table = in_col.table
            if table == in_tables0:
                in_col = sql_gen.to_name_only_col(in_col)
            elif is_func_result(in_col): in_col = table # omit col name
        return str(in_col)
    
    name = str(out_table)
    if not is_func: return name+'_pkeys'
    
    # For a function, name the table after the function call's arguments
    try: value_in_col = mapping['value']
    except KeyError:
        args_str = ', '.join(str(k)+'='+in_col_str(v)
            for k, v in mapping.iteritems())
    else: args_str = in_col_str(value_in_col)
    return name+'('+args_str+')'
1118

    
1119
def put_table(db, out_table, in_tables, mapping, row_ct_ref=None, into=None,
    default=None, is_func=False):
    '''Bulk-upserts rows from the input tables into out_table, producing a
    table mapping each input pkey to its output pkey.
    Recovers from errors.
    Only works under PostgreSQL (uses INSERT RETURNING).
    @param in_tables The main input table to select from, followed by a list of
        tables to join with it using the main input table's pkey
    @param mapping dict(out_table_col=in_table_col, ...)
        * out_table_col: str (*not* sql_gen.Col)
        * in_table_col: sql_gen.Col|literal-value
    @param into The table to contain the output and input pkeys.
        Defaults to `out_table.name+'_pkeys'`.
    @param default The *output* column to use as the pkey for missing rows.
        If this output column does not exist in the mapping, uses None.
    @param is_func Whether out_table is the name of a SQL function, not a table
    @return sql_gen.Col Where the output pkeys are made available
    '''
    out_table = sql_gen.as_Table(out_table)
    
    def log_debug(msg): db.log_debug(msg, level=1.5)
    def col_ustr(str_):
        return strings.repr_no_u(sql_gen.remove_col_rename(str_))
    
    # Output pkey (assumed by pkey() to be the first column)
    out_pkey = pkey(db, out_table, recover=True)
    out_pkey_col = sql_gen.as_Col(out_pkey, out_table)
    
    if mapping == {}: # need at least one column for INSERT SELECT
        mapping = {out_pkey: None} # ColDict will replace with default value
    
    log_debug('********** New iteration **********')
    log_debug('Inserting these input columns into '+strings.as_tt(
        out_table.to_str(db))+':\n'+strings.as_table(mapping, ustr=col_ustr))
    
    # Create input joins from list of input tables
    in_tables_ = in_tables[:] # don't modify input!
    in_tables0 = in_tables_.pop(0) # first table is separate
    errors_table_ = errors_table(db, in_tables0)
    in_pkey = pkey(db, in_tables0, recover=True)
    in_pkey_col = sql_gen.as_Col(in_pkey, in_tables0)
    input_joins = [in_tables0]+[sql_gen.Join(v,
        {in_pkey: sql_gen.join_same_not_null}) for v in in_tables_]
    
    # Pick the pkeys table's name if not given
    if into == None:
        into = into_table_name(out_table, in_tables0, mapping, is_func)
    into = sql_gen.as_Table(into)
    
    # Set column sources
    in_cols = filter(sql_gen.is_table_col, mapping.values())
    for col in in_cols:
        if col.table == in_tables0: col.set_srcs(sql_gen.src_self)
    
    log_debug('Joining together input tables into temp table')
    # Place in new table for speed and so don't modify input if values edited
    in_table = sql_gen.Table('in')
    mapping = dicts.join(mapping, flatten(db, in_table, input_joins, in_cols,
        preserve=[in_pkey_col], start=0))
    input_joins = [in_table]
    db.log_debug('Temp table: '+strings.as_tt(in_table.to_str(db)), level=2)
    
    mapping = sql_gen.ColDict(db, out_table, mapping)
        # after applying dicts.join() because that returns a plain dict
    
    # Resolve default value column
    try: default = mapping[default]
    except KeyError:
        if default != None:
            db.log_debug('Default value column '
                +strings.as_tt(strings.repr_no_u(default))
                +' does not exist in mapping, falling back to None', level=2.1)
            default = None
    
    pkeys_names = [in_pkey, out_pkey]
    pkeys_cols = [in_pkey_col, out_pkey_col]
    
    # Creates the into table on first call, then appends to it
    pkeys_table_exists_ref = [False]
    def insert_into_pkeys(joins, cols):
        query = mk_select(db, joins, cols, order_by=None, start=0)
        if pkeys_table_exists_ref[0]:
            insert_select(db, into, pkeys_names, query)
        else:
            run_query_into(db, query, into=into)
            pkeys_table_exists_ref[0] = True
    
    # Mutable state shared with the retry loop below: row limit, accumulated
    # filter conditions, and DISTINCT ON columns
    limit_ref = [None]
    conds = set()
    distinct_on = []
    def mk_main_select(joins, cols):
        return mk_select(db, joins, cols, conds, distinct_on,
            limit=limit_ref[0], start=0)
    
    exc_strs = set()
    def log_exc(e):
        e_str = exc.str_(e, first_line_only=True)
        log_debug('Caught exception: '+e_str)
        assert e_str not in exc_strs # avoid infinite loops
        exc_strs.add(e_str)
    
    def remove_all_rows():
        log_debug('Returning NULL for all rows')
        limit_ref[0] = 0 # just create an empty pkeys table
    
    # Logs the bad value to the errors table and indexes the column so the
    # filtering added by remove_rows()/invalid2null() is fast
    def ignore(in_col, value, e):
        track_data_error(db, errors_table_, in_col.srcs, value, e.cause.pgcode,
            e.cause.pgerror)
        
        in_col_str = strings.as_tt(repr(in_col))
        db.log_debug('Adding index on '+in_col_str+' to enable fast filtering',
            level=2.5)
        add_index(db, in_col)
        
        log_debug('Ignoring rows with '+in_col_str+' = '
            +strings.as_tt(repr(value)))
    # Excludes rows with the bad value via a WHERE condition
    def remove_rows(in_col, value, e):
        ignore(in_col, value, e)
        cond = (in_col, sql_gen.CompareCond(value, '!='))
        assert cond not in conds # avoid infinite loops
        conds.add(cond)
    # Replaces the bad value with NULL in the temp input table
    def invalid2null(in_col, value, e):
        ignore(in_col, value, e)
        update(db, in_table, [(in_col, None)],
            sql_gen.ColValueCond(in_col, value))
    
    def insert_pkeys_table(which):
        return sql_gen.Table(sql_gen.add_suffix(in_table.name,
            '_insert_'+which+'_pkeys'))
    insert_out_pkeys = insert_pkeys_table('out')
    insert_in_pkeys = insert_pkeys_table('in')
    
    # Do inserts and selects
    # Retry loop: each handled exception narrows the query/mapping, then the
    # insert is attempted again
    join_cols = sql_gen.ColDict(db, out_table)
    while True:
        if limit_ref[0] == 0: # special case
            log_debug('Creating an empty pkeys table')
            cur = run_query_into(db, mk_select(db, out_table, [out_pkey],
                limit=limit_ref[0]), into=insert_out_pkeys)
            break # don't do main case
        
        has_joins = join_cols != {}
        
        # Prepare to insert new rows
        insert_joins = input_joins[:] # don't modify original!
        insert_args = dict(recover=True, cacheable=False)
        if has_joins:
            # Filter out rows that already exist in out_table, comparing on
            # the columns of the duplicate key(s) seen so far
            distinct_on = [v.to_Col() for v in join_cols.values()]
            insert_joins.append(sql_gen.Join(out_table, join_cols,
                sql_gen.filter_out))
        else:
            insert_args.update(dict(returning=out_pkey, into=insert_out_pkeys))
        main_select = mk_main_select(insert_joins, mapping.values())
        
        log_debug('Trying to insert new rows')
        try:
            cur = insert_select(db, out_table, mapping.keys(), main_select,
                **insert_args)
            break # insert successful
        except DuplicateKeyException, e:
            log_exc(e)
            
            # Add the duplicated key's columns to the existing-row comparison
            old_join_cols = join_cols.copy()
            join_cols.update(util.dict_subset_right_join(mapping, e.cols))
            log_debug('Ignoring existing rows, comparing on these columns:\n'
                +strings.as_inline_table(join_cols, ustr=col_ustr))
            assert join_cols != old_join_cols # avoid infinite loops
        except NullValueException, e:
            log_exc(e)
            
            out_col, = e.cols
            try: in_col = mapping[out_col]
            except KeyError:
                log_debug('Missing mapping for NOT NULL column '+out_col)
                remove_all_rows()
            else: remove_rows(in_col, None, e)
        except FunctionValueException, e:
            log_exc(e)
            
            # NULL out the invalid value in every mapped input column that
            # feeds the failing function
            func_name = e.name
            value = e.value
            for out_col, in_col in mapping.iteritems():
                in_col = sql_gen.unwrap_func_call(in_col, func_name)
                invalid2null(in_col, value, e)
        except MissingCastException, e:
            log_exc(e)
            
            out_col = e.col
            type_ = e.type
            
            # Wrap the input column in an error-trapping cast (see cast())
            log_debug('Casting '+strings.as_tt(out_col)+' input to '
                +strings.as_tt(type_))
            def wrap_func(col): return cast(db, type_, col, errors_table_)
            mapping[out_col] = sql_gen.wrap(wrap_func, mapping[out_col])
        except DatabaseErrors, e:
            log_exc(e)
            
            msg = 'No handler for exception: '+exc.str_(e)
            warnings.warn(DbWarning(msg))
            log_debug(msg)
            remove_all_rows()
        # after exception handled, rerun loop with additional constraints
    
    if row_ct_ref != None and cur.rowcount >= 0:
        row_ct_ref[0] += cur.rowcount
    
    if has_joins:
        # Existing rows were filtered out by join_cols, so the pkeys of both
        # existing and inserted rows can be selected with a plain join
        select_joins = input_joins+[sql_gen.Join(out_table, join_cols)]
        log_debug('Getting output table pkeys of existing/inserted rows')
        insert_into_pkeys(select_joins, pkeys_cols)
    else:
        # No join columns: pair input and output pkeys by insertion order,
        # using added row number columns
        add_row_num(db, insert_out_pkeys) # for joining with input pkeys
        
        log_debug('Getting input table pkeys of inserted rows')
        run_query_into(db, mk_main_select(input_joins, [in_pkey]),
            into=insert_in_pkeys)
        add_row_num(db, insert_in_pkeys) # for joining with output pkeys
        
        assert table_row_count(db, insert_out_pkeys) == table_row_count(db,
            insert_in_pkeys)
        
        log_debug('Combining output and input pkeys in inserted order')
        pkey_joins = [insert_in_pkeys, sql_gen.Join(insert_out_pkeys,
            {row_num_col: sql_gen.join_same_not_null})]
        insert_into_pkeys(pkey_joins, pkeys_names)
    
    db.log_debug('Adding pkey on pkeys table to enable fast joins', level=2.5)
    add_pkey(db, into)
    
    # Input rows with no output pkey (e.g. filtered out above) get `default`
    log_debug('Setting pkeys of missing rows to '+strings.as_tt(repr(default)))
    missing_rows_joins = input_joins+[sql_gen.Join(into,
        {in_pkey: sql_gen.join_same_not_null}, sql_gen.filter_out)]
        # must use join_same_not_null or query will take forever
    insert_into_pkeys(missing_rows_joins,
        [in_pkey_col, sql_gen.NamedCol(out_pkey, default)])
    
    assert table_row_count(db, into) == table_row_count(db, in_table)
    
    srcs = []
    if is_func: srcs = sql_gen.cols_srcs(in_cols)
    return sql_gen.Col(out_pkey, into, srcs)
1355

    
1356
##### Data cleanup
1357

    
1358
def cleanup_table(db, table, cols):
    '''Trims whitespace in the given columns and replaces the empty string and
    the literal backslash-N placeholder with NULL.'''
    table = sql_gen.as_Table(table)
    
    expr_tmpl = ('nullif(nullif(trim(both from %s), '+db.esc_value('')+'), '
        +db.esc_value(r'\N')+')')
    changes = []
    for col in cols:
        col = sql_gen.as_Col(col)
        changes.append((col, sql_gen.CustomCode(expr_tmpl % col.to_str(db))))
    
    update(db, table, changes)
(24-24/36)