Project

General

Profile

1
# Database access
2

    
3
import copy
4
import operator
5
import re
6
import warnings
7

    
8
import exc
9
import dicts
10
import iters
11
import lists
12
from Proxy import Proxy
13
import rand
14
import sql_gen
15
import strings
16
import util
17

    
18
##### Exceptions
19

    
20
def get_cur_query(cur, input_query=None, input_params=None):
    '''Returns the text of the query last executed on a cursor.
    
    Tries psycopg2's `query` attr, then MySQLdb's `_last_executed` attr;
    when neither yields a value, reconstructs a label from the input
    query and params.
    '''
    executed = None
    if hasattr(cur, 'query'): executed = cur.query # psycopg2
    elif hasattr(cur, '_last_executed'): executed = cur._last_executed # MySQLdb
    
    if executed != None: return executed
    return '[input] '+strings.ustr(input_query)+' % '+repr(input_params)
27

    
28
def _add_cursor_info(e, *args, **kw_args):
    '''Annotates an exception with the query that caused it.
    For params, see get_cur_query().
    '''
    query = get_cur_query(*args, **kw_args)
    exc.add_msg(e, 'query: '+strings.ustr(query))
31

    
32
class DbException(exc.ExceptionWithCause):
    '''Base class for all database-related errors raised by this module.'''
    
    def __init__(self, msg, cause=None, cur=None):
        exc.ExceptionWithCause.__init__(self, msg, cause, cause_newline=True)
        # When a cursor is provided, attach its last query to the message
        if cur is not None: _add_cursor_info(self, cur)
36

    
37
class ExceptionWithName(DbException):
    '''A DbException tagged with the name of the database object involved.'''
    
    def __init__(self, name, cause=None):
        msg = 'for name: '+strings.as_tt(str(name))
        DbException.__init__(self, msg, cause)
        self.name = name
41

    
42
class ExceptionWithNameValue(DbException):
    '''A DbException tagged with both a name and the offending value.'''
    
    def __init__(self, name, value, cause=None):
        msg = ('for name: '+strings.as_tt(str(name))
            +'; value: '+strings.as_tt(repr(value)))
        DbException.__init__(self, msg, cause)
        self.name = name
        self.value = value
48

    
49
class ConstraintException(DbException):
    '''Raised when a database constraint is violated.'''
    
    def __init__(self, name, cols, cause=None):
        msg = ('Violated '+strings.as_tt(name)
            +' constraint on columns: '+strings.as_tt(', '.join(cols)))
        DbException.__init__(self, msg, cause)
        self.name = name
        self.cols = cols
55

    
56
class MissingCastException(DbException):
    '''Raised when a value needs an explicit cast to a column's type.'''
    
    def __init__(self, type_, col, cause=None):
        msg = ('Missing cast to type '+strings.as_tt(type_)
            +' on column: '+strings.as_tt(col))
        DbException.__init__(self, msg, cause)
        self.type = type_
        self.col = col
62

    
63
class NameException(DbException): pass
64

    
65
class DuplicateKeyException(ConstraintException): pass
66

    
67
class NullValueException(ConstraintException): pass
68

    
69
class FunctionValueException(ExceptionWithNameValue): pass
70

    
71
class DuplicateTableException(ExceptionWithName): pass
72

    
73
class DuplicateFunctionException(ExceptionWithName): pass
74

    
75
class EmptyRowException(DbException): pass
76

    
77
##### Warnings
78

    
79
class DbWarning(UserWarning): pass
80

    
81
##### Result retrieval
82

    
83
def col_names(cur):
    '''Iterates over the column names in a cursor's description.'''
    return (entry[0] for entry in cur.description)
84

    
85
def rows(cur):
    '''Iterates over a cursor's rows until fetchone() returns None.'''
    return iter(cur.fetchone, None)
86

    
87
def consume_rows(cur):
    '''Exhausts a cursor's remaining rows so the result will be cached.'''
    row_iter = rows(cur)
    iters.consume_iter(row_iter)
90

    
91
def next_row(cur):
    '''Returns the next row, raising StopIteration when exhausted.'''
    return rows(cur).next()
92

    
93
def row(cur):
    '''Returns the first row, then exhausts the cursor so the full result
    gets cached.'''
    first = next_row(cur)
    consume_rows(cur)
    return first
97

    
98
def next_value(cur):
    '''Returns the first column of the next row.'''
    return next_row(cur)[0]
99

    
100
def value(cur):
    '''Returns the first column of the first row, consuming all rows.'''
    return row(cur)[0]
101

    
102
def values(cur):
    '''Iterates over the first column of every row.'''
    return iters.func_iter(lambda: next_value(cur))
103

    
104
def value_or_none(cur):
    '''Like value(), but returns None on an empty result set.'''
    try:
        return value(cur)
    except StopIteration:
        return None
107

    
108
##### Escaping
109

    
110
def esc_name_by_module(module, name):
    '''Quotes an identifier using the quoting style of a DB-API module.
    @param module 'psycopg2'|'MySQLdb'|None (None defaults to double quotes)
    '''
    if module == 'MySQLdb': quote = '`'
    elif module == 'psycopg2' or module == None: quote = '"'
    else: raise NotImplementedError("Can't escape name for "+module+' database')
    return sql_gen.esc_name(name, quote)
115

    
116
def esc_name_by_engine(engine, name, **kw_args):
    '''Quotes an identifier for a database engine (a key of db_engines).'''
    module = db_engines[engine][0]
    return esc_name_by_module(module, name, **kw_args)
118

    
119
def esc_name(db, name, **kw_args):
    '''Quotes an identifier for the driver behind a live connection.'''
    module = util.root_module(db.db)
    return esc_name_by_module(module, name, **kw_args)
121

    
122
def qual_name(db, schema, table):
    '''Returns the (optionally schema-qualified) quoted name of a table.'''
    qualified = esc_name(db, table)
    if schema != None: qualified = esc_name(db, schema)+'.'+qualified
    return qualified
127

    
128
##### Database connections
129

    
130
# Recognized keys in a db_config dict (see DbConn.__init__())
db_config_names = ['engine', 'host', 'user', 'password', 'database', 'schemas']

# engine name -> (DB-API module name, db_config key renames for that driver)
db_engines = {
    'MySQL': ('MySQLdb', {'password': 'passwd', 'database': 'db'}),
    'PostgreSQL': ('psycopg2', {}),
}

# All exception types that should be treated as database errors.
# _add_module() extends these with each loaded driver's DatabaseError.
DatabaseErrors_set = set([DbException])
DatabaseErrors = tuple(DatabaseErrors_set)
139

    
140
def _add_module(module):
    '''Registers a DB-API module's DatabaseError so it is recognized as a
    database error (via the DatabaseErrors tuple).'''
    global DatabaseErrors
    DatabaseErrors_set.add(module.DatabaseError)
    DatabaseErrors = tuple(DatabaseErrors_set)
144

    
145
def db_config_str(db_config):
    '''Returns a short human-readable label for a connection config.'''
    return db_config['engine']+' database '+db_config['database']
147

    
148
def _query_lookup(query, params):
    '''Builds a hashable cache key for a query and its params.'''
    return (query, dicts.make_hashable(params))
149

    
150
def log_debug_none(msg, level=2):
    '''No-op debug logger, used as DbConn's default log_debug callback.
    (A def instead of a lambda assignment, per PEP 8; DbConn compares
    against this object's identity to decide whether debugging is on.)
    '''
    pass
151

    
152
class DbConn:
    '''A DB-API connection wrapper adding query-result caching, debug
    logging, savepoint management, and optional per-statement autocommit.
    The underlying connection is opened lazily on first access of `self.db`
    (see __getattr__/_db).
    '''
    def __init__(self, db_config, serializable=True, autocommit=False,
        caching=True, log_debug=log_debug_none):
        '''
        @param db_config dict with keys from db_config_names; must include
            'engine' (a key of db_engines)
        @param serializable Whether to use SERIALIZABLE transaction isolation
        @param autocommit Whether to commit after each statement run outside
            a savepoint (see do_autocommit())
        @param caching Whether to cache query results (see run_query())
        @param log_debug Callback taking (msg, level); defaults to a no-op
        '''
        self.db_config = db_config
        self.serializable = serializable
        self.autocommit = autocommit
        self.caching = caching
        self.log_debug = log_debug
        # Debugging is on iff a real callback was supplied
        self.debug = log_debug != log_debug_none
        
        self.__db = None # the lazily-created DB-API connection
        self.query_results = {} # cache: _query_lookup() key -> CacheCursor
        self._savepoint = 0 # current savepoint nesting depth
        self._notices_seen = set() # server NOTICEs already logged
    
    def __getattr__(self, name):
        # Only called for names not found normally; exposes `db` lazily
        if name == '__dict__': raise Exception('getting __dict__')
        if name == 'db': return self._db()
        else: raise AttributeError()
    
    def __getstate__(self):
        '''Pickle support: excludes the live connection and the callback.'''
        state = copy.copy(self.__dict__) # shallow copy
        state['log_debug'] = None # don't pickle the debug callback
        state['_DbConn__db'] = None # don't pickle the connection
        return state
    
    def connected(self): return self.__db != None
    
    def _db(self):
        '''Returns the DB-API connection, creating and configuring it on
        first use.'''
        if self.__db == None:
            # Process db_config
            db_config = self.db_config.copy() # don't modify input!
            schemas = db_config.pop('schemas', None)
            module_name, mappings = db_engines[db_config.pop('engine')]
            module = __import__(module_name)
            _add_module(module)
            # Rename config keys to the names this driver expects
            for orig, new in mappings.iteritems():
                try: util.rename_key(db_config, orig, new)
                except KeyError: pass
            
            # Connect
            self.__db = module.connect(**db_config)
            
            # Configure connection
            if self.serializable and not self.autocommit: self.run_query(
                'SET TRANSACTION ISOLATION LEVEL SERIALIZABLE', log_level=4)
            if schemas != None:
                # Prepend the given comma-separated schemas to search_path
                schemas_ = ''.join((esc_name(self, s)+', '
                    for s in schemas.split(',')))
                self.run_query("SELECT set_config('search_path', "
                    +self.esc_value(schemas_)
                    +" || current_setting('search_path'), false)", log_level=3)
        
        return self.__db
    
    class DbCursor(Proxy):
        '''A cursor proxy that records the executed query and its rows so
        the result can be cached in the outer DbConn's query_results.'''
        def __init__(self, outer):
            Proxy.__init__(self, outer.db.cursor())
            self.outer = outer
            self.query_results = outer.query_results
            self.query_lookup = None # cache key, set by execute()
            self.result = [] # accumulated rows, or the raised exception
        
        def execute(self, query, params=None):
            # Only parameterless queries go through this caching path
            assert params == None or params == [] or params == ()
            if params == [] or params == (): params = None
                # None turns off the mogrifier
            
            self._is_insert = query.startswith('INSERT')
            self.query_lookup = _query_lookup(query, params)
            try:
                try:
                    cur = self.inner.execute(query, params)
                    self.outer.do_autocommit()
                finally: self.query = get_cur_query(self.inner, query, params)
            except Exception, e:
                _add_cursor_info(e, self, query, params)
                self.result = e # cache the exception as the result
                self._cache_result()
                raise
            # Fetch all rows so result will be cached
            if self.rowcount == 0 and not self._is_insert: consume_rows(self)
            return cur
        
        def fetchone(self):
            row = self.inner.fetchone()
            if row != None: self.result.append(row)
            # otherwise, fetched all rows
            else: self._cache_result()
            return row
        
        def _cache_result(self):
            # For inserts, only cache exceptions since inserts are not
            # idempotent, but an invalid insert will always be invalid
            if self.query_results != None and (not self._is_insert
                or isinstance(self.result, Exception)):
                
                assert self.query_lookup != None
                self.query_results[self.query_lookup] = self.CacheCursor(
                    util.dict_subset(dicts.AttrsDictView(self),
                    ['query', 'result', 'rowcount', 'description']))
        
        class CacheCursor:
            '''A frozen snapshot of a cursor's result, replayable as if the
            query were being run again.'''
            def __init__(self, cached_result): self.__dict__ = cached_result
            
            def execute(self, *args, **kw_args):
                if isinstance(self.result, Exception): raise self.result
                # otherwise, result is a rows list
                self.iter = iter(self.result)
            
            def fetchone(self):
                try: return self.iter.next()
                except StopIteration: return None
    
    def esc_value(self, value):
        '''Escapes a value as an SQL literal, falling back to MySQL's
        escape_string() when the driver can't mogrify.'''
        try: str_ = self.mogrify('%s', [value])
        except NotImplementedError, e:
            module = util.root_module(self.db)
            if module == 'MySQLdb':
                import _mysql
                str_ = _mysql.escape_string(value)
            else: raise e
        return strings.to_unicode(str_)
    
    def esc_name(self, name): return esc_name(self, name) # calls global func
    
    def can_mogrify(self):
        '''Whether the driver supports cursor.mogrify() (psycopg2 only).'''
        module = util.root_module(self.db)
        return module == 'psycopg2'
    
    def mogrify(self, query, params=None):
        '''Expands params into the query string (psycopg2 only).'''
        if self.can_mogrify(): return self.db.cursor().mogrify(query, params)
        else: raise NotImplementedError("Can't mogrify query")
    
    def print_notices(self):
        '''Logs server NOTICE messages not seen before on this connection.'''
        if hasattr(self.db, 'notices'):
            for msg in self.db.notices:
                if msg not in self._notices_seen:
                    self._notices_seen.add(msg)
                    self.log_debug(msg, level=2)
    
    def run_query(self, query, cacheable=False, log_level=2,
        debug_msg_ref=None):
        '''Runs a (parameterless) query, serving it from the cache when
        possible.
        @param cacheable Whether to cache/reuse the result (see DbCursor)
        @param log_level Debug level at which to log this query
        @param debug_msg_ref If specified, the log message will be returned in
            this instead of being output. This allows you to filter log messages
            depending on the result of the query.
        @return a cursor: live DbCursor, plain cursor, or replayed CacheCursor
        '''
        assert query != None
        
        if not self.caching: cacheable = False
        used_cache = False
        
        def log_msg(query):
            if used_cache: cache_status = 'cache hit'
            elif cacheable: cache_status = 'cache miss'
            else: cache_status = 'non-cacheable'
            return 'DB query: '+cache_status+':\n'+strings.as_code(query, 'SQL')
        
        try:
            # Get cursor
            if cacheable:
                query_lookup = _query_lookup(query, [])
                try:
                    cur = self.query_results[query_lookup]
                    used_cache = True
                except KeyError: cur = self.DbCursor(self)
            else: cur = self.db.cursor()
            
            # Log query
            if self.debug and debug_msg_ref == None: # log before running
                self.log_debug(log_msg(query), log_level)
            
            # Run query
            cur.execute(query)
        finally:
            self.print_notices()
            if self.debug and debug_msg_ref != None: # return after running
                debug_msg_ref[0] = log_msg(str(get_cur_query(cur, query)))
        
        return cur
    
    def is_cached(self, query, params=None):
        # Whether this exact (query, params) pair has a cached result
        return _query_lookup(query, params) in self.query_results
    
    def with_autocommit(self, func, autocommit=True):
        '''Runs func with the connection's autocommit temporarily set.'''
        prev_autocommit = self.db.autocommit
        self.db.autocommit = autocommit
        try: return func()
        finally: self.db.autocommit = prev_autocommit
    
    def with_savepoint(self, func):
        '''Runs func inside a savepoint: rolls back to it on any exception,
        releases it on success. Savepoints may nest (named by depth).'''
        savepoint = 'level_'+str(self._savepoint)
        self.run_query('SAVEPOINT '+savepoint, log_level=4)
        self._savepoint += 1
        try:
            try: return_val = func()
            finally:
                # Always restore the depth, even if func() raised
                self._savepoint -= 1
                assert self._savepoint >= 0
        except:
            self.run_query('ROLLBACK TO SAVEPOINT '+savepoint, log_level=4)
            raise
        else:
            self.run_query('RELEASE SAVEPOINT '+savepoint, log_level=4)
            self.do_autocommit()
            return return_val
    
    def do_autocommit(self):
        '''Autocommits if outside savepoint'''
        assert self._savepoint >= 0
        if self.autocommit and self._savepoint == 0:
            self.log_debug('Autocommitting')
            self.db.commit()
    
    def col_default(self, col):
        '''Returns a column's DEFAULT expression (from information_schema)
        as sql_gen Code.
        @param col sql_gen.Col with .table (and optionally .table.schema) set
        '''
        table = sql_gen.Table('columns', 'information_schema')
        
        conds = [('table_name', col.table.name), ('column_name', col.name)]
        schema = col.table.schema
        if schema != None: conds.append(('table_schema', schema))
        
        return sql_gen.as_Code(value(select(self, table, ['column_default'],
            conds, order_by='table_schema', limit=1, log_level=3)))
            # TODO: order_by search_path schema order
379

    
380
# DB-API-style alias: client code calls sql.connect(config) to get a DbConn
connect = DbConn
381

    
382
##### Querying
383

    
384
def mogrify(db, query, params):
    '''Expands params into the query string, where the driver supports it
    (psycopg2 only).'''
    module = util.root_module(db.db)
    if module != 'psycopg2': raise NotImplementedError(
        "Can't mogrify query for "+module+' database')
    return db.db.cursor().mogrify(query, params)
389

    
390
##### Recoverable querying
391

    
392
def with_savepoint(db, func):
    '''Runs func inside a savepoint. See DbConn.with_savepoint().'''
    return db.with_savepoint(func)
393

    
394
def run_query(db, query, recover=None, cacheable=False, log_level=2,
    log_ignore_excs=None, **kw_args):
    '''Runs a query and translates recognized PostgreSQL error messages
    into this module's typed exceptions.
    For other params, see DbConn.run_query().
    @param recover Whether to run inside a savepoint, so a failure doesn't
        abort the enclosing transaction (required for error translation)
    @param log_ignore_excs The log_level will be increased by 2 if the query
        throws one of these exceptions.
    '''
    if recover == None: recover = False
    if log_ignore_excs == None: log_ignore_excs = ()
    log_ignore_excs = tuple(log_ignore_excs)
    
    debug_msg_ref = None # usually, db.run_query() logs query before running it
    # But if filtering with log_ignore_excs, wait until after exception parsing
    if log_ignore_excs != () or not db.can_mogrify(): debug_msg_ref = [None]
    
    try:
        try:
            def run(): return db.run_query(query, cacheable, log_level,
                debug_msg_ref, **kw_args)
            if recover and not db.is_cached(query, []):
                return with_savepoint(db, run)
            else: return run() # don't need savepoint if cached
        except Exception, e:
            if not recover: raise # need savepoint to run index_cols()
            msg = exc.str_(e)
            
            # Unique-constraint violation -> DuplicateKeyException
            match = re.search(r'duplicate key value violates unique constraint '
                r'"((_?[^\W_]+)_.+?)"', msg)
            if match:
                constraint, table = match.groups()
                try: cols = index_cols(db, table, constraint)
                except NotImplementedError: raise e
                else: raise DuplicateKeyException(constraint, cols, e)
            
            # NOT NULL violation -> NullValueException
            match = re.search(r'null value in column "(.+?)" violates not-null'
                r' constraint', msg)
            if match: raise NullValueException('NOT NULL', [match.group(1)], e)
            
            # Bad input value to a function -> FunctionValueException
            match = re.search(r'\b(?:invalid input (?:syntax|value)\b.*?'
                r'|date/time field value out of range): "(.+?)"\n'
                r'(?:(?s).*?)\bfunction "(.+?)"', msg)
            if match:
                value, name = match.groups()
                raise FunctionValueException(name, strings.to_unicode(value), e)
            
            # Column/expression type mismatch -> MissingCastException
            match = re.search(r'column "(.+?)" is of type (.+?) but expression '
                r'is of type', msg)
            if match:
                col, type_ = match.groups()
                raise MissingCastException(type_, col, e)
            
            # Name collisions on CREATE TABLE / CREATE FUNCTION
            match = re.search(r'relation "(.+?)" already exists', msg)
            if match: raise DuplicateTableException(match.group(1), e)
            
            match = re.search(r'function "(.+?)" already exists', msg)
            if match: raise DuplicateFunctionException(match.group(1), e)
            
            raise # no specific exception raised
    except log_ignore_excs:
        log_level += 2
        raise
    finally:
        # Emit the deferred log message, if logging was deferred (see above)
        if debug_msg_ref != None and debug_msg_ref[0] != None:
            db.log_debug(debug_msg_ref[0], log_level)
454

    
455
##### Basic queries
456

    
457
def next_version(name):
    '''Increments the '#<n>' version suffix on a name.
    An unsuffixed name counts as version 0, so it becomes 'name#1'.
    '''
    version = 1 # first existing name was version 0
    match = re.match(r'^(.*)#(\d+)$', name)
    if match != None:
        name, prev = match.groups()
        version = int(prev)+1
    return sql_gen.add_suffix(name, '#'+str(version))
464

    
465
def run_query_into(db, query, into=None, add_indexes_=False, **kw_args):
    '''Outputs a query to a temp table.
    For params, see run_query().
    @param into sql_gen.Table to create and fill; on a name collision it is
        renamed in place to the next version (name#N) and retried
    @param add_indexes_ Whether to index the new table via add_indexes()
        (defined elsewhere in this file)
    '''
    # No target table: just run the query normally
    if into == None: return run_query(db, query, **kw_args)
    
    assert isinstance(into, sql_gen.Table)
    
    # recover is required so a collision doesn't abort the transaction
    kw_args['recover'] = True
    kw_args.setdefault('log_ignore_excs', (DuplicateTableException,))
    
    temp = not db.autocommit # tables are permanent in autocommit mode
    # "temporary tables cannot specify a schema name", so remove schema
    if temp: into.schema = None
    
    # Create table
    while True:
        create_query = 'CREATE'
        if temp: create_query += ' TEMP'
        create_query += ' TABLE '+into.to_str(db)+' AS\n'+query
        
        try:
            cur = run_query(db, create_query, **kw_args)
                # CREATE TABLE AS sets rowcount to # rows in query
            break
        except DuplicateTableException, e:
            into.name = next_version(into.name)
            # try again with next version of name
    
    if add_indexes_: add_indexes(db, into)
    
    return cur
497

    
498
order_by_pkey = object() # tells mk_select() to order by the pkey
499

    
500
distinct_on_all = object() # tells mk_select() to SELECT DISTINCT ON all columns
501

    
502
def mk_select(db, tables, fields=None, conds=None, distinct_on=[], limit=None,
    start=None, order_by=order_by_pkey, default_table=None):
    '''Builds a SELECT query string.
    @param tables The single table to select from, or a list of tables to join
        together, with tables after the first being sql_gen.Join objects
    @param fields Use None to select all fields in the table
    @param conds WHERE conditions: [(compare_left_side, compare_right_side),...]
        * container can be any iterable type
        * compare_left_side: sql_gen.Code|str (for col name)
        * compare_right_side: sql_gen.ValueCond|literal value
    @param distinct_on The columns to SELECT DISTINCT ON, or distinct_on_all to
        use all columns
    @param order_by order_by_pkey (default), a column, or None for no ORDER BY
    @return query
    '''
    # Parse tables param
    if not lists.is_seq(tables): tables = [tables]
    tables = list(tables) # don't modify input! (list() copies input)
    table0 = sql_gen.as_Table(tables.pop(0)) # first table is separate
    
    # Parse other params
    if conds == None: conds = []
    elif dicts.is_dict(conds): conds = conds.items()
    conds = list(conds) # don't modify input! (list() copies input)
    assert limit == None or type(limit) == int
    assert start == None or type(start) == int
    if order_by is order_by_pkey:
        # DISTINCT ON and ORDER BY the pkey don't mix, so prefer DISTINCT ON
        if distinct_on != []: order_by = None
        else: order_by = pkey(db, table0, recover=True)
    
    query = 'SELECT'
    
    def parse_col(col): return sql_gen.as_Col(col, default_table).to_str(db)
    
    # DISTINCT ON columns
    if distinct_on != []:
        query += '\nDISTINCT'
        if distinct_on is not distinct_on_all:
            query += ' ON ('+(', '.join(map(parse_col, distinct_on)))+')'
    
    # Columns
    query += '\n'
    if fields == None: query += '*'
    else:
        assert fields != []
        query += '\n, '.join(map(parse_col, fields))
    
    # Main table
    query += '\nFROM '+table0.to_str(db)
    
    # Add joins
    left_table = table0
    for join_ in tables:
        table = join_.table
        
        # Parse special values
        if join_.type_ is sql_gen.filter_out: # filter no match
            conds.append((sql_gen.Col(table_not_null_col(db, table), table),
                None))
        
        query += '\n'+join_.to_str(db, left_table)
        
        left_table = table
    
    # Clauses that bound the result set. Warn if none is present, since an
    # unbounded SELECT over a large table is usually a mistake.
    missing = True
    if conds != []:
        query += '\n'+sql_gen.combine_conds([sql_gen.ColValueCond(l, r)
            .to_str(db) for l, r in conds], 'WHERE')
        missing = False
    if order_by != None:
        query += '\nORDER BY '+sql_gen.as_Col(order_by, table0).to_str(db)
    if limit != None: query += '\nLIMIT '+str(limit); missing = False
    if start != None:
        if start != 0: query += '\nOFFSET '+str(start)
        missing = False
    if missing: warnings.warn(DbWarning(
        'SELECT statement missing a WHERE, LIMIT, or OFFSET clause: '+query))
    
    return query
582

    
583
def select(db, *args, **kw_args):
    '''For params, see mk_select() and run_query()'''
    recover = kw_args.pop('recover', None)
    cacheable = kw_args.pop('cacheable', True)
    log_level = kw_args.pop('log_level', 2)
    
    query = mk_select(db, *args, **kw_args)
    return run_query(db, query, recover, cacheable, log_level=log_level)
591

    
592
def mk_insert_select(db, table, cols=None, select_query=None, returning=None,
    embeddable=False):
    '''Builds an INSERT ... SELECT query string.
    @param cols Columns to insert into, or None for unspecified (all defaults)
    @param select_query The SELECT (or VALUES) supplying the rows; defaults
        to 'DEFAULT VALUES'
    @param returning str|None An inserted column (such as pkey) to return
    @param embeddable Whether the query should be embeddable as a nested SELECT.
        Warning: If you set this and cacheable=True when the query is run, the
        query will be fully cached, not just if it raises an exception.
    '''
    table = sql_gen.remove_table_rename(sql_gen.as_Table(table))
    if cols == []: cols = None # no cols (all defaults) = unknown col names
    if cols != None:
        cols = [sql_gen.to_name_only_col(v, table).to_str(db) for v in cols]
    if select_query == None: select_query = 'DEFAULT VALUES'
    if returning != None: returning = sql_gen.as_Col(returning, table)
    
    # Build query
    first_line = 'INSERT INTO '+table.to_str(db)
    query = first_line
    if cols != None: query += '\n('+', '.join(cols)+')'
    query += '\n'+select_query
    
    if returning != None:
        query += '\nRETURNING '+sql_gen.to_name_only_col(returning).to_str(db)
    
    if embeddable:
        # INSERT ... RETURNING can't be nested directly, so wrap it in a
        # set-returning SQL function and SELECT from that instead
        assert returning != None
        
        # Create function
        function_name = sql_gen.clean_name(first_line)
        return_type = 'SETOF '+returning.to_str(db)+'%TYPE'
        while True:
            try:
                function = sql_gen.TempFunction(function_name, db.autocommit)
                
                function_query = '''\
CREATE FUNCTION '''+function.to_str(db)+'''()
RETURNS '''+return_type+'''
LANGUAGE sql
AS $$
'''+query+''';
$$;
'''
                run_query(db, function_query, recover=True, cacheable=True,
                    log_ignore_excs=(DuplicateFunctionException,))
                break # this version was successful
            except DuplicateFunctionException, e:
                function_name = next_version(function_name)
                # try again with next version of name
        
        # Return query that uses function
        func_table = sql_gen.NamedTable('f', sql_gen.FunctionCall(function),
            [returning]) # AS clause requires function alias
        return mk_select(db, func_table, start=0, order_by=None)
    
    return query
647

    
648
def insert_select(db, *args, **kw_args):
    '''For params, see mk_insert_select() and run_query_into()
    @param into sql_gen.Table with suggested name of temp table to put RETURNING
        values in
    '''
    into = kw_args.pop('into', None)
    if into != None: kw_args['embeddable'] = True
    recover = kw_args.pop('recover', None)
    cacheable = kw_args.pop('cacheable', True)
    log_level = kw_args.pop('log_level', 2)
    
    query = mk_insert_select(db, *args, **kw_args)
    return run_query_into(db, query, into, recover=recover,
        cacheable=cacheable, log_level=log_level)
661

    
662
# Sentinel value (re-exported from sql_gen):
default = sql_gen.default # tells insert() to use the default value for a column
663

    
664
def insert(db, table, row, *args, **kw_args):
    '''Inserts a single row.
    @param row dict (col name -> value) or a sequence of values
    For other params, see insert_select().
    '''
    if lists.is_seq(row): cols = None
    else: # a dict: split into parallel col-name and value sequences
        cols = row.keys()
        row = row.values()
    row = list(row) # ensure that "== []" works
    
    query = None
    if row != []: query = sql_gen.Values(row).to_str(db)
    
    return insert_select(db, table, cols, query, *args, **kw_args)
676

    
677
def mk_update(db, table, changes=None, cond=None):
    '''Builds an UPDATE query string.
    @param changes [(col, new_value),...]
        * container can be any iterable type
        * col: sql_gen.Code|str (for col name)
        * new_value: sql_gen.Code|literal value
    @param cond sql_gen.Code WHERE condition. e.g. use sql_gen.*Cond objects.
    @return str query
    '''
    def mk_assignment(col, new_value):
        return (sql_gen.to_name_only_col(col, table).to_str(db)+' = '
            +sql_gen.as_Value(new_value).to_str(db))
    
    query = 'UPDATE '+sql_gen.as_Table(table).to_str(db)+'\nSET\n'
    query += ',\n'.join((mk_assignment(c, v) for c, v in changes))
    if cond != None: query += '\nWHERE\n'+cond.to_str(db)
    
    return query
692

    
693
def update(db, *args, **kw_args):
    '''For params, see mk_update() and run_query()'''
    recover = kw_args.pop('recover', None)
    
    query = mk_update(db, *args, **kw_args)
    return run_query(db, query, recover)
698

    
699
def last_insert_id(db):
    '''Returns the last auto-generated ID on this connection, or None if the
    driver doesn't support retrieving it.
    @param db DbConn
    '''
    module = util.root_module(db.db)
    if module == 'psycopg2': return value(run_query(db, 'SELECT lastval()'))
    elif module == 'MySQLdb': return db.db.insert_id()
        # BUGFIX: insert_id() lives on the raw MySQLdb connection (db.db);
        # calling it on the DbConn wrapper raised AttributeError via
        # DbConn.__getattr__
    else: return None
704

    
705
def mk_flatten_mapping(db, into, cols, preserve=[], as_items=False):
    '''Creates a mapping from original column names (which may have collisions)
    to names that will be distinct among the columns' tables.
    This is meant to be used for several tables that are being joined together.
    @param cols The columns to combine. Duplicates will be removed.
    @param into The table for the new columns.
    @param preserve [sql_gen.Col...] Columns not to rename. Note that these
        columns will be included in the mapping even if they are not in cols.
        The tables of the provided Col objects will be changed to into, so make
        copies of them if you want to keep the original tables.
    @param as_items Whether to return a list of dict items instead of a dict
    @return dict(orig_col=new_col, ...)
        * orig_col: sql_gen.Col(orig_col_name, orig_table)
        * new_col: sql_gen.Col(orig_col_name, into)
        * All mappings use the into table so its name can easily be
          changed for all columns at once
    '''
    cols = lists.uniqify(cols)
    
    items = []
    for col in preserve:
        orig_col = copy.copy(col)
        # NOTE: mutates the caller's Col, as documented in @param preserve
        col.table = into
        items.append((orig_col, col))
    preserve = set(preserve)
    for col in cols:
        # Skip cols already handled via preserve; the rest map to a col of
        # the same (string) name on the into table, keeping their srcs
        if col not in preserve:
            items.append((col, sql_gen.Col(str(col), into, col.srcs)))
    
    if not as_items: items = dict(items)
    return items
736

    
737
def flatten(db, into, joins, cols, limit=None, start=None, **kw_args):
    '''Selects the given columns of a join into a new table, renamed per
    mk_flatten_mapping().
    For params, see mk_flatten_mapping()
    @return See return value of mk_flatten_mapping()
    '''
    items = mk_flatten_mapping(db, into, cols, as_items=True, **kw_args)
    select_cols = [sql_gen.NamedCol(new.name, old) for old, new in items]
    query = mk_select(db, joins, select_cols, limit=limit, start=start)
    run_query_into(db, query, into=into)
    return dict(items)
746

    
747
def mk_track_data_error(db, errors_table, cols, value, error_code, error):
    '''Builds an INSERT that logs one (column, value, error_code, error) row
    into errors_table for *each* of the given source columns.
    @param cols [sql_gen.Col...] the source columns the error applies to
    @param value|error_code|error values to record (sql_gen.Code|literal)
    @return str the INSERT ... SELECT query
    '''
    cols = map(sql_gen.to_name_only_col, cols)
    
    # A VALUES list with one row per source column name
    columns_cols = ['column']
    columns = sql_gen.NamedValues('columns', columns_cols,
        [[c.name] for c in cols])
    # A single row holding the error info, cross-joined onto every column
    values_cols = ['value', 'error_code', 'error']
    values = sql_gen.NamedValues('values', values_cols,
        [value, error_code, error])
    
    select_cols = columns_cols+values_cols
    name_only_cols = map(sql_gen.to_name_only_col, select_cols)
    errors_table = sql_gen.NamedTable('errors', errors_table)
    # The filter_out join appears to skip combinations already present in
    # errors_table (anti-join) — see mk_select()'s filter_out handling
    joins = [columns, sql_gen.Join(values, type_='CROSS'),
        sql_gen.Join(errors_table, dict(zip(name_only_cols, select_cols)),
        sql_gen.filter_out)]
    
    return mk_insert_select(db, errors_table, name_only_cols,
        mk_select(db, joins, select_cols, order_by=None))
766

    
767
def track_data_error(db, errors_table, *args, **kw_args):
    '''Logs a data error to the errors table, if one is configured.
    For other params, see mk_track_data_error().
    @param errors_table If None, does nothing.
    '''
    if errors_table == None: return
    query = mk_track_data_error(db, errors_table, *args, **kw_args)
    run_query(db, query, cacheable=True, log_level=4)
774

    
775
def cast(db, type_, col, errors_table=None):
    '''Casts an (unrenamed) column or value.
    If errors_table set and col has srcs, saves errors in errors_table (using
    col's srcs attr as the source columns) and converts errors to warnings.
    @param type_ str The SQL type to cast to
    @param col sql_gen.Col|sql_gen.Literal
    @param errors_table None|sql_gen.Table|str
    @return sql_gen.Code The cast expression (a plain ::-cast, or a call to a
        generated error-trapping function)
    '''
    save_errors = (errors_table != None and isinstance(col, sql_gen.Col)
        and col.srcs != ())
    if not save_errors: # can't save errors
        return sql_gen.CustomCode(col.to_str(db)+'::'+type_) # just cast
    
    assert not isinstance(col, sql_gen.NamedCol)
    
    errors_table = sql_gen.as_Table(errors_table)
    srcs = map(sql_gen.to_name_only_col, col.srcs)
    # Function name encodes the type and source columns, so equivalent casts
    # share one function
    function_name = str(sql_gen.FunctionCall(type_, *srcs))
    function = sql_gen.TempFunction(function_name, db.autocommit)
    
    # Loop until a free function name is found (next_version() bumps the name
    # on a DuplicateFunctionException)
    while True:
        # Create function definition
        query = '''\
CREATE FUNCTION '''+function.to_str(db)+'''(value text)
RETURNS '''+type_+'''
LANGUAGE plpgsql
STRICT
AS $$
BEGIN
    /* The explicit cast to the return type is needed to make the cast happen
    inside the try block. (Implicit casts to the return type happen at the end
    of the function, outside any block.) */
    RETURN value::'''+type_+''';
EXCEPTION
    WHEN data_exception THEN
        -- Save error in errors table.
        -- Insert the value and error for *each* source column.
'''+mk_track_data_error(db, errors_table, srcs,
    *map(sql_gen.CustomCode, ['value', 'SQLSTATE', 'SQLERRM']))+''';
        
        RAISE WARNING '%', SQLERRM;
        RETURN NULL;
END;
$$;
'''
        
        # Create function
        try:
            run_query(db, query, recover=True, cacheable=True,
                log_ignore_excs=(DuplicateFunctionException,))
            break # successful
        except DuplicateFunctionException:
            function.name = next_version(function.name)
            # try again with next version of name
    
    return sql_gen.FunctionCall(function, col)
830

    
831
##### Database structure queries
832

    
833
def table_row_count(db, table, recover=None):
    '''Returns the number of rows in a table.'''
    count_query = mk_select(db, table, [sql_gen.row_count], order_by=None,
        start=0)
    return value(run_query(db, count_query, recover=recover, log_level=3))
836

    
837
def table_cols(db, table, recover=None):
    '''Returns the table's column names, in table order.'''
    # limit=0 fetches no rows; only the cursor's column metadata is used
    cur = select(db, table, limit=0, order_by=None, recover=recover,
        log_level=4)
    return list(col_names(cur))
840

    
841
def pkey(db, table, recover=None):
    '''Returns the table's pkey. Assumed to be first column in table.'''
    cols = table_cols(db, table, recover)
    return cols[0]
844

    
845
not_null_col = 'not_null_col'
846

    
847
def table_not_null_col(db, table, recover=None):
    '''Name assumed to be the value of not_null_col. If not found, uses pkey.
    @return str The name of a column guaranteed NOT NULL
    '''
    # Fetch the column list once; calling pkey() here would re-derive the same
    # list just to take its first element
    cols = table_cols(db, table, recover)
    if not_null_col in cols: return not_null_col
    else: return cols[0] # the pkey, assumed to be the first column
851

    
852
def index_cols(db, table, index):
    '''Returns the column names covered by an index, in attnum order.
    Can also use this for UNIQUE constraints, because a UNIQUE index is
    automatically created. When you don't know whether something is a UNIQUE
    constraint or a UNIQUE index, use this function.'''
    module = util.root_module(db.db)
    if module == 'psycopg2':
        # The UNION's first branch handles plain column indexes (via indkey);
        # the second extracts column numbers from expression indexes by
        # regex-matching the serialized indexprs tree
        return list(values(run_query(db, '''\
SELECT attname
FROM
(
        SELECT attnum, attname
        FROM pg_index
        JOIN pg_class index ON index.oid = indexrelid
        JOIN pg_class table_ ON table_.oid = indrelid
        JOIN pg_attribute ON attrelid = indrelid AND attnum = ANY (indkey)
        WHERE
            table_.relname = '''+db.esc_value(table)+'''
            AND index.relname = '''+db.esc_value(index)+'''
    UNION
        SELECT attnum, attname
        FROM
        (
            SELECT
                indrelid
                , (regexp_matches(indexprs, E':varattno (\\\\d+)', 'g'))[1]::int
                    AS indkey
            FROM pg_index
            JOIN pg_class index ON index.oid = indexrelid
            JOIN pg_class table_ ON table_.oid = indrelid
            WHERE
                table_.relname = '''+db.esc_value(table)+'''
                AND index.relname = '''+db.esc_value(index)+'''
        ) s
        JOIN pg_attribute ON attrelid = indrelid AND attnum = indkey
) s
ORDER BY attnum
'''
            , cacheable=True, log_level=4)))
    else: raise NotImplementedError("Can't list index columns for "+module+
        ' database')
892

    
893
def constraint_cols(db, table, constraint):
    '''Returns the column names covered by a named constraint, in attnum order.
    For UNIQUE constraints, use index_cols() instead.'''
    module = util.root_module(db.db)
    if module == 'psycopg2':
        # NOTE(review): unlike index_cols(), this query is not run with
        # cacheable=True — confirm whether that is intentional
        return list(values(run_query(db, '''\
SELECT attname
FROM pg_constraint
JOIN pg_class ON pg_class.oid = conrelid
JOIN pg_attribute ON attrelid = conrelid AND attnum = ANY (conkey)
WHERE
    relname = '''+db.esc_value(table)+'''
    AND conname = '''+db.esc_value(constraint)+'''
ORDER BY attnum
'''
            )))
    else: raise NotImplementedError("Can't list constraint columns for "+module+
        ' database')
909

    
910
row_num_col = '_row_num'
911

    
912
def add_index(db, exprs, table=None, unique=False):
    '''Adds an index on column(s) or expression(s) if it doesn't already exist.
    Currently, only function calls are supported as expressions.
    @param exprs sql_gen.Col|expression or a sequence of them
    @param table The table to index. Defaults to the table of the first
        extracted column.
    @param unique If set, creates a UNIQUE index.
    '''
    if not lists.is_seq(exprs): exprs = [exprs]
    
    # Parse exprs
    old_exprs = exprs[:]
    exprs = []
    cols = []
    for expr in old_exprs: # (enumerate index was unused)
        expr = copy.deepcopy(expr) # don't modify input!
        expr = sql_gen.as_Col(expr)
        
        # Extract col
        if isinstance(expr, sql_gen.FunctionCall):
            col = expr.args[0]
            expr = sql_gen.Expr(expr)
        else: col = expr
        
        # Extract table
        if table == None:
            assert sql_gen.is_table_col(col)
            table = col.table
        
        col.table = None # index name below must use the bare column name
        
        exprs.append(expr)
        cols.append(col)
    
    table = sql_gen.as_Table(table)
    # Index name is derived from the table and column names, so the same index
    # is recognized as a duplicate on re-creation
    index = sql_gen.Table(str(sql_gen.Col(','.join(map(str, cols)), table)))
    
    str_ = 'CREATE'
    if unique: str_ += ' UNIQUE'
    str_ += ' INDEX '+index.to_str(db)+' ON '+table.to_str(db)+' ('+(
        ', '.join((v.to_str(db) for v in exprs)))+')'
    
    try: run_query(db, str_, recover=True, cacheable=True, log_level=3)
    except DuplicateTableException: pass # index already existed
952

    
953
def add_pkey(db, table, cols=None, recover=None):
    '''Adds a primary key.
    @param cols [sql_gen.Col,...] The columns in the primary key.
        Defaults to the first column in the table.
    @pre The table must not already have a primary key.
    '''
    table = sql_gen.as_Table(table)
    if cols == None: cols = [pkey(db, table, recover)]
    col_strs = [sql_gen.to_name_only_col(c).to_str(db) for c in cols]
    
    query = ('ALTER TABLE '+table.to_str(db)+' ADD PRIMARY KEY ('
        +', '.join(col_strs)+')')
    run_query(db, query, recover=True, cacheable=True, log_level=3,
        log_ignore_excs=(DuplicateTableException,))
966

    
967
already_indexed = object() # tells add_indexes() the pkey has already been added
968

    
969
def add_indexes(db, table, has_pkey=True):
    '''Adds an index on all columns in a table.
    @param has_pkey bool|already_indexed Whether a pkey instead of a regular
        index should be added on the first column.
        * If already_indexed, the pkey is assumed to have already been added
    '''
    remaining = table_cols(db, table)
    if has_pkey:
        # First column is (or already became) the pkey; skip its plain index
        if has_pkey is not already_indexed: add_pkey(db, table)
        remaining = remaining[1:]
    for col_name in remaining:
        add_index(db, col_name, table)
980

    
981
def add_row_num(db, table):
    '''Adds a row number column to a table. Its name is in row_num_col. It will
    be the primary key.'''
    table_str = sql_gen.as_Table(table).to_str(db)
    query = ('ALTER TABLE '+table_str+' ADD COLUMN '+row_num_col
        +' serial NOT NULL PRIMARY KEY')
    run_query(db, query, log_level=3)
987

    
988
def create_table(db, table, cols, has_pkey=True, col_indexes=True):
    '''Creates a table.
    @param cols [sql_gen.TypedCol,...] The column names and types
    @param has_pkey If set, the first column becomes the primary key.
    @param col_indexes bool|[ref]
        * If True, indexes will be added on all non-pkey columns.
        * If a list reference, [0] will be set to a function to do this.
          This can be used to delay index creation until the table is populated.
    '''
    table = sql_gen.as_Table(table)
    
    if has_pkey:
        # Bug fix: copy the list before replacing its first element, so the
        # caller's cols list really isn't modified (the original comment
        # promised this, but only the TypedCol itself was copied)
        cols = list(cols)
        cols[0] = pkey = copy.copy(cols[0]) # don't modify input!
        pkey.type += ' NOT NULL PRIMARY KEY'
    
    str_ = 'CREATE TABLE '+table.to_str(db)+' (\n'
    str_ += '\n, '.join(v.to_str(db) for v in cols)
    str_ += '\n);\n'
    run_query(db, str_, cacheable=True, log_level=2)
    
    # Add indexes
    if has_pkey: has_pkey = already_indexed
    def add_indexes_(): add_indexes(db, table, has_pkey)
    if isinstance(col_indexes, list): col_indexes[0] = add_indexes_ # defer
    elif col_indexes: add_indexes_() # add now
1013

    
1014
def vacuum(db, table):
    '''VACUUM ANALYZEs a table.
    Runs with autocommit because VACUUM cannot run inside a transaction.'''
    query = 'VACUUM ANALYZE '+sql_gen.as_Table(table).to_str(db)
    db.with_autocommit(lambda: run_query(db, query, log_level=3))
1018

    
1019
def truncate(db, table, schema='public'):
    '''Empties a table, cascading to tables that reference it.'''
    table = sql_gen.as_Table(table, schema)
    query = 'TRUNCATE '+table.to_str(db)+' CASCADE'
    return run_query(db, query)
1022

    
1023
def tables(db, schema_like='public', table_like='%', exact=False):
    '''Lists the tables matching the given patterns.
    @param exact If set, patterns are compared with = instead of LIKE.
    '''
    compare = '=' if exact else 'LIKE'
    
    module = util.root_module(db.db)
    if module == 'psycopg2':
        conds = [('schemaname', sql_gen.CompareCond(schema_like, compare)),
            ('tablename', sql_gen.CompareCond(table_like, compare))]
        cur = select(db, 'pg_tables', ['tablename'], conds,
            order_by='tablename', log_level=4)
        return values(cur)
    if module == 'MySQLdb':
        # NOTE(review): the MySQL branch ignores schema_like and exact —
        # confirm whether that is intentional
        cur = run_query(db, 'SHOW TABLES LIKE '+db.esc_value(table_like),
            cacheable=True, log_level=4)
        return values(cur)
    raise NotImplementedError("Can't list tables for "+module+' database')
1037

    
1038
def table_exists(db, table):
    '''Whether the given table exists in the database.'''
    table = sql_gen.as_Table(table)
    matches = list(tables(db, table.schema, table.name, exact=True))
    return matches != []
1041

    
1042
def errors_table(db, table, if_exists=True):
    '''Looks up the errors table associated with a data table.
    @param if_exists If set, returns None if the errors table doesn't exist
    @return None|sql_gen.Table
    '''
    table = sql_gen.as_Table(table)
    if table.srcs != (): table = table.srcs[0] # use the original source table
    
    errors_table_ = sql_gen.suffixed_table(table, '.errors')
    if if_exists and not table_exists(db, errors_table_): return None
    return errors_table_
1053

    
1054
##### Database management
1055

    
1056
def empty_db(db, schema='public', **kw_args):
    '''Truncates every table in a schema. For kw_args, see tables()'''
    for table_ in tables(db, schema, **kw_args):
        truncate(db, table_, schema)
1059

    
1060
##### Heuristic queries
1061

    
1062
def put(db, table, row, pkey_=None, row_ct_ref=None):
    '''Inserts a row, or on a duplicate key looks up the existing row instead.
    Recovers from errors.
    Only works under PostgreSQL (uses INSERT RETURNING).
    @param row dict-like mapping of column to value
    @param pkey_ The pkey column. Defaults to the table's first column.
    @param row_ct_ref [count] If set, [0] is incremented by the number of rows
        actually inserted
    @return The pkey value of the inserted or existing row
    '''
    row = sql_gen.ColDict(db, table, row)
    if pkey_ == None: pkey_ = pkey(db, table, recover=True)
    
    try:
        cur = insert(db, table, row, pkey_, recover=True)
        if row_ct_ref != None and cur.rowcount >= 0:
            row_ct_ref[0] += cur.rowcount
        return value(cur)
    except DuplicateKeyException, e:
        # Row already exists: look it up by the columns of the violated
        # unique constraint
        row = sql_gen.ColDict(db, table,
            util.dict_subset_right_join(row, e.cols))
        return value(select(db, table, [pkey_], row, recover=True))
1078

    
1079
def get(db, table, row, pkey, row_ct_ref=None, create=False):
    '''Looks up a row's pkey, optionally inserting the row if it's missing.
    Recovers from errors.'''
    try:
        return value(select(db, table, [pkey], row, limit=1, recover=True))
    except StopIteration: # no matching row
        if create: return put(db, table, row, pkey, row_ct_ref) # insert new row
        raise
1085

    
1086
def is_func_result(col):
    '''Whether col is the 'result' column of a SQL function call's table.'''
    return '(' in col.table.name and col.name == 'result'
1088

    
1089
def into_table_name(out_table, in_tables0, mapping, is_func):
    '''Picks a default name for the pkeys table produced by put_table().'''
    def fmt_in_col(col):
        col = sql_gen.remove_col_rename(col)
        if isinstance(col, sql_gen.Col):
            tbl = col.table
            if tbl == in_tables0: col = sql_gen.to_name_only_col(col)
            elif is_func_result(col): col = tbl # omit col name
        return str(col)
    
    name = str(out_table)
    if not is_func: return name+'_pkeys'
    
    # Function-style name: out_table(value) or out_table(arg=col, ...)
    try: value_in_col = mapping['value']
    except KeyError:
        args = ', '.join(str(k)+'='+fmt_in_col(v)
            for k, v in mapping.iteritems())
    else: args = fmt_in_col(value_in_col)
    return name+'('+args+')'
1112

    
1113
def put_table(db, out_table, in_tables, mapping, row_ct_ref=None, into=None,
    default=None, is_func=False):
    '''Bulk-inserts mapped input rows into out_table, retrying with added
    constraints after each handled error until the insert succeeds.
    Recovers from errors.
    Only works under PostgreSQL (uses INSERT RETURNING).
    @param in_tables The main input table to select from, followed by a list of
        tables to join with it using the main input table's pkey
    @param mapping dict(out_table_col=in_table_col, ...)
        * out_table_col: str (*not* sql_gen.Col)
        * in_table_col: sql_gen.Col|literal-value
    @param row_ct_ref [count] If set, [0] is incremented by the number of rows
        inserted
    @param into The table to contain the output and input pkeys.
        Defaults to `out_table.name+'_pkeys'`.
    @param default The *output* column to use as the pkey for missing rows.
        If this output column does not exist in the mapping, uses None.
    @param is_func Whether out_table is the name of a SQL function, not a table
    @return sql_gen.Col Where the output pkeys are made available
    '''
    out_table = sql_gen.as_Table(out_table)
    
    def log_debug(msg): db.log_debug(msg, level=1.5)
    def col_ustr(str_):
        return strings.repr_no_u(sql_gen.remove_col_rename(str_))
    
    out_pkey = pkey(db, out_table, recover=True)
    out_pkey_col = sql_gen.as_Col(out_pkey, out_table)
    
    if mapping == {}: # need at least one column for INSERT SELECT
        mapping = {out_pkey: None} # ColDict will replace with default value
    
    log_debug('********** New iteration **********')
    log_debug('Inserting these input columns into '+strings.as_tt(
        out_table.to_str(db))+':\n'+strings.as_table(mapping, ustr=col_ustr))
    
    # Create input joins from list of input tables
    in_tables_ = in_tables[:] # don't modify input!
    in_tables0 = in_tables_.pop(0) # first table is separate
    errors_table_ = errors_table(db, in_tables0)
    in_pkey = pkey(db, in_tables0, recover=True)
    in_pkey_col = sql_gen.as_Col(in_pkey, in_tables0)
    input_joins = [in_tables0]+[sql_gen.Join(v,
        {in_pkey: sql_gen.join_same_not_null}) for v in in_tables_]
    
    if into == None:
        into = into_table_name(out_table, in_tables0, mapping, is_func)
    into = sql_gen.as_Table(into)
    
    # Set column sources
    in_cols = filter(sql_gen.is_table_col, mapping.values())
    for col in in_cols:
        if col.table == in_tables0: col.set_srcs(sql_gen.src_self)
    
    log_debug('Joining together input tables into temp table')
    # Place in new table for speed and so don't modify input if values edited
    in_table = sql_gen.Table('in')
    mapping = dicts.join(mapping, flatten(db, in_table, input_joins, in_cols,
        preserve=[in_pkey_col], start=0))
    input_joins = [in_table]
    db.log_debug('Temp table: '+strings.as_tt(in_table.to_str(db)), level=2)
    
    mapping = sql_gen.ColDict(db, out_table, mapping)
        # after applying dicts.join() because that returns a plain dict
    
    # Resolve default value column
    try: default = mapping[default]
    except KeyError:
        if default != None:
            db.log_debug('Default value column '
                +strings.as_tt(strings.repr_no_u(default))
                +' does not exist in mapping, falling back to None', level=2.1)
            default = None
    
    pkeys_names = [in_pkey, out_pkey]
    pkeys_cols = [in_pkey_col, out_pkey_col]
    
    # First call creates the pkeys table; later calls append to it
    pkeys_table_exists_ref = [False]
    def insert_into_pkeys(joins, cols):
        query = mk_select(db, joins, cols, order_by=None, start=0)
        if pkeys_table_exists_ref[0]:
            insert_select(db, into, pkeys_names, query)
        else:
            run_query_into(db, query, into=into)
            pkeys_table_exists_ref[0] = True
    
    # Shared mutable state read by mk_main_select(); the error handlers below
    # adjust it between retries
    limit_ref = [None]
    conds = set()
    distinct_on = []
    def mk_main_select(joins, cols):
        return mk_select(db, joins, cols, conds, distinct_on,
            limit=limit_ref[0], start=0)
    
    exc_strs = set()
    def log_exc(e):
        e_str = exc.str_(e, first_line_only=True)
        log_debug('Caught exception: '+e_str)
        assert e_str not in exc_strs # avoid infinite loops
        exc_strs.add(e_str)
    
    def remove_all_rows():
        log_debug('Returning NULL for all rows')
        limit_ref[0] = 0 # just create an empty pkeys table
    
    # Error-recovery helpers: record the bad value, then either filter the
    # offending rows out (remove_rows) or NULL them out (invalid2null)
    def ignore(in_col, value, e):
        track_data_error(db, errors_table_, in_col.srcs, value, e.cause.pgcode,
            e.cause.pgerror)
        
        in_col_str = strings.as_tt(repr(in_col))
        db.log_debug('Adding index on '+in_col_str+' to enable fast filtering',
            level=2.5)
        add_index(db, in_col)
        
        log_debug('Ignoring rows with '+in_col_str+' = '
            +strings.as_tt(repr(value)))
    def remove_rows(in_col, value, e):
        ignore(in_col, value, e)
        cond = (in_col, sql_gen.CompareCond(value, '!='))
        assert cond not in conds # avoid infinite loops
        conds.add(cond)
    def invalid2null(in_col, value, e):
        ignore(in_col, value, e)
        update(db, in_table, [(in_col, None)],
            sql_gen.ColValueCond(in_col, value))
    
    def insert_pkeys_table(which):
        return sql_gen.Table(sql_gen.add_suffix(in_table.name,
            '_insert_'+which+'_pkeys'))
    insert_out_pkeys = insert_pkeys_table('out')
    insert_in_pkeys = insert_pkeys_table('in')
    
    # Do inserts and selects
    join_cols = sql_gen.ColDict(db, out_table)
    while True:
        if limit_ref[0] == 0: # special case
            log_debug('Creating an empty pkeys table')
            cur = run_query_into(db, mk_select(db, out_table, [out_pkey],
                limit=limit_ref[0]), into=insert_out_pkeys)
            break # don't do main case
        
        has_joins = join_cols != {}
        
        # Prepare to insert new rows
        insert_joins = input_joins[:] # don't modify original!
        insert_args = dict(recover=True, cacheable=False)
        if has_joins:
            distinct_on = [v.to_Col() for v in join_cols.values()]
            insert_joins.append(sql_gen.Join(out_table, join_cols,
                sql_gen.filter_out))
        else:
            insert_args.update(dict(returning=out_pkey, into=insert_out_pkeys))
        main_select = mk_main_select(insert_joins, mapping.values())
        
        log_debug('Trying to insert new rows')
        try:
            cur = insert_select(db, out_table, mapping.keys(), main_select,
                **insert_args)
            break # insert successful
        except DuplicateKeyException, e:
            log_exc(e)
            
            old_join_cols = join_cols.copy()
            join_cols.update(util.dict_subset_right_join(mapping, e.cols))
            log_debug('Ignoring existing rows, comparing on these columns:\n'
                +strings.as_inline_table(join_cols, ustr=col_ustr))
            assert join_cols != old_join_cols # avoid infinite loops
        except NullValueException, e:
            log_exc(e)
            
            out_col, = e.cols
            try: in_col = mapping[out_col]
            except KeyError:
                log_debug('Missing mapping for NOT NULL column '+out_col)
                remove_all_rows()
            else: remove_rows(in_col, None, e)
        except FunctionValueException, e:
            log_exc(e)
            
            func_name = e.name
            value = e.value
            for out_col, in_col in mapping.iteritems():
                in_col = sql_gen.unwrap_func_call(in_col, func_name)
                invalid2null(in_col, value, e)
        except MissingCastException, e:
            log_exc(e)
            
            out_col = e.col
            type_ = e.type
            
            log_debug('Casting '+strings.as_tt(out_col)+' input to '
                +strings.as_tt(type_))
            def wrap_func(col): return cast(db, type_, col, errors_table_)
            mapping[out_col] = sql_gen.wrap(wrap_func, mapping[out_col])
        except DatabaseErrors, e:
            log_exc(e)
            
            msg = 'No handler for exception: '+exc.str_(e)
            warnings.warn(DbWarning(msg))
            log_debug(msg)
            remove_all_rows()
        # after exception handled, rerun loop with additional constraints
    
    if row_ct_ref != None and cur.rowcount >= 0:
        row_ct_ref[0] += cur.rowcount
    
    # NOTE(review): has_joins is only assigned inside the loop's main case;
    # reaching here via the limit_ref[0] == 0 special case relies on a prior
    # iteration having set it — confirm the special case can't occur first
    if has_joins:
        select_joins = input_joins+[sql_gen.Join(out_table, join_cols)]
        log_debug('Getting output table pkeys of existing/inserted rows')
        insert_into_pkeys(select_joins, pkeys_cols)
    else:
        add_row_num(db, insert_out_pkeys) # for joining with input pkeys
        
        log_debug('Getting input table pkeys of inserted rows')
        run_query_into(db, mk_main_select(input_joins, [in_pkey]),
            into=insert_in_pkeys)
        add_row_num(db, insert_in_pkeys) # for joining with output pkeys
        
        assert table_row_count(db, insert_out_pkeys) == table_row_count(db,
            insert_in_pkeys)
        
        log_debug('Combining output and input pkeys in inserted order')
        pkey_joins = [insert_in_pkeys, sql_gen.Join(insert_out_pkeys,
            {row_num_col: sql_gen.join_same_not_null})]
        insert_into_pkeys(pkey_joins, pkeys_names)
    
    db.log_debug('Adding pkey on pkeys table to enable fast joins', level=2.5)
    add_pkey(db, into)
    
    log_debug('Setting pkeys of missing rows to '+strings.as_tt(repr(default)))
    missing_rows_joins = input_joins+[sql_gen.Join(into,
        {in_pkey: sql_gen.join_same_not_null}, sql_gen.filter_out)]
        # must use join_same_not_null or query will take forever
    insert_into_pkeys(missing_rows_joins,
        [in_pkey_col, sql_gen.NamedCol(out_pkey, default)])
    
    assert table_row_count(db, into) == table_row_count(db, in_table)
    
    srcs = []
    if is_func: srcs = sql_gen.cols_srcs(in_cols)
    return sql_gen.Col(out_pkey, into, srcs)
1349

    
1350
##### Data cleanup
1351

    
1352
def cleanup_table(db, table, cols):
    '''Normalizes the given columns in place: trims surrounding whitespace and
    replaces empty strings and the backslash-N placeholder with NULL.'''
    table = sql_gen.as_Table(table)
    col_objs = map(sql_gen.as_Col, cols)
    
    template = ('nullif(nullif(trim(both from %s), '+db.esc_value('')+'), '
        +db.esc_value(r'\N')+')')
    changes = []
    for col in col_objs:
        changes.append((col, sql_gen.CustomCode(template % col.to_str(db))))
    
    update(db, table, changes)
(24-24/36)