Project

General

Profile

1
# Database access
2

    
3
import copy
4
import operator
5
import re
6
import warnings
7

    
8
import exc
9
import dicts
10
import iters
11
import lists
12
from Proxy import Proxy
13
import rand
14
import sql_gen
15
import strings
16
import util
17

    
18
##### Exceptions
19

    
20
def get_cur_query(cur, input_query=None, input_params=None):
    '''Returns the last query executed on a cursor.
    Falls back to a description of the input query and params when the driver
    does not expose the executed query text.'''
    raw_query = None
    if hasattr(cur, 'query'): raw_query = cur.query # psycopg2 cursors
    elif hasattr(cur, '_last_executed'): raw_query = cur._last_executed # MySQLdb
    
    if raw_query is not None: return raw_query
    return '[input] '+strings.ustr(input_query)+' % '+repr(input_params)
27

    
28
def _add_cursor_info(e, *args, **kw_args):
    '''Annotates an exception with the query text of a cursor.
    For params, see get_cur_query()'''
    query = get_cur_query(*args, **kw_args)
    exc.add_msg(e, 'query: '+str(query))
31

    
32
class DbException(exc.ExceptionWithCause):
    '''Base class for all database-related errors in this module.'''
    def __init__(self, msg, cause=None, cur=None):
        exc.ExceptionWithCause.__init__(self, msg, cause, cause_newline=True)
        # If a cursor was given, append its query to the message
        if cur is not None: _add_cursor_info(self, cur)
36

    
37
class ExceptionWithName(DbException):
    '''A database error scoped to a named object (table, function, ...).'''
    def __init__(self, name, cause=None):
        DbException.__init__(self, 'for name: '+str(name), cause)
        self.name = name # the offending object's name
41

    
42
class ExceptionWithNameValue(DbException):
    '''A database error scoped to a named object and an offending value.'''
    def __init__(self, name, value, cause=None):
        msg = 'for name: '+str(name)+'; value: '+repr(value)
        DbException.__init__(self, msg, cause)
        self.name = name
        self.value = value
48

    
49
class ConstraintException(DbException):
    '''Raised when a database constraint is violated.'''
    def __init__(self, name, cols, cause=None):
        msg = ('Violated '+name+' constraint on columns: '
            +(', '.join(cols)))
        DbException.__init__(self, msg, cause)
        self.name = name # constraint name
        self.cols = cols # columns covered by the constraint
55

    
56
class NameException(DbException): pass # identifier contains invalid characters (see check_name())
57

    
58
class DuplicateKeyException(ConstraintException): pass # unique-constraint violation
59

    
60
class NullValueException(ConstraintException): pass # NOT NULL constraint violation
61

    
62
class FunctionValueException(ExceptionWithNameValue): pass # invalid input value to a DB function
63

    
64
class DuplicateTableException(ExceptionWithName): pass # CREATE of an already-existing table
65

    
66
class DuplicateFunctionException(ExceptionWithName): pass # CREATE of an already-existing function
67

    
68
class EmptyRowException(DbException): pass # a row was expected but none was available
69

    
70
##### Warnings
71

    
72
class DbWarning(UserWarning): pass # warning category for database usage issues
73

    
74
##### Result retrieval
75

    
76
def col_names(cur):
    '''Yields the name (first element of each description entry) of every
    column in a cursor's result set.'''
    for col_desc in cur.description: yield col_desc[0]
77

    
78
def rows(cur):
    '''Iterates over a cursor's rows, stopping when fetchone() returns None.'''
    return iter(cur.fetchone, None)
79

    
80
def consume_rows(cur):
    '''Fetches and discards all remaining rows so the result will be cached.'''
    iters.consume_iter(rows(cur))
83

    
84
def next_row(cur):
    '''Returns the next row; raises StopIteration when exhausted.'''
    return rows(cur).next()
85

    
86
def row(cur):
    '''Returns the first remaining row, then consumes the rest so the full
    result will be cached.'''
    first = next_row(cur)
    consume_rows(cur)
    return first
90

    
91
def next_value(cur):
    '''Returns the first column of the next row.'''
    return next_row(cur)[0]
92

    
93
def value(cur):
    '''Returns the first column of the first remaining row.'''
    return row(cur)[0]
94

    
95
def values(cur):
    '''Iterates over the first column of each remaining row.'''
    return iters.func_iter(lambda: next_value(cur))
96

    
97
def value_or_none(cur):
    '''Like value(), but returns None instead of raising when there are no
    rows.'''
    try: return value(cur)
    except StopIteration: return None
100

    
101
##### Input validation
102

    
103
def clean_name(name):
    '''Makes a string usable as a bare SQL identifier: dots become underscores
    and all other non-word characters are removed.'''
    underscored = name.replace('.', '_')
    return re.sub(r'\W', r'', underscored)
104

    
105
def check_name(name):
    '''Raises NameException unless name contains only word characters.'''
    if re.search(r'\W', name) is None: return
    raise NameException('Name "'+name
        +'" may contain only alphanumeric characters and _')
108

    
109
def esc_name_by_module(module, name, ignore_case=False):
    '''Escapes a SQL identifier for a given DB-API driver module.
    
    @param module Root module name of the driver ('psycopg2', 'MySQLdb'), or
        None for the default (double-quote) quoting style
    @param ignore_case If set, returns the name unquoted (quoting disables
        case-insensitivity) after verifying it needs no escaping
    @raise NotImplementedError if the module's quoting style is unknown
    @raise NameException if ignore_case is set and the name needs escaping
    '''
    if module == 'psycopg2' or module == None:
        if ignore_case:
            # Don't enclose in quotes because this disables case-insensitivity
            check_name(name)
            return name
        else: quote = '"'
    elif module == 'MySQLdb': quote = '`'
    else: raise NotImplementedError("Can't escape name for "+module+' database')
    # Escape embedded quote chars by doubling them (standard SQL identifier
    # escaping) instead of stripping them, which silently mangled the name and
    # could make two distinct names collide
    return quote + name.replace(quote, quote+quote) + quote
119

    
120
def esc_name_by_engine(engine, name, **kw_args):
    '''Escapes a SQL identifier based on an engine name (e.g. 'PostgreSQL').'''
    module = db_engines[engine][0]
    return esc_name_by_module(module, name, **kw_args)
122

    
123
def esc_name(db, name, **kw_args):
    '''Escapes a SQL identifier for a live connection's driver.'''
    module = util.root_module(db.db)
    return esc_name_by_module(module, name, **kw_args)
125

    
126
def qual_name(db, schema, table):
    '''Returns the escaped, optionally schema-qualified name of a table.'''
    table = esc_name(db, table)
    if schema is None: return table
    return esc_name(db, schema)+'.'+table
131

    
132
##### Database connections
133

    
134
# Recognized keys of a db_config dict, in canonical order
db_config_names = ['engine', 'host', 'user', 'password', 'database', 'schemas']
135

    
136
# Maps engine name -> (DB-API module name, db_config key renamings needed by
# that module's connect() params)
db_engines = {
    'MySQL': ('MySQLdb', {'password': 'passwd', 'database': 'db'}),
    'PostgreSQL': ('psycopg2', {}),
}
140

    
141
DatabaseErrors_set = set([DbException])
142
DatabaseErrors = tuple(DatabaseErrors_set)
143

    
144
def _add_module(module):
    '''Registers a driver module's DatabaseError class in DatabaseErrors.'''
    global DatabaseErrors
    DatabaseErrors_set.add(module.DatabaseError)
    DatabaseErrors = tuple(DatabaseErrors_set)
148

    
149
def db_config_str(db_config):
    '''One-line human-readable description of a db_config dict.'''
    return '%s database %s' % (db_config['engine'], db_config['database'])
151

    
152
def _query_lookup(query, params):
    '''Cache key for a query: the query text plus a hashable form of params.'''
    return (query, dicts.make_hashable(params))
153

    
154
log_debug_none = lambda msg: None # default no-op debug logger for DbConn
155

    
156
class DbConn:
    '''A lazily-connecting, optionally-caching database connection wrapper.
    
    Wraps a DB-API connection (psycopg2 or MySQLdb), connecting on first use
    of the `db` attribute. When caching is on, query results (or the
    exceptions they raised) are memoized by (query, params).
    '''
    def __init__(self, db_config, serializable=True, autocommit=False,
        caching=True, log_debug=log_debug_none):
        '''
        @param db_config dict with keys from db_config_names
        @param serializable Whether to use SERIALIZABLE transaction isolation
        @param autocommit Whether to commit after each top-level query
        @param caching Whether to memoize query results
        @param log_debug callback for debug messages
        '''
        self.db_config = db_config
        self.serializable = serializable
        self.autocommit = autocommit
        self.caching = caching
        self.log_debug = log_debug
        self.debug = log_debug != log_debug_none # debug iff a real logger given
        
        self.__db = None # underlying DB-API connection; created on demand
        self.query_results = {} # maps _query_lookup() keys to CacheCursors
        self._savepoint = 0 # current savepoint nesting depth
    
    def __getattr__(self, name):
        # `db` is the only computed attribute; it triggers lazy connection
        if name == '__dict__': raise Exception('getting __dict__')
        if name == 'db': return self._db()
        else: raise AttributeError()
    
    def __getstate__(self):
        state = copy.copy(self.__dict__) # shallow copy
        state['log_debug'] = None # don't pickle the debug callback
        state['_DbConn__db'] = None # don't pickle the connection
        return state
    
    def connected(self): return self.__db != None # whether _db() has run
    
    def _db(self):
        '''Returns the underlying connection, connecting on first call.'''
        if self.__db == None:
            # Process db_config
            db_config = self.db_config.copy() # don't modify input!
            schemas = db_config.pop('schemas', None)
            module_name, mappings = db_engines[db_config.pop('engine')]
            module = __import__(module_name)
            _add_module(module)
            # Rename config keys to the names this driver's connect() expects
            for orig, new in mappings.iteritems():
                try: util.rename_key(db_config, orig, new)
                except KeyError: pass
            
            # Connect
            self.__db = module.connect(**db_config)
            
            # Configure connection
            if self.serializable and not self.autocommit: run_raw_query(self,
                'SET TRANSACTION ISOLATION LEVEL SERIALIZABLE')
            if schemas != None:
                # Prepend the requested schemas to the search_path
                schemas_ = ''.join((esc_name(self, s)+', '
                    for s in schemas.split(',')))
                run_raw_query(self, "SELECT set_config('search_path', \
%s || current_setting('search_path'), false)", [schemas_])
        
        return self.__db
    
    class DbCursor(Proxy):
        '''Cursor proxy that records rows as they are fetched and stores the
        completed result (or the raised exception) in the connection's cache.
        '''
        def __init__(self, outer):
            Proxy.__init__(self, outer.db.cursor())
            self.outer = outer
            self.query_results = outer.query_results
            self.query_lookup = None # cache key; set by execute()
            self.result = [] # rows fetched so far, or the raised Exception
        
        def execute(self, query, params=None):
            # Inserts are not idempotent, so their rows are never cached
            self._is_insert = query.upper().find('INSERT') >= 0
            self.query_lookup = _query_lookup(query, params)
            try:
                try:
                    return_value = self.inner.execute(query, params)
                    self.outer.do_autocommit()
                finally: self.query = get_cur_query(self.inner)
            except Exception, e:
                _add_cursor_info(e, self, query, params)
                self.result = e # cache the exception as the result
                self._cache_result()
                raise
            # Fetch all rows so result will be cached
            if self.rowcount == 0 and not self._is_insert: consume_rows(self)
            return return_value
        
        def fetchone(self):
            row = self.inner.fetchone()
            if row != None: self.result.append(row)
            # otherwise, fetched all rows
            else: self._cache_result()
            return row
        
        def _cache_result(self):
            # For inserts, only cache exceptions since inserts are not
            # idempotent, but an invalid insert will always be invalid
            if self.query_results != None and (not self._is_insert
                or isinstance(self.result, Exception)):
                
                assert self.query_lookup != None
                self.query_results[self.query_lookup] = self.CacheCursor(
                    util.dict_subset(dicts.AttrsDictView(self),
                    ['query', 'result', 'rowcount', 'description']))
        
        class CacheCursor:
            '''Replays a cached result (rows or exception) through the cursor
            interface.'''
            def __init__(self, cached_result): self.__dict__ = cached_result
            
            def execute(self, *args, **kw_args):
                if isinstance(self.result, Exception): raise self.result
                # otherwise, result is a rows list
                self.iter = iter(self.result)
            
            def fetchone(self):
                try: return self.iter.next()
                except StopIteration: return None
    
    def esc_value(self, value):
        '''Escapes a literal value for inclusion in SQL text.'''
        module = util.root_module(self.db)
        if module == 'psycopg2': str_ = self.db.cursor().mogrify('%s', [value])
        elif module == 'MySQLdb':
            import _mysql
            str_ = _mysql.escape_string(value)
        else: raise NotImplementedError("Can't escape value for "+module
            +' database')
        return strings.to_unicode(str_)
    
    def esc_name(self, name): return esc_name(self, name) # calls global func
    
    def run_query(self, query, params=None, cacheable=False, log_level=2):
        '''Runs a query, serving it from the cache when possible.
        @param cacheable Whether the result may be cached/served from cache
        @param log_level numeric verbosity passed to the debug logger
        '''
        assert query != None
        
        if not self.caching: cacheable = False
        used_cache = False
        try:
            # Get cursor
            if cacheable:
                query_lookup = _query_lookup(query, params)
                try:
                    cur = self.query_results[query_lookup]
                    used_cache = True
                except KeyError: cur = self.DbCursor(self)
            else: cur = self.db.cursor()
            
            # Run query
            cur.execute(query, params)
        finally:
            if self.debug: # only compute msg if needed
                # NOTE(review): if cursor creation itself raised, `cur` is
                # unbound here and this logging would raise NameError -- verify
                if used_cache: cache_status = 'Cache hit'
                elif cacheable: cache_status = 'Cache miss'
                else: cache_status = 'Non-cacheable'
                self.log_debug(cache_status+': '+strings.one_line(
                    str(get_cur_query(cur, query, params))), log_level)
        
        return cur
    
    def is_cached(self, query, params=None):
        '''Whether a result for this query/params is already cached.'''
        return _query_lookup(query, params) in self.query_results
    
    def with_savepoint(self, func):
        '''Runs func() inside a savepoint, rolling back to it on any
        exception and releasing it on success.'''
        savepoint = 'level_'+str(self._savepoint)
        self.run_query('SAVEPOINT '+savepoint, log_level=4)
        self._savepoint += 1
        try:
            try: return_val = func()
            finally:
                # Decrement depth before ROLLBACK/RELEASE so those queries run
                # at the enclosing level
                self._savepoint -= 1
                assert self._savepoint >= 0
        except:
            self.run_query('ROLLBACK TO SAVEPOINT '+savepoint, log_level=4)
            raise
        else:
            self.run_query('RELEASE SAVEPOINT '+savepoint, log_level=4)
            self.do_autocommit()
            return return_val
    
    def do_autocommit(self):
        '''Autocommits if outside savepoint'''
        assert self._savepoint >= 0
        if self.autocommit and self._savepoint == 0:
            # NOTE(review): called with one arg here, but run_query() passes
            # (msg, log_level) -- the logger presumably accepts an optional
            # level; confirm
            self.log_debug('Autocommiting')
            self.db.commit()
329

    
330
connect = DbConn # constructing a DbConn doubles as the connect() entry point
331

    
332
##### Querying
333

    
334
def run_raw_query(db, *args, **kw_args):
    '''Runs a query on a DbConn without any error translation.
    For params, see DbConn.run_query()'''
    return db.run_query(*args, **kw_args)
337

    
338
def mogrify(db, query, params):
    '''Returns the query text with params substituted, as the driver would
    send it. Only psycopg2 supports this.
    @raise NotImplementedError for drivers without mogrify support'''
    module = util.root_module(db.db)
    if module != 'psycopg2': raise NotImplementedError(
        "Can't mogrify query for "+module+' database')
    return db.db.cursor().mogrify(query, params)
343

    
344
##### Recoverable querying
345

    
346
def with_savepoint(db, func):
    '''Runs func() inside a savepoint (see DbConn.with_savepoint()).'''
    return db.with_savepoint(func)
347

    
348
def run_query(db, query, params=None, recover=None, cacheable=False, **kw_args):
    '''Runs a query, optionally translating driver errors into this module's
    specific DbException subclasses by parsing the error message.
    For params, see run_raw_query()
    @param recover If set, runs inside a savepoint so the transaction can
        continue after an error, and raises a more specific exception
    '''
    if recover == None: recover = False
    
    try:
        def run(): return run_raw_query(db, query, params, cacheable, **kw_args)
        if recover and not db.is_cached(query, params):
            return with_savepoint(db, run)
        else: return run() # don't need savepoint if cached
    except Exception, e:
        if not recover: raise # need savepoint to run index_cols()
        msg = exc.str_(e)
        
        # unique-constraint violation -> DuplicateKeyException
        match = re.search(r'duplicate key value violates unique constraint '
            r'"((_?[^\W_]+)_[^"]+?)"', msg)
        if match:
            constraint, table = match.groups()
            try: cols = index_cols(db, table, constraint)
            except NotImplementedError: raise e
            else: raise DuplicateKeyException(constraint, cols, e)
        
        # NOT NULL violation -> NullValueException
        match = re.search(r'null value in column "(\w+?)" violates not-null '
            r'constraint', msg)
        if match: raise NullValueException('NOT NULL', [match.group(1)], e)
        
        # bad input value to a function -> FunctionValueException
        match = re.search(r'\b(?:invalid input (?:syntax|value)\b.*?'
            r'|date/time field value out of range): "(.+?)"\n'
            r'(?:(?s).*?)\bfunction "(\w+?)".*?\bat assignment', msg)
        if match:
            value, name = match.groups()
            raise FunctionValueException(name, strings.to_unicode(value), e)
        
        # CREATE of an existing table -> DuplicateTableException
        match = re.search(r'relation "(\w+?)" already exists', msg)
        if match: raise DuplicateTableException(match.group(1), e)
        
        # CREATE of an existing function -> DuplicateFunctionException
        match = re.search(r'function "(\w+?)" already exists', msg)
        if match: raise DuplicateFunctionException(match.group(1), e)
        
        raise # no specific exception raised
387

    
388
##### Basic queries
389

    
390
def next_version(name):
    '''Prepends the version # so it won't be removed if the name is truncated.
    An unversioned name becomes v1_<name> (the existing name counts as
    version 0); v<N>_<base> becomes v<N+1>_<base>.'''
    match = re.match(r'^v(\d+)_(.*)$', name)
    if match:
        version_str, base = match.groups()
        return 'v'+str(int(version_str)+1)+'_'+base
    return 'v1_'+name
398

    
399
def run_query_into(db, query, params, into=None, *args, **kw_args):
    '''Outputs a query to a temp table.
    For params, see run_query().
    @param into sql_gen.Table to CREATE ... AS the query into, or None to run
        the query normally. Its name is versioned on collision.
    '''
    if into == None: return run_query(db, query, params, *args, **kw_args)
    else: # place rows in temp table
        assert isinstance(into, sql_gen.Table)
        
        kw_args['recover'] = True # needed to catch DuplicateTableException
        
        temp = not db.debug # tables are created as permanent in debug mode
        # "temporary tables cannot specify a schema name", so remove schema
        if temp: into.schema = None
        
        while True:
            try:
                create_query = 'CREATE'
                if temp: create_query += ' TEMP'
                create_query += ' TABLE '+into.to_str(db)+' AS '+query
                
                return run_query(db, create_query, params, *args, **kw_args)
                    # CREATE TABLE AS sets rowcount to # rows in query
            except DuplicateTableException, e:
                into.name = next_version(into.name)
                # try again with next version of name

    
425
order_by_pkey = object() # sentinel: tells mk_select() to order by the table's pkey (the default)
426

    
427
distinct_on_all = object() # sentinel: tells mk_select() to SELECT DISTINCT ON all columns
428

    
429
def mk_select(db, tables, fields=None, conds=None, distinct_on=[], limit=None,
    start=None, order_by=order_by_pkey, default_table=None):
    '''
    @param tables The single table to select from, or a list of tables to join
        together, with tables after the first being sql_gen.Join objects
    @param fields Use None to select all fields in the table
    @param conds WHERE conditions: [(compare_left_side, compare_right_side),...]
        * container can be any iterable type
        * compare_left_side: sql_gen.Code|str (for col name)
        * compare_right_side: sql_gen.ValueCond|literal value
    @param distinct_on The columns to SELECT DISTINCT ON, or distinct_on_all to
        use all columns
    @return tuple(query, params)
    '''
    # NOTE(review): distinct_on's mutable default [] is only read here, never
    # mutated, so sharing it across calls is harmless
    # Parse tables param
    if not lists.is_seq(tables): tables = [tables]
    tables = list(tables) # don't modify input! (list() copies input)
    table0 = sql_gen.as_Table(tables.pop(0)) # first table is separate
    
    # Parse other params
    if conds == None: conds = []
    elif isinstance(conds, dict): conds = conds.items()
    conds = list(conds) # don't modify input! (list() copies input)
    assert limit == None or type(limit) == int
    assert start == None or type(start) == int
    if order_by is order_by_pkey:
        # DISTINCT ON suppresses the default pkey ordering
        if distinct_on != []: order_by = None
        else: order_by = pkey(db, table0, recover=True)
    
    query = 'SELECT'
    
    def parse_col(col): return sql_gen.as_Col(col, default_table).to_str(db)
    
    # DISTINCT ON columns
    if distinct_on != []:
        query += ' DISTINCT'
        if distinct_on is not distinct_on_all:
            query += ' ON ('+(', '.join(map(parse_col, distinct_on)))+')'
    
    # Columns
    query += ' '
    if fields == None: query += '*'
    else: query += ', '.join(map(parse_col, fields))
    
    # Main table
    query += ' FROM '+table0.to_str(db)
    
    # Add joins
    left_table = table0
    for join_ in tables:
        table = join_.table
        
        # Parse special values
        if join_.type_ is sql_gen.filter_out: # filter no match
            conds.append((sql_gen.Col(table_not_null_col(db, table), table),
                None))
        
        query += ' '+join_.to_str(db, left_table)
        
        left_table = table
    
    # missing tracks whether the query lacks any row-restricting clause
    missing = True
    if conds != []:
        query += ' WHERE '+(' AND '.join(('('+sql_gen.ColValueCond(l, r)
            .to_str(db)+')' for l, r in conds)))
        missing = False
    if order_by != None:
        query += ' ORDER BY '+sql_gen.as_Col(order_by, table0).to_str(db)
    if limit != None: query += ' LIMIT '+str(limit); missing = False
    if start != None:
        if start != 0: query += ' OFFSET '+str(start)
        missing = False
    # Warn about unrestricted SELECTs, which are usually accidental
    if missing: warnings.warn(DbWarning(
        'SELECT statement missing a WHERE, LIMIT, or OFFSET clause: '+query))
    
    return (query, [])
505

    
506
def select(db, *args, **kw_args):
    '''Builds and runs a SELECT statement.
    For params, see mk_select() and run_query()'''
    # Split off the run_query()-only options before passing the rest on
    recover_ = kw_args.pop('recover', None)
    cacheable_ = kw_args.pop('cacheable', True)
    log_level_ = kw_args.pop('log_level', 2)
    
    query, params = mk_select(db, *args, **kw_args)
    return run_query(db, query, params, recover_, cacheable_,
        log_level=log_level_)
514

    
515
def mk_insert_select(db, table, cols=None, select_query=None, params=None,
    returning=None, embeddable=False):
    '''
    @param returning str|None An inserted column (such as pkey) to return
    @param embeddable Whether the query should be embeddable as a nested SELECT.
        Warning: If you set this and cacheable=True when the query is run, the
        query will be fully cached, not just if it raises an exception.
    @return tuple(query, params)
    '''
    table = sql_gen.as_Table(table)
    if cols == []: cols = None # no cols (all defaults) = unknown col names
    if cols != None: cols = [sql_gen.as_Col(v).to_str(db) for v in cols]
    if select_query == None: select_query = 'DEFAULT VALUES'
    if returning != None: returning = sql_gen.as_Col(returning, table)
    
    # Build query
    query = 'INSERT INTO '+table.to_str(db)
    if cols != None: query += ' ('+', '.join(cols)+')'
    query += ' '+select_query
    
    if returning != None:
        # RETURNING clause uses the bare column name, without a table prefix
        returning_name = copy.copy(returning)
        returning_name.table = None
        returning_name = returning_name.to_str(db)
        query += ' RETURNING '+returning_name
    
    if embeddable:
        assert returning != None
        # NOTE(review): cols=None here would make the '_'.join() below fail,
        # so embeddable presumably requires explicit cols -- confirm callers
        
        # Create function
        function_name = '_'.join(['insert', table.name] + cols)
        return_type = 'SETOF '+returning.to_str(db)+'%TYPE'
        while True:
            try:
                func_schema = None
                if not db.debug: func_schema = 'pg_temp'
                function = sql_gen.Table(function_name, func_schema).to_str(db)
                
                function_query = '''\
CREATE FUNCTION '''+function+'''() RETURNS '''+return_type+'''
    LANGUAGE sql
    AS $$'''+mogrify(db, query, params)+''';$$;
'''
                run_query(db, function_query, recover=True, cacheable=True)
                break # this version was successful
            except DuplicateFunctionException, e:
                function_name = next_version(function_name)
                # try again with next version of name
        
        # Return query that uses function
        func_table = sql_gen.NamedTable('f', sql_gen.CustomCode(function+'()'),
            [returning_name]) # AS clause requires function alias
        return mk_select(db, func_table, start=0, order_by=None)
    
    return (query, params)
569

    
570
def insert_select(db, *args, **kw_args):
    '''Builds and runs an INSERT ... SELECT, optionally capturing RETURNING
    values in a temp table.
    For params, see mk_insert_select() and run_query_into()
    @param into sql_gen.Table with suggested name of temp table to put RETURNING
        values in
    '''
    into = kw_args.pop('into', None)
    # Capturing RETURNING values requires the embeddable (function) form
    if into != None: kw_args['embeddable'] = True
    recover = kw_args.pop('recover', None)
    cacheable = kw_args.pop('cacheable', True)
    
    query, params = mk_insert_select(db, *args, **kw_args)
    return run_query_into(db, query, params, into, recover=recover,
        cacheable=cacheable)
583

    
584
default = object() # sentinel: tells insert() to use the default value for a column
585

    
586
def insert(db, table, row, *args, **kw_args):
    '''Inserts a single row (sequence or dict of values).
    For params, see insert_select()'''
    if lists.is_seq(row): cols = None
    else:
        # dict row: split into column names and values
        cols = row.keys()
        row = row.values()
    row = list(row) # ensure that "!= []" works
    
    # Check for special values
    labels = []
    values = []
    for cell in row:
        if cell is default: labels.append('DEFAULT')
        else:
            labels.append('%s')
            values.append(cell)
    
    # Build query; an all-defaults row uses the driver's DEFAULT VALUES form
    query = None
    if values != []: query = ' VALUES ('+(', '.join(labels))+')'
    
    return insert_select(db, table, cols, query, values, *args, **kw_args)
608

    
609
def mk_update(db, table, changes=None, cond=None):
    '''
    @param changes [(col, new_value),...]
        * container can be any iterable type
        * col: sql_gen.Code|str (for col name)
        * new_value: sql_gen.Code|literal value
    @param cond sql_gen.Code WHERE condition. e.g. use sql_gen.*Cond objects.
    @return str query
    '''
    assignments = []
    for col, new_value in changes:
        assignments.append(sql_gen.to_name_only_col(col, table).to_str(db)
            +' = '+sql_gen.as_Value(new_value).to_str(db))
    
    query = 'UPDATE '+sql_gen.as_Table(table).to_str(db)+'\nSET\n'
    query += ',\n'.join(assignments)
    if cond != None: query += ' WHERE '+cond.to_str(db)
    
    return query
624

    
625
def update(db, *args, **kw_args):
    '''Builds and runs an UPDATE statement.
    For params, see mk_update() and run_query()'''
    recover = kw_args.pop('recover', None)
    query = mk_update(db, *args, **kw_args)
    return run_query(db, query, [], recover)
630

    
631
def last_insert_id(db):
    '''Returns the autogenerated id of the last inserted row, or None if the
    driver does not support retrieving it.'''
    module = util.root_module(db.db)
    if module == 'psycopg2': return value(run_query(db, 'SELECT lastval()'))
    # insert_id() is a method of the underlying MySQLdb connection (db.db),
    # not of the DbConn wrapper, whose __getattr__ only resolves 'db' and
    # raises AttributeError for anything else
    elif module == 'MySQLdb': return db.db.insert_id()
    else: return None
636

    
637
def truncate(db, table, schema='public'):
    '''Empties a table, cascading to tables that reference it.'''
    query = 'TRUNCATE '+qual_name(db, schema, table)+' CASCADE'
    return run_query(db, query)
639

    
640
def mk_flatten_mapping(db, into, cols, preserve=[], as_items=False):
    '''Creates a mapping from original column names (which may have collisions)
    to names that will be distinct among the columns' tables.
    This is meant to be used for several tables that are being joined together.
    @param cols The columns to combine. Duplicates will be removed.
    @param into The table for the new columns.
    @param preserve [sql_gen.Col...] Columns not to rename. Note that these
        columns will be included in the mapping even if they are not in cols.
        The tables of the provided Col objects will be changed to into, so make
        copies of them if you want to keep the original tables.
    @param as_items Whether to return a list of dict items instead of a dict
    @return dict(orig_col=new_col, ...)
        * orig_col: sql_gen.Col(orig_col_name, orig_table)
        * new_col: sql_gen.Col(orig_col_name, into)
        * All mappings use the into table so its name can easily be
          changed for all columns at once
    '''
    cols = lists.uniqify(cols)
    
    items = []
    for col in preserve:
        orig_col = copy.copy(col) # keep a pre-mutation copy for the mapping key
        col.table = into # mutates the caller's Col object (see docstring)
        items.append((orig_col, col))
    preserve = set(preserve)
    for col in cols:
        if col not in preserve:
            # Renamed columns get a sanitized, table-qualified-derived name
            items.append((col, sql_gen.Col(clean_name(str(col)), into)))
    
    if not as_items: items = dict(items)
    return items
671

    
672
def flatten(db, into, joins, cols, limit=None, start=None, **kw_args):
    '''Selects joined columns into a new table under collision-free names.
    For params, see mk_flatten_mapping()
    @return See return value of mk_flatten_mapping()
    '''
    items = mk_flatten_mapping(db, into, cols, as_items=True, **kw_args)
    select_cols = [sql_gen.NamedCol(new.name, old) for old, new in items]
    query, params = mk_select(db, joins, select_cols, limit=limit, start=start)
    run_query_into(db, query, params, into=into)
    return dict(items)
681

    
682
##### Database structure queries
683

    
684
def table_row_count(db, table, recover=None):
    '''Returns the number of rows in a table.'''
    query, params = mk_select(db, table, [sql_gen.row_count], order_by=None,
        start=0)
    return value(run_query(db, query, params, recover, log_level=3))
687

    
688
def table_cols(db, table, recover=None):
    '''Returns the list of column names in a table (via a zero-row SELECT).'''
    cur = select(db, table, limit=0, order_by=None, recover=recover,
        log_level=4)
    return list(col_names(cur))
691

    
692
def pkey(db, table, recover=None):
    '''Returns a table's primary key, assumed to be its first column.'''
    return table_cols(db, table, recover)[0]
695

    
696
# Conventional name of a table's always-non-NULL indicator column
not_null_col = 'not_null'
697

    
698
def table_not_null_col(db, table, recover=None):
    '''Name assumed to be the value of not_null_col. If not found, uses pkey.'''
    cols = table_cols(db, table, recover)
    if not_null_col in cols: return not_null_col
    return pkey(db, table, recover)
702

    
703
def index_cols(db, table, index):
    '''Returns the column names covered by an index, in index order.
    Can also use this for UNIQUE constraints, because a UNIQUE index is
    automatically created. When you don't know whether something is a UNIQUE
    constraint or a UNIQUE index, use this function.
    @raise NotImplementedError for non-PostgreSQL drivers'''
    module = util.root_module(db.db)
    if module == 'psycopg2':
        # First branch: plain column indexes (attnum in indkey); second
        # branch: expression indexes, whose columns are recovered by parsing
        # :varattno nodes out of the serialized index expression
        return list(values(run_query(db, '''\
SELECT attname
FROM
(
        SELECT attnum, attname
        FROM pg_index
        JOIN pg_class index ON index.oid = indexrelid
        JOIN pg_class table_ ON table_.oid = indrelid
        JOIN pg_attribute ON attrelid = indrelid AND attnum = ANY (indkey)
        WHERE
            table_.relname = %(table)s
            AND index.relname = %(index)s
    UNION
        SELECT attnum, attname
        FROM
        (
            SELECT
                indrelid
                , (regexp_matches(indexprs, E':varattno (\\\\d+)', 'g'))[1]::int
                    AS indkey
            FROM pg_index
            JOIN pg_class index ON index.oid = indexrelid
            JOIN pg_class table_ ON table_.oid = indrelid
            WHERE
                table_.relname = %(table)s
                AND index.relname = %(index)s
        ) s
        JOIN pg_attribute ON attrelid = indrelid AND attnum = indkey
) s
ORDER BY attnum
''',
            {'table': table, 'index': index}, cacheable=True, log_level=4)))
    else: raise NotImplementedError("Can't list index columns for "+module+
        ' database')
743

    
744
def constraint_cols(db, table, constraint):
    '''Returns the column names covered by a table constraint.
    @raise NotImplementedError for non-PostgreSQL drivers'''
    module = util.root_module(db.db)
    if module == 'psycopg2':
        return list(values(run_query(db, '''\
SELECT attname
FROM pg_constraint
JOIN pg_class ON pg_class.oid = conrelid
JOIN pg_attribute ON attrelid = conrelid AND attnum = ANY (conkey)
WHERE
    relname = %(table)s
    AND conname = %(constraint)s
ORDER BY attnum
''',
            {'table': table, 'constraint': constraint})))
    else: raise NotImplementedError("Can't list constraint columns for "+module+
        ' database')
760

    
761
# Name of the serial row-number column added by add_row_num()
row_num_col = '_row_num'
762

    
763
def index_col(db, col):
    '''Adds an index on a column if it doesn't already exist.'''
    assert sql_gen.is_table_col(col)
    
    table = col.table
    index = sql_gen.as_Table(clean_name(str(col)))
    col = sql_gen.to_name_only_col(col)
    query = ('CREATE INDEX '+index.to_str(db)+' ON '+table.to_str(db)
        +' ('+col.to_str(db)+')')
    try: run_query(db, query, recover=True, cacheable=True, log_level=3)
    except DuplicateTableException: pass # index already existed
773

    
774
def index_pkey(db, table, recover=None):
    '''Makes the first column in a table the primary key.
    @pre The table must not already have a primary key.
    '''
    table = sql_gen.as_Table(table)
    
    constraint = sql_gen.as_Table(table.name+'_pkey')
    col = sql_gen.to_name_only_col(pkey(db, table, recover))
    query = ('ALTER TABLE '+table.to_str(db)+' ADD CONSTRAINT '
        +constraint.to_str(db)+' PRIMARY KEY('+col.to_str(db)+')')
    run_query(db, query, recover=recover, log_level=3)
785

    
786
def add_row_num(db, table):
    '''Adds a row number column to a table. Its name is in row_num_col. It will
    be the primary key.'''
    table_str = sql_gen.as_Table(table).to_str(db)
    query = ('ALTER TABLE '+table_str+' ADD COLUMN '+row_num_col
        +' serial NOT NULL PRIMARY KEY')
    run_query(db, query, log_level=3)
792

    
793
def tables(db, schema='public', table_like='%'):
    '''Lists the tables in a schema whose names match a SQL LIKE pattern.
    Supported under PostgreSQL (psycopg2) and MySQL (MySQLdb).'''
    module = util.root_module(db.db)
    params = {'schema': schema, 'table_like': table_like}
    if module == 'psycopg2':
        cur = run_query(db, '''\
SELECT tablename
FROM pg_tables
WHERE
    schemaname = %(schema)s
    AND tablename LIKE %(table_like)s
ORDER BY tablename
''', params, cacheable=True)
    elif module == 'MySQLdb':
        cur = run_query(db, 'SHOW TABLES LIKE %(table_like)s', params,
            cacheable=True)
    else: raise NotImplementedError("Can't list tables for "+module+' database')
    return values(cur)
810

    
811
##### Database management
812

    
813
def empty_db(db, schema='public', **kw_args):
    '''Truncates every table in a schema. For kw_args, see tables()'''
    for name in tables(db, schema, **kw_args):
        truncate(db, name, schema)
816

    
817
##### Heuristic queries
818

    
819
def put(db, table, row, pkey_=None, row_ct_ref=None):
820
    '''Recovers from errors.
821
    Only works under PostgreSQL (uses INSERT RETURNING).
822
    '''
823
    if pkey_ == None: pkey_ = pkey(db, table, recover=True)
824
    
825
    try:
826
        cur = insert(db, table, row, pkey_, recover=True)
827
        if row_ct_ref != None and cur.rowcount >= 0:
828
            row_ct_ref[0] += cur.rowcount
829
        return value(cur)
830
    except DuplicateKeyException, e:
831
        return value(select(db, table, [pkey_],
832
            util.dict_subset_right_join(row, e.cols), recover=True))
833

    
834
def get(db, table, row, pkey, row_ct_ref=None, create=False):
    '''Looks up a row's pkey, optionally inserting the row if it's missing.
    Recovers from errors'''
    try:
        return value(select(db, table, [pkey], row, limit=1, recover=True))
    except StopIteration: # no matching row
        if create: return put(db, table, row, pkey, row_ct_ref) # insert new row
        raise
840

    
841
def put_table(db, out_table, in_tables, mapping, row_ct_ref=None):
    '''Inserts the mapped input rows into out_table, returning a column (in a
    temp pkeys table) that maps each input pkey to its output pkey.
    Recovers from errors.
    Only works under PostgreSQL (uses INSERT RETURNING).
    @param in_tables The main input table to select from, followed by a list of
        tables to join with it using the main input table's pkey
    @param mapping dict(out_table_col=in_table_col, ...)
        * out_table_col: sql_gen.Col|str
        * in_table_col: sql_gen.Col Wrap literal values in a sql_gen.NamedCol
    @param row_ct_ref 1-element list; if given, [0] is incremented by the
        number of rows inserted
    @return sql_gen.Col Where the output pkeys are made available
    '''
    out_table = sql_gen.as_Table(out_table)
    for in_table_col in mapping.itervalues():
        assert isinstance(in_table_col, sql_gen.Col)
    
    # All temp tables created here are prefixed with the output table's name
    temp_prefix = out_table.name
    pkeys = sql_gen.Table(temp_prefix+'_pkeys') # holds (in_pkey, out_pkey)
    
    # Create input joins from list of input tables
    in_tables_ = in_tables[:] # don't modify input!
    in_tables0 = in_tables_.pop(0) # first table is separate
    in_pkey = pkey(db, in_tables0, recover=True)
    in_pkey_col = sql_gen.as_Col(in_pkey, in_tables0)
    input_joins = [in_tables0]+[sql_gen.Join(v, {in_pkey: sql_gen.join_same})
        for v in in_tables_]
    
    db.log_debug('Joining together input tables')
    # Place in new table for speed and so don't modify input if values edited
    in_table = sql_gen.Table(temp_prefix+'_in')
    flatten_cols = filter(sql_gen.is_table_col, mapping.values())
    # flatten() copies the joined input into in_table; dicts.join() remaps
    # mapping's values to the flattened table's columns
    mapping = dicts.join(mapping, flatten(db, in_table, input_joins,
        flatten_cols, preserve=[in_pkey_col], start=0))
    input_joins = [in_table] # all further selects read the flattened copy
    
    out_pkey = pkey(db, out_table, recover=True)
    out_pkey_col = sql_gen.as_Col(out_pkey, out_table)
    
    pkeys_names = [in_pkey, out_pkey]
    pkeys_cols = [in_pkey_col, out_pkey_col]
    
    # 1-element list so the closure below can mutate the flag (py2 has no
    # `nonlocal`)
    pkeys_table_exists_ref = [False]
    def insert_into_pkeys(joins, cols):
        # Appends the selected rows to the pkeys table, creating it on first
        # use
        query, params = mk_select(db, joins, cols, order_by=None, start=0)
        if pkeys_table_exists_ref[0]:
            insert_select(db, pkeys, pkeys_names, query, params)
        else:
            run_query_into(db, query, params, into=pkeys)
            pkeys_table_exists_ref[0] = True
    
    # Shared, mutable select state; the error handlers below tighten these
    # between retries
    limit_ref = [None]
    conds = set()
    distinct_on = []
    def mk_main_select(joins, cols):
        return mk_select(db, joins, cols, conds, distinct_on,
            limit=limit_ref[0], start=0)
    
    def log_exc(e):
        db.log_debug('Caught exception: '+exc.str_(e, first_line_only=True))
    def remove_all_rows(msg):
        # Give up on inserting anything: warn, then make the next select
        # return no rows
        warnings.warn(DbWarning(msg))
        db.log_debug(msg.partition('\n')[0])
        db.log_debug('Returning NULL for all rows')
        limit_ref[0] = 0 # just create an empty pkeys table
    def ignore(in_col, value):
        in_col_str = str(in_col)
        db.log_debug('Adding index on '+in_col_str+' to enable fast filtering')
        index_col(db, in_col)
        db.log_debug('Ignoring rows with '+in_col_str+' = '+repr(value))
    def remove_rows(in_col, value):
        # Exclude rows with this bad value from all further selects
        ignore(in_col, value)
        cond = (in_col, sql_gen.CompareCond(value, '!='))
        assert cond not in conds # avoid infinite loops
        conds.add(cond)
    def invalid2null(in_col, value):
        # Replace this bad value with NULL in the flattened input copy
        ignore(in_col, value)
        update(db, in_table, [(in_col, None)],
            sql_gen.ColValueCond(in_col, value))
    
    # Do inserts and selects
    join_cols = {} # out_col -> in_col pairs used to match existing rows
    insert_out_pkeys = sql_gen.Table(temp_prefix+'_insert_out_pkeys')
    insert_in_pkeys = sql_gen.Table(temp_prefix+'_insert_in_pkeys')
    # Retry loop: each DB error is translated into an extra constraint on the
    # select (or a data fix), then the insert is re-attempted
    while True:
        has_joins = join_cols != {}
        
        # Prepare to insert new rows
        insert_joins = input_joins[:] # don't modify original!
        insert_args = dict(recover=True, cacheable=False)
        if has_joins:
            # Anti-join against out_table so already-present rows are skipped
            distinct_on = [v.to_Col() for v in join_cols.values()]
            insert_joins.append(sql_gen.Join(out_table, join_cols,
                sql_gen.filter_out))
        else:
            # No duplicates possible yet, so capture the new pkeys directly
            insert_args.update(dict(returning=out_pkey, into=insert_out_pkeys))
        
        db.log_debug('Inserting new rows')
        try:
            cur = insert_select(db, out_table, mapping.keys(),
                *mk_main_select(insert_joins, mapping.values()), **insert_args)
            break # insert successful
        except DuplicateKeyException, e:
            log_exc(e)
            
            # Start (or widen) duplicate filtering on the violated columns
            old_join_cols = join_cols.copy()
            join_cols.update(util.dict_subset(mapping, e.cols))
            db.log_debug('Ignoring existing rows, comparing on '+str(join_cols))
            assert join_cols != old_join_cols # avoid infinite loops
        except NullValueException, e:
            log_exc(e)
            
            out_col, = e.cols
            try: in_col = mapping[out_col]
            except KeyError:
                # No input column maps to this NOT NULL column; nothing can
                # be inserted
                remove_all_rows('Missing mapping for NOT NULL '+out_col)
            else: remove_rows(in_col, None)
        except FunctionValueException, e:
            log_exc(e)
            
            assert e.name == out_table.name
            out_col = 'value' # assume function param was named "value"
            invalid2null(mapping[out_col], e.value)
        except DatabaseErrors, e:
            log_exc(e)
            
            remove_all_rows('No handler for exception: '+exc.str_(e))
        # after exception handled, rerun loop with additional constraints
    
    if row_ct_ref != None and cur.rowcount >= 0:
        row_ct_ref[0] += cur.rowcount
    
    if has_joins:
        # Duplicate filtering was active, so join back to out_table to get
        # the pkeys of both pre-existing and newly-inserted rows
        select_joins = input_joins+[sql_gen.Join(out_table, join_cols)]
        db.log_debug('Getting output pkeys of existing/inserted rows')
        insert_into_pkeys(select_joins, pkeys_cols)
    else:
        # All rows were fresh inserts: pair input and output pkeys by their
        # positional row number
        add_row_num(db, insert_out_pkeys) # for joining with input pkeys
        
        db.log_debug('Getting input pkeys for rows in insert')
        run_query_into(db, *mk_main_select(input_joins, [in_pkey]),
            into=insert_in_pkeys)
        add_row_num(db, insert_in_pkeys) # for joining with output pkeys
        
        assert table_row_count(db, insert_out_pkeys) == table_row_count(db,
            insert_in_pkeys)
        
        db.log_debug('Joining together output and input pkeys')
        pkey_joins = [insert_in_pkeys, sql_gen.Join(insert_out_pkeys,
            {row_num_col: sql_gen.join_same_not_null})]
        insert_into_pkeys(pkey_joins, pkeys_names)
    
    db.log_debug('Adding pkey on returned pkeys table to enable fast joins')
    index_pkey(db, pkeys)
    
    # Input rows that were filtered out above get an explicit NULL out_pkey so
    # every input pkey appears in the result
    db.log_debug("Setting missing rows' pkeys to NULL")
    missing_rows_joins = input_joins+[sql_gen.Join(pkeys,
        {in_pkey: sql_gen.join_same_not_null}, sql_gen.filter_out)]
        # must use join_same_not_null or query will take forever
    insert_into_pkeys(missing_rows_joins,
        [in_pkey_col, sql_gen.NamedCol(out_pkey, None)])
    
    assert table_row_count(db, pkeys) == table_row_count(db, in_table)
    
    return sql_gen.Col(out_pkey, pkeys)
1003

    
1004
##### Data cleanup
1005

    
1006
def cleanup_table(db, table, cols):
    '''Trims whitespace in the given columns and replaces empty strings and
    the \\N placeholder with NULL.'''
    table_str = sql_gen.as_Table(table).to_str(db)
    esc_cols = [esc_name(db, name) for name in cols]
    
    assignments = ',\n'.join(('\n'+col
        +' = nullif(nullif(trim(both from '+col+"), %(null0)s), %(null1)s)"
        for col in esc_cols))
    run_query(db, 'UPDATE '+table_str+' SET\n'+assignments,
        dict(null0='', null1=r'\N'))
(23-23/35)