# (page header from code-browser export: "Project" / "General" / "Profile")
# Database access
2

    
3
import copy
4
import operator
5
import re
6
import warnings
7

    
8
import exc
9
import dicts
10
import iters
11
import lists
12
from Proxy import Proxy
13
import rand
14
import sql_gen
15
import strings
16
import util
17

    
18
##### Exceptions
19

    
20
def get_cur_query(cur, input_query=None, input_params=None):
    '''Returns the most informative available form of a cursor's query.

    Prefers the query text recorded by the DB driver (psycopg2 sets .query,
    MySQLdb sets ._last_executed); falls back to reconstructing it from the
    caller-supplied query and params.
    '''
    raw_query = getattr(cur, 'query', None)
    # psycopg2's .query is None before the first execute(), so also try
    # the MySQLdb attribute in that case
    if raw_query is None: raw_query = getattr(cur, '_last_executed', None)
    
    if raw_query is not None: return raw_query
    # fall back to the un-mogrified input query
    return '[input] '+strings.ustr(input_query)+' % '+repr(input_params)
27

    
28
def _add_cursor_info(e, *args, **kw_args):
    '''Annotates an exception with the query that caused it.
    For params, see get_cur_query()'''
    exc.add_msg(e, 'query: '+str(get_cur_query(*args, **kw_args)))
31

    
32
class DbException(exc.ExceptionWithCause):
    '''Base class for all DB-related exceptions.
    @param cur If given, the cursor's query is appended to the message.
    '''
    def __init__(self, msg, cause=None, cur=None):
        exc.ExceptionWithCause.__init__(self, msg, cause, cause_newline=True)
        if cur != None: _add_cursor_info(self, cur)
36

    
37
class ExceptionWithName(DbException):
    '''A DB error associated with a named object (table, function, ...)'''
    def __init__(self, name, cause=None):
        DbException.__init__(self, 'for name: '+str(name), cause)
        self.name = name # the offending object's name
41

    
42
class ExceptionWithNameValue(DbException):
    '''A DB error associated with a named object and an offending value'''
    def __init__(self, name, value, cause=None):
        DbException.__init__(self,
            'for name: '+str(name)+'; value: '+repr(value), cause)
        self.name = name # the offending object's name
        self.value = value # the offending value
48

    
49
class ConstraintException(DbException):
    '''A violated table constraint
    @param name The constraint's name
    @param cols The column names the constraint covers
    '''
    def __init__(self, name, cols, cause=None):
        DbException.__init__(self, 'Violated '+name+ ' constraint on columns: '
            +(', '.join(cols)), cause)
        self.name = name
        self.cols = cols
55

    
56
class NameException(DbException): pass
57

    
58
class DuplicateKeyException(ConstraintException): pass
59

    
60
class NullValueException(ConstraintException): pass
61

    
62
class FunctionValueException(ExceptionWithNameValue): pass
63

    
64
class DuplicateTableException(ExceptionWithName): pass
65

    
66
class DuplicateFunctionException(ExceptionWithName): pass
67

    
68
class EmptyRowException(DbException): pass
69

    
70
##### Warnings
71

    
72
class DbWarning(UserWarning): pass
73

    
74
##### Result retrieval
75

    
76
def col_names(cur):
    '''Yields the name of each column in cur's result description.'''
    for description_entry in cur.description:
        yield description_entry[0] # first field of each entry is the name
77

    
78
def rows(cur):
    '''Iterates over a cursor's rows until the result set is exhausted.'''
    # iter()'s two-arg form calls fetchone() until it returns the sentinel
    return iter(cur.fetchone, None)
79

    
80
def consume_rows(cur):
    '''Used to fetch all rows so result will be cached'''
    iters.consume_iter(rows(cur)) # discards the rows; caching is a side effect
83

    
84
def next_row(cur):
    '''Returns the cursor's next row. Raises StopIteration if exhausted.'''
    # next() builtin instead of .next(): works on Python 2.6+ and Python 3
    return next(rows(cur))
85

    
86
def row(cur):
    '''Returns the cursor's first row, consuming the remaining rows so the
    full result will be cached.'''
    first_row = next_row(cur)
    consume_rows(cur)
    return first_row
90

    
91
def next_value(cur):
    '''Returns the first column of the cursor's next row.'''
    next_row_ = next_row(cur)
    return next_row_[0]
92

    
93
def value(cur):
    '''Returns the first column of the cursor's first row.'''
    first_row = row(cur)
    return first_row[0]
94

    
95
def values(cur): return iters.func_iter(lambda: next_value(cur)) # iterates over the first column of every row
96

    
97
def value_or_none(cur):
    '''Like value(), but returns None instead of raising on an empty result.'''
    try:
        return value(cur)
    except StopIteration:
        return None
100

    
101
##### Input validation
102

    
103
def clean_name(name):
    '''Makes a name safe for use as an identifier: "." becomes "_" and all
    other non-word characters are stripped.'''
    underscored = name.replace('.', '_')
    return re.sub(r'\W', '', underscored)
104

    
105
def check_name(name):
    '''Raises NameException unless name contains only word characters.'''
    if re.search(r'\W', name) is not None:
        raise NameException('Name "'+name
            +'" may contain only alphanumeric characters and _')
108

    
109
def esc_name_by_module(module, name, ignore_case=False):
    '''Quotes an identifier using the given DB-API module's quoting rules.
    @param ignore_case If set (PostgreSQL only), leave the name unquoted so
        the server treats it case-insensitively; the name is validated instead.
    '''
    if module == 'MySQLdb':
        quote = '`'
    elif module == 'psycopg2' or module == None:
        if ignore_case:
            # Don't enclose in quotes because this disables case-insensitivity
            check_name(name)
            return name
        quote = '"'
    else: raise NotImplementedError("Can't escape name for "+module+' database')
    # strip embedded quote chars so the name can't break out of the quoting
    return quote + name.replace(quote, '') + quote
119

    
120
def esc_name_by_engine(engine, name, **kw_args):
    '''Quotes an identifier for the given engine (key of db_engines).
    For kw_args, see esc_name_by_module().'''
    return esc_name_by_module(db_engines[engine][0], name, **kw_args)
122

    
123
def esc_name(db, name, **kw_args):
    '''Quotes an identifier using db's driver's quoting rules.
    For kw_args, see esc_name_by_module().'''
    return esc_name_by_module(util.root_module(db.db), name, **kw_args)
125

    
126
def qual_name(db, schema, table):
    '''Returns the escaped, optionally schema-qualified, name of a table.'''
    escaped_table = esc_name(db, table)
    if schema == None:
        return escaped_table
    return esc_name(db, schema)+'.'+escaped_table
131

    
132
##### Database connections
133

    
134
# Recognized keys of a db_config dict
db_config_names = ['engine', 'host', 'user', 'password', 'database', 'schemas']

# Maps engine name -> (DB-API module name, renamings from db_config keys to
# that module's connect() keyword args)
db_engines = {
    'MySQL': ('MySQLdb', {'password': 'passwd', 'database': 'db'}),
    'PostgreSQL': ('psycopg2', {}),
}

# DbException plus the DatabaseError class of every DB-API module loaded so
# far; extended by _add_module() when an engine is first imported
DatabaseErrors_set = set([DbException])
DatabaseErrors = tuple(DatabaseErrors_set)
143

    
144
def _add_module(module):
    '''Registers a DB-API module's DatabaseError in DatabaseErrors'''
    DatabaseErrors_set.add(module.DatabaseError)
    global DatabaseErrors
    DatabaseErrors = tuple(DatabaseErrors_set) # rebuild the exported tuple
148

    
149
def db_config_str(db_config):
    '''Formats a db_config dict as "<engine> database <name>" for messages.'''
    return db_config['engine'] + ' database ' + db_config['database']
151

    
152
def _query_lookup(query, params): return (query, dicts.make_hashable(params)) # hashable cache key for a query+params pair
153

    
154
log_debug_none = lambda msg: None # default log_debug callback: discard messages
155

    
156
class DbConn:
    '''Lazy DB-API connection wrapper with query-result caching, savepoint
    tracking, and optional per-statement autocommit.
    The underlying connection is opened on first access of the db attribute.
    The open connection and the debug callback are dropped when pickled
    (see __getstate__).
    '''
    def __init__(self, db_config, serializable=True, autocommit=False,
        caching=True, log_debug=log_debug_none):
        '''
        @param db_config dict with keys from db_config_names
        @param serializable Whether to use SERIALIZABLE transaction isolation
        @param autocommit Whether to commit after statements run outside a
            savepoint (see do_autocommit())
        @param caching Whether run_query(cacheable=True) may reuse results
        @param log_debug Debug-message callback; run_query() invokes it with
            (msg, level)
        '''
        self.db_config = db_config
        self.serializable = serializable
        self.autocommit = autocommit
        self.caching = caching
        self.log_debug = log_debug
        self.debug = log_debug != log_debug_none # a real callback was given
        
        self.__db = None # opened on demand by _db()
        self.query_results = {} # query lookup -> CacheCursor
        self._savepoint = 0 # nesting depth of with_savepoint()
    
    def __getattr__(self, name):
        # NOTE(review): the __dict__ guard presumably prevents infinite
        # recursion during unpickling -- confirm
        if name == '__dict__': raise Exception('getting __dict__')
        if name == 'db': return self._db() # lazily connects
        else: raise AttributeError()
    
    def __getstate__(self):
        state = copy.copy(self.__dict__) # shallow copy
        state['log_debug'] = None # don't pickle the debug callback
        state['_DbConn__db'] = None # don't pickle the connection
        return state
    
    def connected(self): return self.__db != None
    
    def _db(self):
        '''Returns the underlying DB-API connection, opening it if needed'''
        if self.__db == None:
            # Process db_config
            db_config = self.db_config.copy() # don't modify input!
            schemas = db_config.pop('schemas', None)
            module_name, mappings = db_engines[db_config.pop('engine')]
            module = __import__(module_name)
            _add_module(module)
            # rename config keys to the connect() kw args this driver expects
            for orig, new in mappings.iteritems():
                try: util.rename_key(db_config, orig, new)
                except KeyError: pass
            
            # Connect
            self.__db = module.connect(**db_config)
            
            # Configure connection
            if self.serializable and not self.autocommit: run_raw_query(self,
                'SET TRANSACTION ISOLATION LEVEL SERIALIZABLE')
            if schemas != None:
                # prepend the requested schemas to the search_path
                schemas_ = ''.join((esc_name(self, s)+', '
                    for s in schemas.split(',')))
                run_raw_query(self, "SELECT set_config('search_path', \
%s || current_setting('search_path'), false)", [schemas_])
        
        return self.__db
    
    class DbCursor(Proxy):
        '''Cursor proxy that records the query and its full result so they
        can be cached in the outer DbConn's query_results'''
        def __init__(self, outer):
            Proxy.__init__(self, outer.db.cursor())
            self.outer = outer
            self.query_results = outer.query_results
            self.query_lookup = None # set by execute()
            self.result = [] # rows fetched so far, or the raised exception
        
        def execute(self, query, params=None):
            self._is_insert = query.upper().find('INSERT') >= 0
            self.query_lookup = _query_lookup(query, params)
            try:
                try:
                    return_value = self.inner.execute(query, params)
                    self.outer.do_autocommit()
                finally: self.query = get_cur_query(self.inner)
            except Exception, e:
                _add_cursor_info(e, self, query, params)
                self.result = e # cache the exception as the result
                self._cache_result()
                raise
            # Fetch all rows so result will be cached
            if self.rowcount == 0 and not self._is_insert: consume_rows(self)
            return return_value
        
        def fetchone(self):
            row = self.inner.fetchone()
            if row != None: self.result.append(row)
            # otherwise, fetched all rows
            else: self._cache_result()
            return row
        
        def _cache_result(self):
            # For inserts, only cache exceptions since inserts are not
            # idempotent, but an invalid insert will always be invalid
            if self.query_results != None and (not self._is_insert
                or isinstance(self.result, Exception)):
                
                assert self.query_lookup != None
                self.query_results[self.query_lookup] = self.CacheCursor(
                    util.dict_subset(dicts.AttrsDictView(self),
                    ['query', 'result', 'rowcount', 'description']))
        
        class CacheCursor:
            '''Replays a cached result (or re-raises a cached exception)
            through the cursor interface that run_query() expects'''
            def __init__(self, cached_result): self.__dict__ = cached_result
            
            def execute(self, *args, **kw_args):
                if isinstance(self.result, Exception): raise self.result
                # otherwise, result is a rows list
                self.iter = iter(self.result)
            
            def fetchone(self):
                try: return self.iter.next()
                except StopIteration: return None
    
    def esc_value(self, value):
        '''Escapes a literal value for inclusion in SQL text'''
        module = util.root_module(self.db)
        if module == 'psycopg2': str_ = self.db.cursor().mogrify('%s', [value])
        elif module == 'MySQLdb':
            import _mysql
            str_ = _mysql.escape_string(value)
        else: raise NotImplementedError("Can't escape value for "+module
            +' database')
        return strings.to_unicode(str_)
    
    def esc_name(self, name): return esc_name(self, name) # calls global func
    
    def run_query(self, query, params=None, cacheable=False, log_level=2):
        '''Runs a query, serving it from the cache when possible.
        See self.DbCursor.execute().
        @param cacheable Whether the result may be cached and reused; forced
            to False when the connection was created with caching=False
        @param log_level Verbosity level passed to the log_debug callback
        '''
        assert query != None
        
        if not self.caching: cacheable = False
        used_cache = False
        try:
            # Get cursor
            if cacheable:
                query_lookup = _query_lookup(query, params)
                try:
                    cur = self.query_results[query_lookup]
                    used_cache = True
                except KeyError: cur = self.DbCursor(self)
            else: cur = self.db.cursor()
            
            # Run query
            cur.execute(query, params)
        finally:
            if self.debug: # only compute msg if needed
                if used_cache: cache_status = 'Cache hit'
                elif cacheable: cache_status = 'Cache miss'
                else: cache_status = 'Non-cacheable'
                self.log_debug(cache_status+': '+strings.one_line(
                    str(get_cur_query(cur, query, params))), log_level)
        
        return cur
    
    def is_cached(self, query, params=None):
        '''Whether a cached result exists for this query+params'''
        return _query_lookup(query, params) in self.query_results
    
    def with_savepoint(self, func):
        '''Runs func() inside a new savepoint: rolls back to it on error,
        releases it (and possibly autocommits) on success'''
        savepoint = 'level_'+str(self._savepoint)
        self.run_query('SAVEPOINT '+savepoint, log_level=4)
        self._savepoint += 1
        try: 
            try: return_val = func()
            finally:
                self._savepoint -= 1
                assert self._savepoint >= 0
        except:
            self.run_query('ROLLBACK TO SAVEPOINT '+savepoint, log_level=4)
            raise
        else:
            self.run_query('RELEASE SAVEPOINT '+savepoint, log_level=4)
            self.do_autocommit()
            return return_val
    
    def do_autocommit(self):
        '''Autocommits if outside savepoint'''
        assert self._savepoint >= 0
        if self.autocommit and self._savepoint == 0:
            # NOTE(review): log_debug is called with one arg here but with
            # (msg, level) in run_query() -- confirm the callback contract
            self.log_debug('Autocommiting')
            self.db.commit()
331

    
332
connect = DbConn # alias: connect(db_config, ...) constructs a DbConn
333

    
334
##### Querying
335

    
336
def run_raw_query(db, *args, **kw_args):
    '''Runs a query on db without typed-error translation.
    For params, see DbConn.run_query()'''
    run = db.run_query
    return run(*args, **kw_args)
339

    
340
def mogrify(db, query, params):
    '''Returns the query text with params substituted, exactly as the driver
    would send it to the server'''
    module = util.root_module(db.db)
    if module == 'psycopg2': return db.db.cursor().mogrify(query, params)
    else: raise NotImplementedError("Can't mogrify query for "+module+
        ' database')
345

    
346
##### Recoverable querying
347

    
348
def with_savepoint(db, func):
    '''Runs func() inside a savepoint on db (see DbConn.with_savepoint())'''
    wrapped = db.with_savepoint
    return wrapped(func)
349

    
350
def run_query(db, query, params=None, recover=None, cacheable=False, **kw_args):
    '''Runs a query, optionally translating known DB error messages into the
    typed exceptions defined in this module.
    For params, see run_raw_query()
    @param recover Whether to run inside a savepoint so a failed query does
        not abort the enclosing transaction, and to translate errors
    '''
    if recover == None: recover = False
    
    try:
        def run(): return run_raw_query(db, query, params, cacheable, **kw_args)
        if recover and not db.is_cached(query, params):
            return with_savepoint(db, run)
        else: return run() # don't need savepoint if cached
    except Exception, e:
        if not recover: raise # need savepoint to run index_cols()
        msg = exc.str_(e)
        
        # unique-constraint violation -> DuplicateKeyException
        # (constraint names start with the table name, so group 2 recovers it)
        match = re.search(r'duplicate key value violates unique constraint '
            r'"((_?[^\W_]+)_[^"]+?)"', msg)
        if match:
            constraint, table = match.groups()
            try: cols = index_cols(db, table, constraint)
            except NotImplementedError: raise e
            else: raise DuplicateKeyException(constraint, cols, e)
        
        # NOT NULL violation -> NullValueException
        match = re.search(r'null value in column "(\w+?)" violates not-null '
            r'constraint', msg)
        if match: raise NullValueException('NOT NULL', [match.group(1)], e)
        
        # bad value passed to a SQL function -> FunctionValueException
        match = re.search(r'\b(?:invalid input (?:syntax|value)\b.*?'
            r'|date/time field value out of range): "(.+?)"\n'
            r'(?:(?s).*?)\bfunction "(\w+?)".*?\bat assignment', msg)
        if match:
            value, name = match.groups()
            raise FunctionValueException(name, strings.to_unicode(value), e)
        
        # CREATE of an existing table -> DuplicateTableException
        match = re.search(r'relation "(\w+?)" already exists', msg)
        if match: raise DuplicateTableException(match.group(1), e)
        
        # CREATE of an existing function -> DuplicateFunctionException
        match = re.search(r'function "(\w+?)" already exists', msg)
        if match: raise DuplicateFunctionException(match.group(1), e)
        
        raise # no specific exception raised
389

    
390
##### Basic queries
391

    
392
def next_version(name):
    '''Prepends the version # so it won't be removed if the name is truncated'''
    match = re.match(r'^v(\d+)_(.*)$', name)
    if match:
        # already versioned: bump the existing version number
        existing_version, base_name = match.groups()
        return 'v'+str(int(existing_version)+1)+'_'+base_name
    return 'v1_'+name # first existing name was version 0
400

    
401
def run_query_into(db, query, params, into=None, *args, **kw_args):
    '''Outputs a query to a temp table.
    For params, see run_query().
    @param into sql_gen.Table to receive the rows, or None to run normally.
        Its name is versioned (see next_version()) until CREATE succeeds.
    '''
    if into == None: return run_query(db, query, params, *args, **kw_args)
    else: # place rows in temp table
        assert isinstance(into, sql_gen.Table)
        
        kw_args['recover'] = True # needed to catch DuplicateTableException
        
        temp = not db.debug # tables are created as permanent in debug mode
        # "temporary tables cannot specify a schema name", so remove schema
        if temp: into.schema = None
        
        while True:
            try:
                create_query = 'CREATE'
                if temp: create_query += ' TEMP'
                create_query += ' TABLE '+into.to_str(db)+' AS '+query
                
                return run_query(db, create_query, params, *args, **kw_args)
                    # CREATE TABLE AS sets rowcount to # rows in query
            except DuplicateTableException, e:
                into.name = next_version(into.name)
                # try again with next version of name
426

    
427
order_by_pkey = object() # tells mk_select() to order by the pkey
428

    
429
distinct_on_all = object() # tells mk_select() to SELECT DISTINCT ON all columns
430

    
431
def mk_select(db, tables, fields=None, conds=None, distinct_on=[], limit=None,
    start=None, order_by=order_by_pkey, default_table=None):
    '''Builds a SELECT query (without running it).
    @param tables The single table to select from, or a list of tables to join
        together, with tables after the first being sql_gen.Join objects
    @param fields Use None to select all fields in the table
    @param conds WHERE conditions: [(compare_left_side, compare_right_side),...]
        * container can be any iterable type
        * compare_left_side: sql_gen.Code|str (for col name)
        * compare_right_side: sql_gen.ValueCond|literal value
    @param distinct_on The columns to SELECT DISTINCT ON, or distinct_on_all to
        use all columns
    @param limit int|None LIMIT value
    @param start int|None OFFSET value
    @param order_by Column to ORDER BY, order_by_pkey for the first table's
        pkey, or None for no ordering
    @param default_table Table to qualify unqualified column names with
    @return tuple(query, params)
    '''
    # Parse tables param
    if not lists.is_seq(tables): tables = [tables]
    tables = list(tables) # don't modify input! (list() copies input)
    table0 = sql_gen.as_Table(tables.pop(0)) # first table is separate
    
    # Parse other params
    if conds == None: conds = []
    elif isinstance(conds, dict): conds = conds.items()
    conds = list(conds) # don't modify input! (list() copies input)
    assert limit == None or type(limit) == int
    assert start == None or type(start) == int
    if order_by is order_by_pkey:
        # DISTINCT ON and ORDER BY the pkey are incompatible defaults
        if distinct_on != []: order_by = None
        else: order_by = pkey(db, table0, recover=True)
    
    query = 'SELECT'
    
    def parse_col(col): return sql_gen.as_Col(col, default_table).to_str(db)
    
    # DISTINCT ON columns
    if distinct_on != []:
        query += ' DISTINCT'
        if distinct_on is not distinct_on_all:
            query += ' ON ('+(', '.join(map(parse_col, distinct_on)))+')'
    
    # Columns
    query += ' '
    if fields == None: query += '*'
    else: query += ', '.join(map(parse_col, fields))
    
    # Main table
    query += ' FROM '+table0.to_str(db)
    
    # Add joins
    left_table = table0
    for join_ in tables:
        table = join_.table
        
        # Parse special values
        if join_.type_ is sql_gen.filter_out: # filter no match
            conds.append((sql_gen.Col(table_not_null_col(db, table), table),
                None))
        
        query += ' '+join_.to_str(db, left_table)
        
        left_table = table
    
    missing = True # whether the query lacks any row-limiting clause
    if conds != []:
        query += ' WHERE '+(' AND '.join(('('+sql_gen.ColValueCond(l, r)
            .to_str(db)+')' for l, r in conds)))
        missing = False
    if order_by != None:
        query += ' ORDER BY '+sql_gen.as_Col(order_by, table0).to_str(db)
    if limit != None: query += ' LIMIT '+str(limit); missing = False
    if start != None:
        if start != 0: query += ' OFFSET '+str(start)
        missing = False
    # warn about unbounded SELECTs, which are usually accidental
    if missing: warnings.warn(DbWarning(
        'SELECT statement missing a WHERE, LIMIT, or OFFSET clause: '+query))
    
    return (query, [])
507

    
508
def select(db, *args, **kw_args):
    '''Builds and runs a SELECT query.
    For params, see mk_select() and run_query()'''
    # split off the run_query() options before forwarding to mk_select()
    recover_ = kw_args.pop('recover', None)
    cacheable_ = kw_args.pop('cacheable', True)
    log_level_ = kw_args.pop('log_level', 2)
    
    query, params = mk_select(db, *args, **kw_args)
    return run_query(db, query, params, recover_, cacheable_,
        log_level=log_level_)
516

    
517
def mk_insert_select(db, table, cols=None, select_query=None, params=None,
    returning=None, embeddable=False):
    '''Builds an INSERT ... SELECT query (without running it).
    @param cols Columns to insert into, or None/[] to use the table's defaults
    @param select_query The SELECT supplying the rows, or None for
        DEFAULT VALUES
    @param returning str|None An inserted column (such as pkey) to return
    @param embeddable Whether the query should be embeddable as a nested SELECT.
        Warning: If you set this and cacheable=True when the query is run, the
        query will be fully cached, not just if it raises an exception.
    @return tuple(query, params)
    '''
    table = sql_gen.as_Table(table)
    if cols == []: cols = None # no cols (all defaults) = unknown col names
    if cols != None: cols = [sql_gen.as_Col(v).to_str(db) for v in cols]
    if select_query == None: select_query = 'DEFAULT VALUES'
    if returning != None: returning = sql_gen.as_Col(returning, table)
    
    # Build query
    query = 'INSERT INTO '+table.to_str(db)
    if cols != None: query += ' ('+', '.join(cols)+')'
    query += ' '+select_query
    
    if returning != None:
        returning_name = copy.copy(returning)
        returning_name.table = None # RETURNING columns are unqualified
        returning_name = returning_name.to_str(db)
        query += ' RETURNING '+returning_name
    
    if embeddable:
        assert returning != None
        
        # Create function
        # cols may be None (all-defaults insert), so guard the concatenation
        function_name = '_'.join(['insert', table.name] + (cols or []))
        return_type = 'SETOF '+returning.to_str(db)+'%TYPE'
        while True:
            try:
                func_schema = None
                if not db.debug: func_schema = 'pg_temp'
                function = sql_gen.Table(function_name, func_schema).to_str(db)
                
                function_query = '''\
CREATE FUNCTION '''+function+'''() RETURNS '''+return_type+'''
    LANGUAGE sql
    AS $$'''+mogrify(db, query, params)+''';$$;
'''
                run_query(db, function_query, recover=True, cacheable=True)
                break # this version was successful
            except DuplicateFunctionException:
                # try again with next version of name
                function_name = next_version(function_name)
        
        # Return query that uses function
        func_table = sql_gen.NamedTable('f', sql_gen.CustomCode(function+'()'),
            [returning_name]) # AS clause requires function alias
        return mk_select(db, func_table, start=0, order_by=None)
    
    return (query, params)
571

    
572
def insert_select(db, *args, **kw_args):
    '''Builds and runs an INSERT ... SELECT query.
    For params, see mk_insert_select() and run_query_into()
    @param into sql_gen.Table with suggested name of temp table to put RETURNING
        values in
    '''
    into_table = kw_args.pop('into', None)
    recover_ = kw_args.pop('recover', None)
    cacheable_ = kw_args.pop('cacheable', True)
    if into_table != None:
        kw_args['embeddable'] = True # feeding a temp table needs a nested query
    
    query, params = mk_insert_select(db, *args, **kw_args)
    return run_query_into(db, query, params, into_table, recover=recover_,
        cacheable=cacheable_)
585

    
586
default = object() # sentinel: tells insert() to use the column's DEFAULT value
587

    
588
def insert(db, table, row, *args, **kw_args):
    '''Inserts a single row. For other params, see insert_select()
    @param row dict(col=value) or sequence of values in table-column order;
        values may be the module-level default sentinel
    '''
    if lists.is_seq(row): cols = None
    else:
        cols = row.keys()
        row = row.values()
    row = list(row) # ensure that "!= []" works
    
    # Check for special values
    labels = []
    values = []
    for value in row:
        if value is default: labels.append('DEFAULT') # use column default
        else:
            labels.append('%s')
            values.append(value)
    
    # Build query
    if values != []: query = ' VALUES ('+(', '.join(labels))+')'
    else: query = None # no literal values: mk_insert_select() uses DEFAULT VALUES
    
    return insert_select(db, table, cols, query, values, *args, **kw_args)
610

    
611
def mk_update(db, table, changes=None, cond=None):
    '''Builds an UPDATE query (without running it).
    @param changes [(col, new_value),...]
        * container can be any iterable type
        * col: sql_gen.Code|str (for col name)
        * new_value: sql_gen.Code|literal value
    @param cond sql_gen.Code WHERE condition. e.g. use sql_gen.*Cond objects.
    @return str query
    '''
    query = 'UPDATE '+sql_gen.as_Table(table).to_str(db)+'\nSET\n'
    # one "col = value" assignment per line
    query += ',\n'.join((sql_gen.to_name_only_col(col, table).to_str(db)+' = '
        +sql_gen.as_Value(new_value).to_str(db) for col, new_value in changes))
    if cond != None: query += ' WHERE '+cond.to_str(db)
    
    return query
626

    
627
def update(db, *args, **kw_args):
    '''Builds and runs an UPDATE query.
    For params, see mk_update() and run_query()'''
    recover_ = kw_args.pop('recover', None)
    query = mk_update(db, *args, **kw_args)
    return run_query(db, query, [], recover_)
632

    
633
def last_insert_id(db):
    '''Returns the id generated for the last inserted row, or None if the
    engine does not support retrieving it'''
    module = util.root_module(db.db)
    if module == 'psycopg2': return value(run_query(db, 'SELECT lastval()'))
    # insert_id() is a method of the raw MySQLdb connection (db.db), not of
    # the DbConn wrapper, whose __getattr__ only resolves 'db'
    elif module == 'MySQLdb': return db.db.insert_id()
    else: return None
638

    
639
def truncate(db, table, schema='public'):
    '''TRUNCATEs a table, cascading to tables that reference it'''
    return run_query(db, 'TRUNCATE '+qual_name(db, schema, table)+' CASCADE')
641

    
642
def mk_flatten_mapping(db, into, cols, preserve=[], as_items=False):
    '''Creates a mapping from original column names (which may have collisions)
    to names that will be distinct among the columns' tables.
    This is meant to be used for several tables that are being joined together.
    @param cols The columns to combine. Duplicates will be removed.
    @param into The table for the new columns.
    @param preserve [sql_gen.Col...] Columns not to rename. Note that these
        columns will be included in the mapping even if they are not in cols.
        The tables of the provided Col objects will be changed to into, so make
        copies of them if you want to keep the original tables.
    @param as_items Whether to return a list of dict items instead of a dict
    @return dict(orig_col=new_col, ...)
        * orig_col: sql_gen.Col(orig_col_name, orig_table)
        * new_col: sql_gen.Col(orig_col_name, into)
        * All mappings use the into table so its name can easily be
          changed for all columns at once
    '''
    cols = lists.uniqify(cols)
    
    items = []
    for col in preserve:
        orig_col = copy.copy(col) # keep an unmodified copy as the key
        col.table = into # mutates the caller's Col (see @param preserve)
        items.append((orig_col, col))
    preserve = set(preserve)
    for col in cols:
        if col not in preserve:
            # flatten "table.col" into a single distinct name in into
            items.append((col, sql_gen.Col(clean_name(str(col)), into)))
    
    if not as_items: items = dict(items)
    return items
673

    
674
def flatten(db, into, joins, cols, limit=None, start=None, **kw_args):
    '''Runs a join query and stores its rows in a temp table, with the column
    names flattened to be distinct (see mk_flatten_mapping()).
    For params, see mk_flatten_mapping()
    @return See return value of mk_flatten_mapping()
    '''
    items = mk_flatten_mapping(db, into, cols, as_items=True, **kw_args)
    # select each old column under its new, distinct name
    cols = [sql_gen.NamedCol(new.name, old) for old, new in items]
    run_query_into(db, *mk_select(db, joins, cols, limit=limit, start=start),
        into=into)
    return dict(items)
683

    
684
##### Database structure queries
685

    
686
def table_row_count(db, table, recover=None):
    '''Returns the number of rows in a table'''
    return value(run_query(db, *mk_select(db, table, [sql_gen.row_count],
        order_by=None, start=0), recover=recover, log_level=3))
689

    
690
def table_cols(db, table, recover=None):
    '''Returns the names of a table's columns, in table order'''
    # LIMIT 0 fetches no rows but still populates the cursor description
    return list(col_names(select(db, table, limit=0, order_by=None,
        recover=recover, log_level=4)))
693

    
694
def pkey(db, table, recover=None):
    '''Returns the table's pkey, assumed to be its first column'''
    cols = table_cols(db, table, recover)
    return cols[0]
697

    
698
# Conventional name of a column that is non-NULL on every matched row
not_null_col = 'not_null'

def table_not_null_col(db, table, recover=None):
    '''Name assumed to be the value of not_null_col. If not found, uses pkey.'''
    if not_null_col in table_cols(db, table, recover): return not_null_col
    else: return pkey(db, table, recover)
704

    
705
def index_cols(db, table, index):
    '''Returns the names of the columns in an index, in index order.
    Can also use this for UNIQUE constraints, because a UNIQUE index is
    automatically created. When you don't know whether something is a UNIQUE
    constraint or a UNIQUE index, use this function.'''
    module = util.root_module(db.db)
    if module == 'psycopg2':
        # Plain index columns come straight from pg_index.indkey;
        # expression-index columns must be parsed out of indexprs.
        # The two cases are UNIONed and ordered by column position.
        return list(values(run_query(db, '''\
SELECT attname
FROM
(
        SELECT attnum, attname
        FROM pg_index
        JOIN pg_class index ON index.oid = indexrelid
        JOIN pg_class table_ ON table_.oid = indrelid
        JOIN pg_attribute ON attrelid = indrelid AND attnum = ANY (indkey)
        WHERE
            table_.relname = %(table)s
            AND index.relname = %(index)s
    UNION
        SELECT attnum, attname
        FROM
        (
            SELECT
                indrelid
                , (regexp_matches(indexprs, E':varattno (\\\\d+)', 'g'))[1]::int
                    AS indkey
            FROM pg_index
            JOIN pg_class index ON index.oid = indexrelid
            JOIN pg_class table_ ON table_.oid = indrelid
            WHERE
                table_.relname = %(table)s
                AND index.relname = %(index)s
        ) s
        JOIN pg_attribute ON attrelid = indrelid AND attnum = indkey
) s
ORDER BY attnum
''',
            {'table': table, 'index': index}, cacheable=True, log_level=4)))
    else: raise NotImplementedError("Can't list index columns for "+module+
        ' database')
745

    
746
def constraint_cols(db, table, constraint):
    '''Returns the names of the columns in a table constraint, in order.
    For UNIQUE constraints, prefer index_cols(), which also handles
    UNIQUE indexes.'''
    module = util.root_module(db.db)
    if module == 'psycopg2':
        return list(values(run_query(db, '''\
SELECT attname
FROM pg_constraint
JOIN pg_class ON pg_class.oid = conrelid
JOIN pg_attribute ON attrelid = conrelid AND attnum = ANY (conkey)
WHERE
    relname = %(table)s
    AND conname = %(constraint)s
ORDER BY attnum
''',
            {'table': table, 'constraint': constraint})))
    else: raise NotImplementedError("Can't list constraint columns for "+module+
        ' database')
762

    
763
row_num_col = '_row_num' # name of the serial pkey column added by add_row_num()
764

    
765
def index_col(db, col):
    '''Adds an index on a column if it doesn't already exist.'''
    assert sql_gen.is_table_col(col)
    
    table = col.table
    # the index is named after the qualified column, sanitized
    index = sql_gen.as_Table(clean_name(str(col)))
    col = sql_gen.to_name_only_col(col)
    try: run_query(db, 'CREATE INDEX '+index.to_str(db)+' ON '+table.to_str(db)
        +' ('+col.to_str(db)+')', recover=True, cacheable=True, log_level=3)
    except DuplicateTableException: pass # index already existed
775

    
776
def index_pkey(db, table, recover=None):
    '''Turns a table's first column into its primary key.
    @pre The table must not already have a primary key.
    '''
    table = sql_gen.as_Table(table)
    constraint = sql_gen.as_Table(table.name+'_pkey')
    pkey_col = sql_gen.to_name_only_col(pkey(db, table, recover))
    query = ('ALTER TABLE '+table.to_str(db)+' ADD CONSTRAINT '
        +constraint.to_str(db)+' PRIMARY KEY('+pkey_col.to_str(db)+')')
    run_query(db, query, recover=recover, log_level=3)
787

    
788
def add_row_num(db, table):
    '''Adds a row number column to a table. Its name is in row_num_col. It will
    be the primary key.'''
    table_str = sql_gen.as_Table(table).to_str(db)
    query = ('ALTER TABLE '+table_str+' ADD COLUMN '+row_num_col
        +' serial NOT NULL PRIMARY KEY')
    run_query(db, query, log_level=3)
794

    
795
def tables(db, schema='public', table_like='%'):
    '''Lists the names of the tables in a schema whose names match a SQL LIKE
    pattern. Supports PostgreSQL and MySQL.'''
    driver = util.root_module(db.db)
    params = {'schema': schema, 'table_like': table_like}
    if driver == 'psycopg2':
        pg_query = '''\
SELECT tablename
FROM pg_tables
WHERE
    schemaname = %(schema)s
    AND tablename LIKE %(table_like)s
ORDER BY tablename
'''
        return values(run_query(db, pg_query, params, cacheable=True))
    if driver == 'MySQLdb':
        return values(run_query(db, 'SHOW TABLES LIKE %(table_like)s', params,
            cacheable=True))
    raise NotImplementedError("Can't list tables for "+driver+' database')
812

    
813
##### Database management
814

    
815
def empty_db(db, schema='public', **kw_args):
    '''Truncates every table in a schema.
    For kw_args, see tables()'''
    for table_name in tables(db, schema, **kw_args):
        truncate(db, table_name, schema)
818

    
819
##### Heuristic queries
820

    
821
def put(db, table, row, pkey_=None, row_ct_ref=None):
822
    '''Recovers from errors.
823
    Only works under PostgreSQL (uses INSERT RETURNING).
824
    '''
825
    if pkey_ == None: pkey_ = pkey(db, table, recover=True)
826
    
827
    try:
828
        cur = insert(db, table, row, pkey_, recover=True)
829
        if row_ct_ref != None and cur.rowcount >= 0:
830
            row_ct_ref[0] += cur.rowcount
831
        return value(cur)
832
    except DuplicateKeyException, e:
833
        return value(select(db, table, [pkey_],
834
            util.dict_subset_right_join(row, e.cols), recover=True))
835

    
836
def get(db, table, row, pkey, row_ct_ref=None, create=False):
    '''Fetches the pkey of a matching row, optionally creating the row if it's
    missing.
    Recovers from errors'''
    try:
        return value(select(db, table, [pkey], row, limit=1, recover=True))
    except StopIteration: # no matching row
        if create: return put(db, table, row, pkey, row_ct_ref) # insert new row
        raise
842

    
843
def put_table(db, out_table, in_tables, mapping, row_ct_ref=None):
    '''Bulk-upserts rows from the input tables into out_table, retrying with
    progressively stricter filters until the insert succeeds, and returns a
    column of output pkeys keyed by input pkey.
    Recovers from errors.
    Only works under PostgreSQL (uses INSERT RETURNING).
    @param in_tables The main input table to select from, followed by a list of
        tables to join with it using the main input table's pkey
    @param mapping dict(out_table_col=in_table_col, ...)
        * out_table_col: sql_gen.Col|str
        * in_table_col: sql_gen.Col Wrap literal values in a sql_gen.NamedCol
    @param row_ct_ref list|None single-element list; element 0 is incremented
        by the insert's rowcount when available
    @return sql_gen.Col Where the output pkeys are made available
    '''
    out_table = sql_gen.as_Table(out_table)
    for in_table_col in mapping.itervalues():
        assert isinstance(in_table_col, sql_gen.Col)
    
    # All temp tables created here share the out_table's name as a prefix
    temp_prefix = out_table.name
    pkeys = sql_gen.Table(temp_prefix+'_pkeys') # accumulates (in_pkey, out_pkey)
    
    # Create input joins from list of input tables
    in_tables_ = in_tables[:] # don't modify input!
    in_tables0 = in_tables_.pop(0) # first table is separate
    in_pkey = pkey(db, in_tables0, recover=True)
    in_pkey_col = sql_gen.as_Col(in_pkey, in_tables0)
    input_joins = [in_tables0]+[sql_gen.Join(v, {in_pkey: sql_gen.join_same})
        for v in in_tables_]
    
    db.log_debug('Joining together input tables')
    # Place in new table for speed and so don't modify input if values edited
    in_table = sql_gen.Table(temp_prefix+'_in')
    flatten_cols = filter(sql_gen.is_table_col, mapping.values())
    # flatten() rewrites the joined input as a single table; the returned dict
    # remaps mapping's values to columns of that flattened table
    mapping = dicts.join(mapping, flatten(db, in_table, input_joins,
        flatten_cols, preserve=[in_pkey_col], start=0))
    input_joins = [in_table] # from now on, select from the flattened copy
    
    out_pkey = pkey(db, out_table, recover=True)
    out_pkey_col = sql_gen.as_Col(out_pkey, out_table)
    
    pkeys_names = [in_pkey, out_pkey]
    pkeys_cols = [in_pkey_col, out_pkey_col]
    
    # single-element list so the closure below can mutate it (Python 2 has no
    # nonlocal)
    pkeys_table_exists_ref = [False]
    def insert_into_pkeys(joins, cols):
        # Appends the selected (in_pkey, out_pkey) rows to the pkeys table,
        # creating it on first use
        query, params = mk_select(db, joins, cols, order_by=None, start=0)
        if pkeys_table_exists_ref[0]:
            insert_select(db, pkeys, pkeys_names, query, params)
        else:
            run_query_into(db, query, params, into=pkeys)
            pkeys_table_exists_ref[0] = True
    
    # Shared state mutated by the error handlers below to constrain retries
    limit_ref = [None] # set to 0 by remove_all_rows() to select nothing
    conds = set() # extra WHERE conditions added by remove_rows()
    distinct_on = []
    def mk_main_select(joins, cols):
        # Builds the retry-aware SELECT over the input, applying whatever
        # filters the error handlers have accumulated so far
        return mk_select(db, joins, cols, conds, distinct_on,
            limit=limit_ref[0], start=0)
    
    def log_exc(e):
        db.log_debug('Caught exception: '+exc.str_(e, first_line_only=True))
    def remove_all_rows(msg):
        # Last-resort handler: give up on inserting anything and emit NULL
        # out_pkeys for every input row
        warnings.warn(DbWarning(msg))
        db.log_debug(msg.partition('\n')[0])
        db.log_debug('Returning NULL for all rows')
        limit_ref[0] = 0 # just create an empty pkeys table
    def ignore(in_col, value):
        in_col_str = str(in_col)
        db.log_debug('Adding index on '+in_col_str+' to enable fast filtering')
        index_col(db, in_col)
        db.log_debug('Ignoring rows with '+in_col_str+' = '+repr(value))
    def remove_rows(in_col, value):
        # Excludes rows whose in_col equals value from all future retries
        ignore(in_col, value)
        cond = (in_col, sql_gen.CompareCond(value, '!='))
        assert cond not in conds # avoid infinite loops
        conds.add(cond)
    def invalid2null(in_col, value):
        # Replaces an invalid value with NULL in the flattened input copy
        # (safe because in_table is our own temp table, not the caller's data)
        ignore(in_col, value)
        update(db, in_table, [(in_col, None)],
            sql_gen.ColValueCond(in_col, value))
    
    # Do inserts and selects
    join_cols = {} # out_col -> in_col pairs used to detect existing rows
    insert_out_pkeys = sql_gen.Table(temp_prefix+'_insert_out_pkeys')
    insert_in_pkeys = sql_gen.Table(temp_prefix+'_insert_in_pkeys')
    while True:
        has_joins = join_cols != {}
        
        # Prepare to insert new rows
        insert_joins = input_joins[:] # don't modify original!
        insert_args = dict(recover=True, cacheable=False)
        if has_joins:
            # Anti-join against out_table so already-present rows are skipped
            distinct_on = [v.to_Col() for v in join_cols.values()]
            insert_joins.append(sql_gen.Join(out_table, join_cols,
                sql_gen.filter_out))
        else:
            # No duplicates seen yet: capture the generated pkeys directly
            insert_args.update(dict(returning=out_pkey, into=insert_out_pkeys))
        
        db.log_debug('Inserting new rows')
        try:
            cur = insert_select(db, out_table, mapping.keys(),
                *mk_main_select(insert_joins, mapping.values()), **insert_args)
            break # insert successful
        except DuplicateKeyException, e:
            log_exc(e)
            
            # Widen the anti-join to include the newly violated unique cols
            old_join_cols = join_cols.copy()
            join_cols.update(util.dict_subset(mapping, e.cols))
            db.log_debug('Ignoring existing rows, comparing on '+str(join_cols))
            assert join_cols != old_join_cols # avoid infinite loops
        except NullValueException, e:
            log_exc(e)
            
            out_col, = e.cols # NOT NULL violations involve exactly one column
            try: in_col = mapping[out_col]
            except KeyError:
                remove_all_rows('Missing mapping for NOT NULL '+out_col)
            else: remove_rows(in_col, None)
        except FunctionValueException, e:
            log_exc(e)
            
            assert e.name == out_table.name
            out_col = 'value' # assume function param was named "value"
            invalid2null(mapping[out_col], e.value)
        except DatabaseErrors, e:
            log_exc(e)
            
            remove_all_rows('No handler for exception: '+exc.str_(e))
        # after exception handled, rerun loop with additional constraints
    
    if row_ct_ref != None and cur.rowcount >= 0:
        row_ct_ref[0] += cur.rowcount
    
    if has_joins:
        # Existing and inserted rows alike can be found by joining on join_cols
        select_joins = input_joins+[sql_gen.Join(out_table, join_cols)]
        db.log_debug('Getting output pkeys of existing/inserted rows')
        insert_into_pkeys(select_joins, pkeys_cols)
    else:
        # No join columns available: pair input and output pkeys by row order,
        # relying on the insert preserving the select's row order
        add_row_num(db, insert_out_pkeys) # for joining with input pkeys
        
        db.log_debug('Getting input pkeys for rows in insert')
        run_query_into(db, *mk_main_select(input_joins, [in_pkey]),
            into=insert_in_pkeys)
        add_row_num(db, insert_in_pkeys) # for joining with output pkeys
        
        assert table_row_count(db, insert_out_pkeys) == table_row_count(db,
            insert_in_pkeys)
        
        db.log_debug('Joining together output and input pkeys')
        pkey_joins = [insert_in_pkeys, sql_gen.Join(insert_out_pkeys,
            {row_num_col: sql_gen.join_same_not_null})]
        insert_into_pkeys(pkey_joins, pkeys_names)
    
    db.log_debug('Adding pkey on returned pkeys table to enable fast joins')
    index_pkey(db, pkeys)
    
    # Input rows filtered out by the error handlers get an explicit NULL pkey
    db.log_debug("Setting missing rows' pkeys to NULL")
    missing_rows_joins = input_joins+[sql_gen.Join(pkeys,
        {in_pkey: sql_gen.join_same_not_null}, sql_gen.filter_out)]
        # must use join_same_not_null or query will take forever
    insert_into_pkeys(missing_rows_joins,
        [in_pkey_col, sql_gen.NamedCol(out_pkey, None)])
    
    # Every input row must now have exactly one entry in pkeys
    assert table_row_count(db, pkeys) == table_row_count(db, in_table)
    
    return sql_gen.Col(out_pkey, pkeys)
1005

    
1006
##### Data cleanup
1007

    
1008
def cleanup_table(db, table, cols):
    '''Trims surrounding whitespace in the given columns and replaces empty
    strings and the \\N marker with NULL.'''
    table_str = sql_gen.as_Table(table).to_str(db)
    esc_cols = [esc_name(db, name) for name in cols]
    
    assignments = ',\n'.join('\n'+col
        +' = nullif(nullif(trim(both from '+col+"), %(null0)s), %(null1)s)"
        for col in esc_cols)
    run_query(db, 'UPDATE '+table_str+' SET\n'+assignments,
        dict(null0='', null1=r'\N'))
(23-23/35)