Project

General

Profile

1
# Database access
2

    
3
import copy
4
import operator
5
import re
6
import warnings
7

    
8
import exc
9
import dicts
10
import iters
11
import lists
12
from Proxy import Proxy
13
import rand
14
import sql_gen
15
import strings
16
import util
17

    
18
##### Exceptions
19

    
20
def get_cur_query(cur, input_query=None, input_params=None):
    '''Returns the last query run on the cursor, as reported by the driver.
    Falls back to a representation of the input query/params if the driver
    doesn't expose the executed query (or exposes it as None).'''
    if hasattr(cur, 'query'): raw_query = cur.query # psycopg2
    elif hasattr(cur, '_last_executed'): raw_query = cur._last_executed # MySQLdb
    else: raw_query = None
    
    if raw_query is None:
        return '[input] '+strings.ustr(input_query)+' % '+repr(input_params)
    return raw_query
27

    
28
def _add_cursor_info(e, *args, **kw_args):
    '''Annotates an exception with the cursor's current query.
    For params, see get_cur_query()'''
    msg = 'query: '+str(get_cur_query(*args, **kw_args))
    exc.add_msg(e, msg)
31

    
32
class DbException(exc.ExceptionWithCause):
    '''Base class for all database errors; optionally records the query that
    was running on the given cursor when the error occurred.'''
    def __init__(self, msg, cause=None, cur=None):
        exc.ExceptionWithCause.__init__(self, msg, cause, cause_newline=True)
        if cur is not None: _add_cursor_info(self, cur)
36

    
37
class ExceptionWithName(DbException):
    '''A database error pertaining to a named object (table, function, ...).'''
    def __init__(self, name, cause=None):
        DbException.__init__(self, 'for name: '+str(name), cause)
        self.name = name
41

    
42
class ExceptionWithNameValue(DbException):
    '''A database error pertaining to a named object and an offending value.'''
    def __init__(self, name, value, cause=None):
        msg = 'for name: '+str(name)+'; value: '+repr(value)
        DbException.__init__(self, msg, cause)
        self.name = name
        self.value = value
48

    
49
class ConstraintException(DbException):
    '''Raised when a named constraint on one or more columns is violated.'''
    def __init__(self, name, cols, cause=None):
        msg = 'Violated '+name+' constraint on columns: '+', '.join(cols)
        DbException.__init__(self, msg, cause)
        self.name = name
        self.cols = cols
55

    
56
class NameException(DbException):
    '''Raised for identifiers containing unsafe characters.'''
    pass
57

    
58
class DuplicateKeyException(ConstraintException):
    '''Unique-constraint (duplicate key) violation.'''
    pass
59

    
60
class NullValueException(ConstraintException):
    '''NOT NULL constraint violation.'''
    pass
61

    
62
class FunctionValueException(ExceptionWithNameValue):
    '''Raised when a database function rejects an input value.'''
    pass
63

    
64
class DuplicateTableException(ExceptionWithName):
    '''Raised when creating a table that already exists.'''
    pass
65

    
66
class DuplicateFunctionException(ExceptionWithName):
    '''Raised when creating a function that already exists.'''
    pass
67

    
68
class EmptyRowException(DbException):
    '''Raised when a query unexpectedly produces no row.'''
    pass
69

    
70
##### Warnings
71

    
72
class DbWarning(UserWarning):
    '''Base class for database-related warnings.'''
    pass
73

    
74
##### Result retrieval
75

    
76
def col_names(cur):
    '''Iterates over the column names in a cursor's result description.'''
    return (col_desc[0] for col_desc in cur.description)
77

    
78
def rows(cur):
    '''Iterates over the cursor's rows until fetchone() returns None.'''
    # iter(callable, sentinel) calls fetchone() until it returns the sentinel
    return iter(cur.fetchone, None)
79

    
80
def consume_rows(cur):
    '''Used to fetch all rows so result will be cached'''
    # inline rows(cur): drain fetchone() until it returns None
    iters.consume_iter(iter(cur.fetchone, None))
83

    
84
def next_row(cur):
    '''Returns the next row; raises StopIteration if there are none left.'''
    return rows(cur).next()
85

    
86
def row(cur):
    '''Returns the first row, then drains the cursor so the result is cached.'''
    first = next_row(cur)
    consume_rows(cur)
    return first
90

    
91
def next_value(cur):
    '''Returns the first column of the next row.'''
    row_ = next_row(cur)
    return row_[0]
92

    
93
def value(cur):
    '''Returns the first column of the first row, draining the cursor.'''
    row_ = row(cur)
    return row_[0]
94

    
95
def values(cur):
    '''Iterates over the first column of each row.'''
    # inline next_value(): first column of each successive row
    return iters.func_iter(lambda: next_row(cur)[0])
96

    
97
def value_or_none(cur):
    '''Like value(), but returns None instead of raising when there are no
    rows.'''
    try:
        return value(cur)
    except StopIteration:
        return None
100

    
101
##### Input validation
102

    
103
def clean_name(name):
    '''Makes a name safe to use as an SQL identifier: dots become underscores
    and all other non-word characters are removed.'''
    underscored = name.replace('.', '_')
    return re.sub(r'\W', '', underscored)
104

    
105
def check_name(name):
    '''Raises NameException unless name contains only word characters.'''
    if re.search(r'\W', name) is not None:
        raise NameException('Name "'+name
            +'" may contain only alphanumeric characters and _')
108

    
109
def esc_name_by_module(module, name, ignore_case=False):
    '''Quotes an identifier using the given DB-API module's quote character.
    Embedded quote characters are stripped from the name, not escaped.
    @param ignore_case If set (psycopg2 only), the name is validated and left
        unquoted, because quoting disables case-insensitive matching.
    '''
    if module == 'psycopg2' or module is None:
        if ignore_case:
            # Don't enclose in quotes because this disables case-insensitivity
            check_name(name)
            return name
        quote = '"'
    elif module == 'MySQLdb':
        quote = '`'
    else:
        raise NotImplementedError("Can't escape name for "+module+' database')
    return quote+name.replace(quote, '')+quote
119

    
120
def esc_name_by_engine(engine, name, **kw_args):
    '''Quotes an identifier for the given engine name (e.g. 'PostgreSQL').'''
    module = db_engines[engine][0]
    return esc_name_by_module(module, name, **kw_args)
122

    
123
def esc_name(db, name, **kw_args):
    '''Quotes an identifier for the given DbConn's underlying driver.'''
    module = util.root_module(db.db)
    return esc_name_by_module(module, name, **kw_args)
125

    
126
def qual_name(db, schema, table):
    '''Returns the escaped, optionally schema-qualified table name.'''
    table = esc_name(db, table)
    if schema is None: return table
    return esc_name(db, schema)+'.'+table
131

    
132
##### Database connections
133

    
134
# Keys recognized in a db_config dict, in canonical order
db_config_names = ['engine', 'host', 'user', 'password', 'database', 'schemas']
135

    
136
# Maps engine name -> (DB-API module name, renames applied to db_config keys
# before being passed to module.connect())
db_engines = {
    'MySQL': ('MySQLdb', {'password': 'passwd', 'database': 'db'}),
    'PostgreSQL': ('psycopg2', {}),
}
140

    
141
DatabaseErrors_set = set([DbException])
142
DatabaseErrors = tuple(DatabaseErrors_set)
143

    
144
def _add_module(module):
    '''Registers a DB-API module's DatabaseError class so it's recognized as a
    database error (see DatabaseErrors).'''
    global DatabaseErrors
    DatabaseErrors_set.add(module.DatabaseError)
    DatabaseErrors = tuple(DatabaseErrors_set)
148

    
149
def db_config_str(db_config):
    '''One-line description of a connection config, for log/error messages.'''
    engine = db_config['engine']
    database = db_config['database']
    return engine+' database '+database
151

    
152
def _query_lookup(query, params):
    '''Builds the hashable key used for the query-results cache.'''
    return (query, dicts.make_hashable(params))
153

    
154
# No-op debug logger; the default for DbConn(log_debug=...)
log_debug_none = lambda msg, level=2: None
155

    
156
class DbConn:
    '''A lazily-opened database connection with optional query-result caching,
    savepoint tracking, and autocommit support.
    
    The underlying DB-API connection is available as self.db and is not opened
    until first accessed.
    '''
    def __init__(self, db_config, serializable=True, autocommit=False,
        caching=True, log_debug=log_debug_none):
        '''
        @param db_config dict with keys from db_config_names
        @param serializable Whether to use SERIALIZABLE transaction isolation
        @param autocommit Whether to commit after each query outside savepoints
        @param caching Whether to cache query results (see run_query())
        @param log_debug Callback(msg, level) for debug logging
        '''
        self.db_config = db_config
        self.serializable = serializable
        self.autocommit = autocommit
        self.caching = caching
        self.log_debug = log_debug
        # debug mode is on iff a real logger was provided
        self.debug = log_debug != log_debug_none
        
        self.__db = None # the DB-API connection, opened lazily by _db()
        self.query_results = {} # cache: _query_lookup() key -> CacheCursor
        self._savepoint = 0 # current savepoint nesting depth
    
    def __getattr__(self, name):
        # Only 'db' is dynamically provided; anything else is an error
        if name == '__dict__': raise Exception('getting __dict__')
        if name == 'db': return self._db()
        else: raise AttributeError()
    
    def __getstate__(self):
        '''Support for pickling: drop the unpicklable callback/connection.'''
        state = copy.copy(self.__dict__) # shallow copy
        state['log_debug'] = None # don't pickle the debug callback
        state['_DbConn__db'] = None # don't pickle the connection
        return state
    
    def connected(self): return self.__db != None # connection opened yet?
    
    def _db(self):
        '''Opens (once) and returns the underlying DB-API connection.'''
        if self.__db == None:
            # Process db_config
            db_config = self.db_config.copy() # don't modify input!
            schemas = db_config.pop('schemas', None)
            module_name, mappings = db_engines[db_config.pop('engine')]
            module = __import__(module_name)
            _add_module(module)
            # rename config keys to the driver's expected kwarg names
            for orig, new in mappings.iteritems():
                try: util.rename_key(db_config, orig, new)
                except KeyError: pass
            
            # Connect
            self.__db = module.connect(**db_config)
            
            # Configure connection
            if self.serializable and not self.autocommit: run_raw_query(self,
                'SET TRANSACTION ISOLATION LEVEL SERIALIZABLE')
            if schemas != None:
                # prepend the requested schemas to the search_path
                schemas_ = ''.join((esc_name(self, s)+', '
                    for s in schemas.split(',')))
                run_raw_query(self, "SELECT set_config('search_path', \
%s || current_setting('search_path'), false)", [schemas_])
        
        return self.__db
    
    class DbCursor(Proxy):
        '''A cursor proxy that records rows as they're fetched and stores the
        completed result (or exception) in the owning DbConn's cache.'''
        def __init__(self, outer):
            Proxy.__init__(self, outer.db.cursor())
            self.outer = outer
            self.query_results = outer.query_results
            self.query_lookup = None # cache key, set by execute()
            self.result = [] # fetched rows, or the raised exception
        
        def execute(self, query, params=None):
            # inserts are detected so only their failures get cached (below)
            self._is_insert = query.upper().find('INSERT') >= 0
            self.query_lookup = _query_lookup(query, params)
            try:
                try:
                    return_value = self.inner.execute(query, params)
                    self.outer.do_autocommit()
                finally: self.query = get_cur_query(self.inner)
            except Exception, e:
                _add_cursor_info(e, self, query, params)
                self.result = e # cache the exception as the result
                self._cache_result()
                raise
            # Fetch all rows so result will be cached
            if self.rowcount == 0 and not self._is_insert: consume_rows(self)
            return return_value
        
        def fetchone(self):
            row = self.inner.fetchone()
            if row != None: self.result.append(row)
            # otherwise, fetched all rows
            else: self._cache_result()
            return row
        
        def _cache_result(self):
            # For inserts, only cache exceptions since inserts are not
            # idempotent, but an invalid insert will always be invalid
            if self.query_results != None and (not self._is_insert
                or isinstance(self.result, Exception)):
                
                assert self.query_lookup != None
                self.query_results[self.query_lookup] = self.CacheCursor(
                    util.dict_subset(dicts.AttrsDictView(self),
                    ['query', 'result', 'rowcount', 'description']))
        
        class CacheCursor:
            '''Replays a cached result (rows or exception) with the DB-API
            cursor interface used by run_query().'''
            def __init__(self, cached_result): self.__dict__ = cached_result
            
            def execute(self, *args, **kw_args):
                if isinstance(self.result, Exception): raise self.result
                # otherwise, result is a rows list
                self.iter = iter(self.result)
            
            def fetchone(self):
                try: return self.iter.next()
                except StopIteration: return None
    
    def esc_value(self, value):
        '''Escapes a literal value for inclusion in SQL, using the driver.'''
        module = util.root_module(self.db)
        if module == 'psycopg2': str_ = self.db.cursor().mogrify('%s', [value])
        elif module == 'MySQLdb':
            import _mysql
            str_ = _mysql.escape_string(value)
        else: raise NotImplementedError("Can't escape value for "+module
            +' database')
        return strings.to_unicode(str_)
    
    def esc_name(self, name): return esc_name(self, name) # calls global func
    
    def run_query(self, query, params=None, cacheable=False, log_level=2,
        exc_log_level=None):
        '''Runs a query, consulting/populating the result cache if cacheable.
        @param exc_log_level The log_level if the query throws an exception.
            Defaults to the value of log_level.
        @return a cursor (possibly a CacheCursor replaying a cached result)
        '''
        assert query != None
        if exc_log_level == None: exc_log_level = log_level
        
        if not self.caching: cacheable = False
        used_cache = False
        success = False
        try:
            # Get cursor
            if cacheable:
                query_lookup = _query_lookup(query, params)
                try:
                    cur = self.query_results[query_lookup]
                    used_cache = True
                except KeyError: cur = self.DbCursor(self)
            else: cur = self.db.cursor()
            
            # Run query
            cur.execute(query, params)
            
            success = True
        finally:
            # logging happens in finally so failed queries are logged too
            if self.debug: # only compute msg if needed
                if not success: log_level = exc_log_level
                if used_cache: cache_status = 'Cache hit'
                elif cacheable: cache_status = 'Cache miss'
                else: cache_status = 'Non-cacheable'
                self.log_debug(cache_status+': '+strings.one_line(
                    str(get_cur_query(cur, query, params))), log_level)
        
        return cur
    
    def is_cached(self, query, params=None):
        '''Whether this exact query+params has a cached result.'''
        return _query_lookup(query, params) in self.query_results
    
    def with_savepoint(self, func):
        '''Runs func() inside a savepoint: released on success, rolled back on
        exception. Savepoints nest via the _savepoint counter.'''
        savepoint = 'level_'+str(self._savepoint)
        self.run_query('SAVEPOINT '+savepoint, log_level=4)
        self._savepoint += 1
        try:
            try: return_val = func()
            finally:
                # restore depth before issuing ROLLBACK/RELEASE below
                self._savepoint -= 1
                assert self._savepoint >= 0
        except:
            self.run_query('ROLLBACK TO SAVEPOINT '+savepoint, log_level=4)
            raise
        else:
            self.run_query('RELEASE SAVEPOINT '+savepoint, log_level=4)
            self.do_autocommit()
            return return_val
    
    def do_autocommit(self):
        '''Autocommits if outside savepoint'''
        assert self._savepoint >= 0
        if self.autocommit and self._savepoint == 0:
            self.log_debug('Autocommiting')
            self.db.commit()
339

    
340
# Alias: connect(db_config, ...) constructs a DbConn
connect = DbConn
341

    
342
##### Querying
343

    
344
def run_raw_query(db, *args, **kw_args):
    '''Runs a query directly, without error recovery.
    For params, see DbConn.run_query()'''
    return db.run_query(*args, **kw_args)
347

    
348
def mogrify(db, query, params):
    '''Returns the query with params substituted, as the driver would send it.
    Only supported for psycopg2.'''
    module = util.root_module(db.db)
    if module != 'psycopg2':
        raise NotImplementedError("Can't mogrify query for "+module+
            ' database')
    return db.db.cursor().mogrify(query, params)
353

    
354
##### Recoverable querying
355

    
356
# Runs func() within a savepoint on db (see DbConn.with_savepoint())
def with_savepoint(db, func): return db.with_savepoint(func)
357

    
358
def run_query(db, query, params=None, recover=None, cacheable=False, **kw_args):
    '''Runs a query, optionally inside a savepoint, translating known
    PostgreSQL error messages into specific exception classes.
    For params, see run_raw_query()
    @param recover Whether to run in a savepoint and parse driver errors into
        DuplicateKeyException, NullValueException, etc. on failure
    '''
    if recover == None: recover = False
    
    try:
        def run(): return run_raw_query(db, query, params, cacheable, **kw_args)
        if recover and not db.is_cached(query, params):
            return with_savepoint(db, run)
        else: return run() # don't need savepoint if cached
    except Exception, e:
        if not recover: raise # need savepoint to run index_cols()
        msg = exc.str_(e)
        
        # Each pattern below matches a PostgreSQL error message format;
        # the first match wins and is re-raised as a specific exception.
        match = re.search(r'duplicate key value violates unique constraint '
            r'"((_?[^\W_]+)_[^"]+?)"', msg)
        if match:
            constraint, table = match.groups()
            # look up the constraint's columns (requires the savepoint above)
            try: cols = index_cols(db, table, constraint)
            except NotImplementedError: raise e
            else: raise DuplicateKeyException(constraint, cols, e)
        
        match = re.search(r'null value in column "(\w+?)" violates not-null '
            r'constraint', msg)
        if match: raise NullValueException('NOT NULL', [match.group(1)], e)
        
        match = re.search(r'\b(?:invalid input (?:syntax|value)\b.*?'
            r'|date/time field value out of range): "(.+?)"\n'
            r'(?:(?s).*?)\bfunction "(\w+?)".*?\bat assignment', msg)
        if match:
            value, name = match.groups()
            raise FunctionValueException(name, strings.to_unicode(value), e)
        
        match = re.search(r'relation "(\w+?)" already exists', msg)
        if match: raise DuplicateTableException(match.group(1), e)
        
        match = re.search(r'function "(\w+?)" already exists', msg)
        if match: raise DuplicateFunctionException(match.group(1), e)
        
        raise # no specific exception raised
397

    
398
##### Basic queries
399

    
400
def next_version(name):
    '''Returns the name with the next version prefix ('v<#>_').
    The version # is prepended so it won't be removed if the name is truncated.
    '''
    match = re.match(r'^v(\d+)_(.*)$', name)
    if match is None: return 'v1_'+name # first existing name was version 0
    version, base = match.groups()
    return 'v'+str(int(version)+1)+'_'+base
408

    
409
def run_query_into(db, query, params, into=None, *args, **kw_args):
    '''Outputs a query to a temp table.
    For params, see run_query().
    @param into sql_gen.Table|None Table to CREATE ... AS the query into; its
        name is mutated (next_version()) if the table already exists, and its
        schema is cleared when a temp table is used.
    '''
    if into == None: return run_query(db, query, params, *args, **kw_args)
    else: # place rows in temp table
        assert isinstance(into, sql_gen.Table)
        
        # recovery is required so DuplicateTableException can be caught below
        kw_args['recover'] = True
        kw_args.setdefault('exc_log_level', kw_args.get('log_level', 2) + 2)
            # by default, will have exc_log_level=4
        
        temp = not db.debug # tables are created as permanent in debug mode
        # "temporary tables cannot specify a schema name", so remove schema
        if temp: into.schema = None
        
        # retry with a new versioned name until the table doesn't exist yet
        while True:
            try:
                create_query = 'CREATE'
                if temp: create_query += ' TEMP'
                create_query += ' TABLE '+into.to_str(db)+' AS '+query
                
                return run_query(db, create_query, params, *args, **kw_args)
                    # CREATE TABLE AS sets rowcount to # rows in query
            except DuplicateTableException, e:
                into.name = next_version(into.name)
                # try again with next version of name

    
437
order_by_pkey = object() # tells mk_select() to order by the pkey
438

    
439
distinct_on_all = object() # tells mk_select() to SELECT DISTINCT ON all columns
440

    
441
def mk_select(db, tables, fields=None, conds=None, distinct_on=[], limit=None,
    start=None, order_by=order_by_pkey, default_table=None):
    '''Builds a SELECT statement (without running it).
    @param tables The single table to select from, or a list of tables to join
        together, with tables after the first being sql_gen.Join objects
    @param fields Use None to select all fields in the table
    @param conds WHERE conditions: [(compare_left_side, compare_right_side),...]
        * container can be any iterable type
        * compare_left_side: sql_gen.Code|str (for col name)
        * compare_right_side: sql_gen.ValueCond|literal value
    @param distinct_on The columns to SELECT DISTINCT ON, or distinct_on_all to
        use all columns
    @return tuple(query, params)
    '''
    # NOTE: the mutable default distinct_on=[] is safe here because it is only
    # read, never mutated
    # Parse tables param
    if not lists.is_seq(tables): tables = [tables]
    tables = list(tables) # don't modify input! (list() copies input)
    table0 = sql_gen.as_Table(tables.pop(0)) # first table is separate
    
    # Parse other params
    if conds == None: conds = []
    elif isinstance(conds, dict): conds = conds.items()
    conds = list(conds) # don't modify input! (list() copies input)
    assert limit == None or type(limit) == int
    assert start == None or type(start) == int
    if order_by is order_by_pkey:
        # DISTINCT ON queries can't default to pkey ordering
        if distinct_on != []: order_by = None
        else: order_by = pkey(db, table0, recover=True)
    
    query = 'SELECT'
    
    def parse_col(col): return sql_gen.as_Col(col, default_table).to_str(db)
    
    # DISTINCT ON columns
    if distinct_on != []:
        query += ' DISTINCT'
        if distinct_on is not distinct_on_all:
            query += ' ON ('+(', '.join(map(parse_col, distinct_on)))+')'
    
    # Columns
    query += ' '
    if fields == None: query += '*'
    else: query += ', '.join(map(parse_col, fields))
    
    # Main table
    query += ' FROM '+table0.to_str(db)
    
    # Add joins
    left_table = table0
    for join_ in tables:
        table = join_.table
        
        # Parse special values
        if join_.type_ is sql_gen.filter_out: # filter no match
            # anti-join: keep only rows where the joined table's row is missing
            conds.append((sql_gen.Col(table_not_null_col(db, table), table),
                None))
        
        query += ' '+join_.to_str(db, left_table)
        
        left_table = table
    
    # missing tracks whether the query has no row-limiting clause at all
    missing = True
    if conds != []:
        query += ' WHERE '+(' AND '.join(('('+sql_gen.ColValueCond(l, r)
            .to_str(db)+')' for l, r in conds)))
        missing = False
    if order_by != None:
        query += ' ORDER BY '+sql_gen.as_Col(order_by, table0).to_str(db)
    if limit != None: query += ' LIMIT '+str(limit); missing = False
    if start != None:
        if start != 0: query += ' OFFSET '+str(start)
        missing = False
    if missing: warnings.warn(DbWarning(
        'SELECT statement missing a WHERE, LIMIT, or OFFSET clause: '+query))
    
    return (query, [])
517

    
518
def select(db, *args, **kw_args):
    '''Builds and runs a SELECT statement.
    For params, see mk_select() and run_query()'''
    # Pull out the run_query()-only options before delegating to mk_select()
    recover = kw_args.pop('recover', None)
    cacheable = kw_args.pop('cacheable', True)
    log_level = kw_args.pop('log_level', 2)
    query, params = mk_select(db, *args, **kw_args)
    return run_query(db, query, params, recover, cacheable, log_level=log_level)
526

    
527
def mk_insert_select(db, table, cols=None, select_query=None, params=None,
    returning=None, embeddable=False):
    '''Builds an INSERT ... SELECT statement (without running it).
    @param returning str|None An inserted column (such as pkey) to return
    @param embeddable Whether the query should be embeddable as a nested SELECT.
        Warning: If you set this and cacheable=True when the query is run, the
        query will be fully cached, not just if it raises an exception.
    @return tuple(query, params)
    '''
    table = sql_gen.as_Table(table)
    if cols == []: cols = None # no cols (all defaults) = unknown col names
    if cols != None: cols = [sql_gen.as_Col(v).to_str(db) for v in cols]
    if select_query == None: select_query = 'DEFAULT VALUES'
    if returning != None: returning = sql_gen.as_Col(returning, table)
    
    # Build query
    query = 'INSERT INTO '+table.to_str(db)
    if cols != None: query += ' ('+', '.join(cols)+')'
    query += ' '+select_query
    
    if returning != None:
        # RETURNING needs the bare column name, without a table qualifier
        returning_name = copy.copy(returning)
        returning_name.table = None
        returning_name = returning_name.to_str(db)
        query += ' RETURNING '+returning_name
    
    if embeddable:
        # INSERT can't be nested in a SELECT directly, so wrap it in a function
        assert returning != None
        
        # Create function
        # NOTE(review): this concatenation assumes cols != None when
        # embeddable is set — confirm callers always pass cols here
        function_name = '_'.join(['insert', table.name] + cols)
        return_type = 'SETOF '+returning.to_str(db)+'%TYPE'
        # retry with a new versioned name until the function doesn't exist yet
        while True:
            try:
                func_schema = None
                if not db.debug: func_schema = 'pg_temp'
                function = sql_gen.Table(function_name, func_schema).to_str(db)
                
                function_query = '''\
CREATE FUNCTION '''+function+'''() RETURNS '''+return_type+'''
    LANGUAGE sql
    AS $$'''+mogrify(db, query, params)+''';$$;
'''
                run_query(db, function_query, recover=True, cacheable=True,
                    exc_log_level=4)
                break # this version was successful
            except DuplicateFunctionException, e:
                function_name = next_version(function_name)
                # try again with next version of name
        
        # Return query that uses function
        func_table = sql_gen.NamedTable('f', sql_gen.CustomCode(function+'()'),
            [returning_name]) # AS clause requires function alias
        return mk_select(db, func_table, start=0, order_by=None)
    
    return (query, params)
582

    
583
def insert_select(db, *args, **kw_args):
    '''Builds and runs an INSERT ... SELECT statement.
    For params, see mk_insert_select() and run_query_into()
    @param into sql_gen.Table with suggested name of temp table to put RETURNING
        values in
    '''
    into = kw_args.pop('into', None)
    if into is not None: kw_args['embeddable'] = True # required to use a table
    recover = kw_args.pop('recover', None)
    cacheable = kw_args.pop('cacheable', True)
    query, params = mk_insert_select(db, *args, **kw_args)
    return run_query_into(db, query, params, into, recover=recover,
        cacheable=cacheable)
596

    
597
# Sentinel value, compared by identity (see insert())
default = object() # tells insert() to use the default value for a column
598

    
599
def insert(db, table, row, *args, **kw_args):
    '''Inserts a single row, which may contain the `default` sentinel.
    For params, see insert_select()'''
    if lists.is_seq(row): cols = None
    else:
        cols = row.keys()
        row = row.values()
    row = list(row) # ensure that "!= []" works
    
    # Replace `default` sentinels with DEFAULT; other cells become params.
    # (locals renamed so they don't shadow the module-level values()/value())
    placeholders = []
    query_params = []
    for cell in row:
        if cell is default: placeholders.append('DEFAULT')
        else:
            placeholders.append('%s')
            query_params.append(cell)
    
    # Build the VALUES clause; None means all-defaults ('DEFAULT VALUES')
    if query_params != []:
        select_query = ' VALUES ('+(', '.join(placeholders))+')'
    else: select_query = None
    
    return insert_select(db, table, cols, select_query, query_params, *args,
        **kw_args)
621

    
622
def mk_update(db, table, changes=None, cond=None):
    '''Builds an UPDATE statement (without running it).
    @param changes [(col, new_value),...]
        * container can be any iterable type
        * col: sql_gen.Code|str (for col name)
        * new_value: sql_gen.Code|literal value
    @param cond sql_gen.Code WHERE condition. e.g. use sql_gen.*Cond objects.
    @return str query
    '''
    query = 'UPDATE '+sql_gen.as_Table(table).to_str(db)+'\nSET\n'
    # one "col = value" assignment per line
    query += ',\n'.join((sql_gen.to_name_only_col(col, table).to_str(db)+' = '
        +sql_gen.as_Value(new_value).to_str(db) for col, new_value in changes))
    if cond != None: query += ' WHERE '+cond.to_str(db)
    
    return query
637

    
638
def update(db, *args, **kw_args):
    '''Builds and runs an UPDATE statement.
    For params, see mk_update() and run_query()'''
    recover = kw_args.pop('recover', None)
    query = mk_update(db, *args, **kw_args)
    return run_query(db, query, [], recover)
643

    
644
def last_insert_id(db):
    '''Returns the autogenerated id of the last inserted row, or None if the
    driver doesn't support retrieving it.
    @param db DbConn
    '''
    module = util.root_module(db.db)
    if module == 'psycopg2': return value(run_query(db, 'SELECT lastval()'))
    # Fixed: insert_id() is a method of the underlying MySQLdb connection
    # (db.db), not of DbConn, whose __getattr__ raises AttributeError for any
    # name other than 'db'
    elif module == 'MySQLdb': return db.db.insert_id()
    else: return None
649

    
650
def truncate(db, table, schema='public'):
    '''Empties a table, cascading to tables that reference it.'''
    query = 'TRUNCATE '+qual_name(db, schema, table)+' CASCADE'
    return run_query(db, query)
652

    
653
def mk_flatten_mapping(db, into, cols, preserve=[], as_items=False):
    '''Creates a mapping from original column names (which may have collisions)
    to names that will be distinct among the columns' tables.
    This is meant to be used for several tables that are being joined together.
    @param cols The columns to combine. Duplicates will be removed.
    @param into The table for the new columns.
    @param preserve [sql_gen.Col...] Columns not to rename. Note that these
        columns will be included in the mapping even if they are not in cols.
        The tables of the provided Col objects will be changed to into, so make
        copies of them if you want to keep the original tables.
    @param as_items Whether to return a list of dict items instead of a dict
    @return dict(orig_col=new_col, ...)
        * orig_col: sql_gen.Col(orig_col_name, orig_table)
        * new_col: sql_gen.Col(orig_col_name, into)
        * All mappings use the into table so its name can easily be
          changed for all columns at once
    '''
    # NOTE: the mutable default preserve=[] is safe: the list itself is never
    # mutated (its Col elements are, as documented above)
    cols = lists.uniqify(cols)
    
    items = []
    for col in preserve:
        orig_col = copy.copy(col) # keep the original for the mapping key
        col.table = into # retarget the caller's Col (documented above)
        items.append((orig_col, col))
    preserve = set(preserve) # for fast membership tests below
    for col in cols:
        if col not in preserve:
            items.append((col, sql_gen.Col(clean_name(str(col)), into)))
    
    if not as_items: items = dict(items)
    return items
684

    
685
def flatten(db, into, joins, cols, limit=None, start=None, **kw_args):
    '''Selects the given (possibly colliding) columns into a table of
    distinctly-named columns.
    For params, see mk_flatten_mapping()
    @return See return value of mk_flatten_mapping()
    '''
    items = mk_flatten_mapping(db, into, cols, as_items=True, **kw_args)
    select_cols = [sql_gen.NamedCol(new.name, old) for old, new in items]
    query, params = mk_select(db, joins, select_cols, limit=limit, start=start)
    run_query_into(db, query, params, into=into)
    return dict(items)
694

    
695
##### Database structure queries
696

    
697
def table_row_count(db, table, recover=None):
    '''Returns the number of rows in the table.'''
    query, params = mk_select(db, table, [sql_gen.row_count], order_by=None,
        start=0)
    return value(run_query(db, query, params, recover=recover, log_level=3))
700

    
701
def table_cols(db, table, recover=None):
    '''Returns the table's column names, in order.'''
    # LIMIT 0 fetches no rows but still populates the cursor description
    cur = select(db, table, limit=0, order_by=None, recover=recover,
        log_level=4)
    return list(col_names(cur))
704

    
705
def pkey(db, table, recover=None):
    '''Returns the table's primary key, assumed to be its first column.'''
    cols = table_cols(db, table, recover)
    return cols[0]
708

    
709
# Conventional name of a NOT NULL marker column (see table_not_null_col())
not_null_col = 'not_null'
710

    
711
def table_not_null_col(db, table, recover=None):
    '''Name assumed to be the value of not_null_col. If not found, uses pkey.'''
    if not_null_col in table_cols(db, table, recover): return not_null_col
    return pkey(db, table, recover)
715

    
716
def index_cols(db, table, index):
    '''Returns the column names covered by an index, in order.
    Can also use this for UNIQUE constraints, because a UNIQUE index is
    automatically created. When you don't know whether something is a UNIQUE
    constraint or a UNIQUE index, use this function.'''
    module = util.root_module(db.db)
    if module == 'psycopg2':
        # The UNION's first branch handles plain column indexes; the second
        # parses expression indexes' column numbers out of pg_index.indexprs
        return list(values(run_query(db, '''\
SELECT attname
FROM
(
        SELECT attnum, attname
        FROM pg_index
        JOIN pg_class index ON index.oid = indexrelid
        JOIN pg_class table_ ON table_.oid = indrelid
        JOIN pg_attribute ON attrelid = indrelid AND attnum = ANY (indkey)
        WHERE
            table_.relname = %(table)s
            AND index.relname = %(index)s
    UNION
        SELECT attnum, attname
        FROM
        (
            SELECT
                indrelid
                , (regexp_matches(indexprs, E':varattno (\\\\d+)', 'g'))[1]::int
                    AS indkey
            FROM pg_index
            JOIN pg_class index ON index.oid = indexrelid
            JOIN pg_class table_ ON table_.oid = indrelid
            WHERE
                table_.relname = %(table)s
                AND index.relname = %(index)s
        ) s
        JOIN pg_attribute ON attrelid = indrelid AND attnum = indkey
) s
ORDER BY attnum
''',
            {'table': table, 'index': index}, cacheable=True, log_level=4)))
    else: raise NotImplementedError("Can't list index columns for "+module+
        ' database')
756

    
757
def constraint_cols(db, table, constraint):
    '''Returns the column names covered by a (non-index) constraint, in order.
    For UNIQUE constraints, use index_cols() instead.'''
    module = util.root_module(db.db)
    if module == 'psycopg2':
        return list(values(run_query(db, '''\
SELECT attname
FROM pg_constraint
JOIN pg_class ON pg_class.oid = conrelid
JOIN pg_attribute ON attrelid = conrelid AND attnum = ANY (conkey)
WHERE
    relname = %(table)s
    AND conname = %(constraint)s
ORDER BY attnum
''',
            {'table': table, 'constraint': constraint})))
    else: raise NotImplementedError("Can't list constraint columns for "+module+
        ' database')
773

    
774
# Name of the serial row-number column added by add_row_num()
row_num_col = '_row_num'
775

    
776
def index_col(db, col):
    '''Creates an index on a column, unless one already exists.'''
    assert sql_gen.is_table_col(col)
    
    target_table = col.table
    index_name = sql_gen.as_Table(clean_name(str(col)))
    name_only_col = sql_gen.to_name_only_col(col)
    
    query = ('CREATE INDEX '+index_name.to_str(db)+' ON '
        +target_table.to_str(db)+' ('+name_only_col.to_str(db)+')')
    try:
        run_query(db, query, recover=True, cacheable=True, log_level=3)
    except DuplicateTableException:
        pass # the index was already there
786

    
787
def index_pkey(db, table, recover=None):
    '''Promotes a table's first column to be its primary key.
    @pre The table must not already have a primary key.
    '''
    table = sql_gen.as_Table(table)
    constraint = sql_gen.as_Table(table.name+'_pkey')
    pkey_col = sql_gen.to_name_only_col(pkey(db, table, recover))
    
    query = ('ALTER TABLE '+table.to_str(db)+' ADD CONSTRAINT '
        +constraint.to_str(db)+' PRIMARY KEY('+pkey_col.to_str(db)+')')
    run_query(db, query, recover=recover, log_level=3)
798

    
799
def add_row_num(db, table):
    '''Appends a serial row-number column to a table and makes it the primary
    key. The column's name is in row_num_col.'''
    table_str = sql_gen.as_Table(table).to_str(db)
    query = ('ALTER TABLE '+table_str+' ADD COLUMN '+row_num_col
        +' serial NOT NULL PRIMARY KEY')
    run_query(db, query, log_level=3)
805

    
806
def tables(db, schema='public', table_like='%'):
    '''Iterates over the names of the tables in a schema whose names match a
    SQL LIKE pattern. Supports PostgreSQL (psycopg2) and MySQL (MySQLdb).'''
    module = util.root_module(db.db)
    params = {'schema': schema, 'table_like': table_like}
    
    if module == 'psycopg2':
        query = '''\
SELECT tablename
FROM pg_tables
WHERE
    schemaname = %(schema)s
    AND tablename LIKE %(table_like)s
ORDER BY tablename
'''
    elif module == 'MySQLdb':
        # MySQL has no equivalent of schemas in the PostgreSQL sense
        query = 'SHOW TABLES LIKE %(table_like)s'
    else:
        raise NotImplementedError("Can't list tables for "+module+' database')
    
    return values(run_query(db, query, params, cacheable=True))
823

    
824
##### Database management
825

    
826
def empty_db(db, schema='public', **kw_args):
    '''Truncates every table in a schema. For kw_args, see tables().'''
    for table_name in tables(db, schema, **kw_args):
        truncate(db, table_name, schema)
829

    
830
##### Heuristic queries
831

    
832
def put(db, table, row, pkey_=None, row_ct_ref=None):
833
    '''Recovers from errors.
834
    Only works under PostgreSQL (uses INSERT RETURNING).
835
    '''
836
    if pkey_ == None: pkey_ = pkey(db, table, recover=True)
837
    
838
    try:
839
        cur = insert(db, table, row, pkey_, recover=True)
840
        if row_ct_ref != None and cur.rowcount >= 0:
841
            row_ct_ref[0] += cur.rowcount
842
        return value(cur)
843
    except DuplicateKeyException, e:
844
        return value(select(db, table, [pkey_],
845
            util.dict_subset_right_join(row, e.cols), recover=True))
846

    
847
def get(db, table, row, pkey, row_ct_ref=None, create=False):
    '''Fetches a matching row's pkey value, optionally inserting the row when
    it's missing. Recovers from errors.'''
    try:
        return value(select(db, table, [pkey], row, limit=1, recover=True))
    except StopIteration: # no matching row found
        if create: return put(db, table, row, pkey, row_ct_ref) # insert it
        raise
853

    
854
def put_table(db, out_table, in_tables, mapping, row_ct_ref=None):
    '''Recovers from errors.
    Only works under PostgreSQL (uses INSERT RETURNING).
    @param in_tables The main input table to select from, followed by a list of
        tables to join with it using the main input table's pkey
    @param mapping dict(out_table_col=in_table_col, ...)
        * out_table_col: sql_gen.Col|str
        * in_table_col: sql_gen.Col Wrap literal values in a sql_gen.NamedCol
    @return sql_gen.Col Where the output pkeys are made available
    '''
    out_table = sql_gen.as_Table(out_table)
    for in_table_col in mapping.itervalues():
        assert isinstance(in_table_col, sql_gen.Col)
    
    def log_debug(msg): db.log_debug(msg, level=1.5)
    
    # Temp tables created by this function are named after the output table
    temp_prefix = out_table.name
    pkeys = sql_gen.Table(temp_prefix+'_pkeys')
    
    # Create input joins from list of input tables
    in_tables_ = in_tables[:] # don't modify input!
    in_tables0 = in_tables_.pop(0) # first table is separate
    in_pkey = pkey(db, in_tables0, recover=True)
    in_pkey_col = sql_gen.as_Col(in_pkey, in_tables0)
    input_joins = [in_tables0]+[sql_gen.Join(v,
        {in_pkey: sql_gen.join_same_not_null}) for v in in_tables_]
    
    log_debug('Joining together input tables')
    # Place in new table for speed and so don't modify input if values edited
    in_table = sql_gen.Table(temp_prefix+'_in')
    flatten_cols = filter(sql_gen.is_table_col, mapping.values())
    # flatten() materializes the joined inputs and (presumably) returns the
    # new locations of flatten_cols -- TODO confirm against flatten()'s docs
    mapping = dicts.join(mapping, flatten(db, in_table, input_joins,
        flatten_cols, preserve=[in_pkey_col], start=0))
    input_joins = [in_table]
    
    out_pkey = pkey(db, out_table, recover=True)
    out_pkey_col = sql_gen.as_Col(out_pkey, out_table)
    
    pkeys_names = [in_pkey, out_pkey]
    pkeys_cols = [in_pkey_col, out_pkey_col]
    
    # NOTE: one-element lists below are writable closure cells, since
    # Python 2 has no `nonlocal` statement
    pkeys_table_exists_ref = [False]
    def insert_into_pkeys(joins, cols):
        # Appends the select's rows to the pkeys table, creating it on first use
        query, params = mk_select(db, joins, cols, order_by=None, start=0)
        if pkeys_table_exists_ref[0]:
            insert_select(db, pkeys, pkeys_names, query, params)
        else:
            run_query_into(db, query, params, into=pkeys)
            pkeys_table_exists_ref[0] = True
    
    limit_ref = [None]
    conds = set()
    distinct_on = []
    def mk_main_select(joins, cols):
        # Reads conds/distinct_on/limit_ref at call time, so constraints added
        # by the error handlers below take effect on the next attempt
        return mk_select(db, joins, cols, conds, distinct_on,
            limit=limit_ref[0], start=0)
    
    # Error-recovery helpers used by the retry loop below
    def log_exc(e):
        log_debug('Caught exception: '+exc.str_(e, first_line_only=True))
    def remove_all_rows():
        log_debug('Returning NULL for all rows')
        limit_ref[0] = 0 # just create an empty pkeys table
    def ignore(in_col, value):
        in_col_str = str(in_col)
        log_debug('Adding index on '+in_col_str+' to enable fast filtering')
        index_col(db, in_col)
        log_debug('Ignoring rows with '+in_col_str+' = '+repr(value))
    def remove_rows(in_col, value):
        # Filters out input rows with the offending value
        ignore(in_col, value)
        cond = (in_col, sql_gen.CompareCond(value, '!='))
        assert cond not in conds # avoid infinite loops
        conds.add(cond)
    def invalid2null(in_col, value):
        # Replaces the offending value with NULL in the (copied) input table
        ignore(in_col, value)
        update(db, in_table, [(in_col, None)],
            sql_gen.ColValueCond(in_col, value))
    
    # Do inserts and selects
    join_cols = {}
    insert_out_pkeys = sql_gen.Table(temp_prefix+'_insert_out_pkeys')
    insert_in_pkeys = sql_gen.Table(temp_prefix+'_insert_in_pkeys')
    # Retry the insert, adding a filter or constraint for each error handled,
    # until it succeeds (possibly with all rows filtered out)
    while True:
        has_joins = join_cols != {}
        
        # Prepare to insert new rows
        insert_joins = input_joins[:] # don't modify original!
        insert_args = dict(recover=True, cacheable=False)
        if has_joins:
            # Duplicates were hit: filter out rows already in out_table
            distinct_on = [v.to_Col() for v in join_cols.values()]
            insert_joins.append(sql_gen.Join(out_table, join_cols,
                sql_gen.filter_out))
        else:
            insert_args.update(dict(returning=out_pkey, into=insert_out_pkeys))
        
        log_debug('Inserting new rows')
        try:
            cur = insert_select(db, out_table, mapping.keys(),
                *mk_main_select(insert_joins, mapping.values()), **insert_args)
            break # insert successful
        except DuplicateKeyException, e:
            log_exc(e)
            
            # Next attempt: compare on the conflicting columns as well
            old_join_cols = join_cols.copy()
            join_cols.update(util.dict_subset(mapping, e.cols))
            log_debug('Ignoring existing rows, comparing on '+str(join_cols))
            assert join_cols != old_join_cols # avoid infinite loops
        except NullValueException, e:
            log_exc(e)
            
            out_col, = e.cols
            try: in_col = mapping[out_col]
            except KeyError:
                log_debug('Missing mapping for NOT NULL '+out_col)
                remove_all_rows()
            else: remove_rows(in_col, None)
        except FunctionValueException, e:
            log_exc(e)
            
            assert e.name == out_table.name
            out_col = 'value' # assume function param was named "value"
            invalid2null(mapping[out_col], e.value)
        except DatabaseErrors, e:
            log_exc(e)
            
            # Unrecognized DB error: give up and return NULL pkeys for all rows
            msg = 'No handler for exception: '+exc.str_(e, first_line_only=True)
            warnings.warn(DbWarning(msg))
            log_debug(msg)
            remove_all_rows()
        # after exception handled, rerun loop with additional constraints
    
    if row_ct_ref != None and cur.rowcount >= 0:
        row_ct_ref[0] += cur.rowcount
    
    if has_joins:
        # Map each input row to its output pkey via the join columns
        select_joins = input_joins+[sql_gen.Join(out_table, join_cols)]
        log_debug('Getting output pkeys of existing/inserted rows')
        insert_into_pkeys(select_joins, pkeys_cols)
    else:
        # No join columns available, so pair input and output pkeys by their
        # position (row number) in the insert
        add_row_num(db, insert_out_pkeys) # for joining with input pkeys
        
        log_debug('Getting input pkeys for rows in insert')
        run_query_into(db, *mk_main_select(input_joins, [in_pkey]),
            into=insert_in_pkeys)
        add_row_num(db, insert_in_pkeys) # for joining with output pkeys
        
        assert table_row_count(db, insert_out_pkeys) == table_row_count(db,
            insert_in_pkeys)
        
        log_debug('Joining together output and input pkeys')
        pkey_joins = [insert_in_pkeys, sql_gen.Join(insert_out_pkeys,
            {row_num_col: sql_gen.join_same_not_null})]
        insert_into_pkeys(pkey_joins, pkeys_names)
    
    log_debug('Adding pkey on returned pkeys table to enable fast joins')
    index_pkey(db, pkeys)
    
    # Input rows that were filtered out by the error handlers get a NULL pkey
    log_debug("Setting missing rows' pkeys to NULL")
    missing_rows_joins = input_joins+[sql_gen.Join(pkeys,
        {in_pkey: sql_gen.join_same_not_null}, sql_gen.filter_out)]
        # must use join_same_not_null or query will take forever
    insert_into_pkeys(missing_rows_joins,
        [in_pkey_col, sql_gen.NamedCol(out_pkey, None)])
    
    # Every input row must now have exactly one entry in the pkeys table
    assert table_row_count(db, pkeys) == table_row_count(db, in_table)
    
    return sql_gen.Col(out_pkey, pkeys)
1020

    
1021
##### Data cleanup
1022

    
1023
def cleanup_table(db, table, cols):
    '''Trims surrounding whitespace in the given columns and replaces values
    that are then empty strings or the \\N placeholder with NULL.'''
    table_str = sql_gen.as_Table(table).to_str(db)
    esc_cols = [esc_name(db, name) for name in cols]
    
    assignments = [('\n'+col
        +' = nullif(nullif(trim(both from '+col+"), %(null0)s), %(null1)s)")
        for col in esc_cols]
    query = 'UPDATE '+table_str+' SET\n'+(',\n'.join(assignments))
    run_query(db, query, dict(null0='', null1=r'\N'))
(23-23/35)