# Database access

import copy
import operator
import re
import warnings

import exc
import dicts
import iters
import lists
from Proxy import Proxy
import rand
import sql_gen
import strings
import util

##### Exceptions

def get_cur_query(cur, input_query=None, input_params=None):
    raw_query = None
    if hasattr(cur, 'query'): raw_query = cur.query
    elif hasattr(cur, '_last_executed'): raw_query = cur._last_executed
    
    if raw_query != None: return raw_query
    else: return '[input] '+strings.ustr(input_query)+' % '+repr(input_params)

def _add_cursor_info(e, *args, **kw_args):
    '''For params, see get_cur_query()'''
    exc.add_msg(e, 'query: '+str(get_cur_query(*args, **kw_args)))

class DbException(exc.ExceptionWithCause):
    def __init__(self, msg, cause=None, cur=None):
        exc.ExceptionWithCause.__init__(self, msg, cause, cause_newline=True)
        if cur != None: _add_cursor_info(self, cur)

class ExceptionWithName(DbException):
    def __init__(self, name, cause=None):
        DbException.__init__(self, 'for name: '+str(name), cause)
        self.name = name

class ExceptionWithNameValue(DbException):
    def __init__(self, name, value, cause=None):
        DbException.__init__(self,
            'for name: '+str(name)+'; value: '+repr(value), cause)
        self.name = name
        self.value = value

class ConstraintException(DbException):
    def __init__(self, name, cols, cause=None):
        DbException.__init__(self, 'Violated '+name+' constraint on columns: '
            +(', '.join(cols)), cause)
        self.name = name
        self.cols = cols

class NameException(DbException): pass

class DuplicateKeyException(ConstraintException): pass

class NullValueException(ConstraintException): pass

class FunctionValueException(ExceptionWithNameValue): pass

class DuplicateTableException(ExceptionWithName): pass

class DuplicateFunctionException(ExceptionWithName): pass

class EmptyRowException(DbException): pass

##### Warnings

class DbWarning(UserWarning): pass

##### Result retrieval

def col_names(cur): return (col[0] for col in cur.description)

def rows(cur): return iter(lambda: cur.fetchone(), None)

def consume_rows(cur):
    '''Used to fetch all rows so result will be cached'''
    iters.consume_iter(rows(cur))

def next_row(cur): return rows(cur).next()

def row(cur):
    row_ = next_row(cur)
    consume_rows(cur)
    return row_

def next_value(cur): return next_row(cur)[0]

def value(cur): return row(cur)[0]

def values(cur): return iters.func_iter(lambda: next_value(cur))

def value_or_none(cur):
    try: return value(cur)
    except StopIteration: return None
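
# Hedged usage sketch for the result-retrieval helpers above: `cur` stands for
# any DB-API cursor (or a DbConn.DbCursor) on which a query has already been
# run; the query and table name are illustrative only.
#     cur.execute('SELECT count(*) FROM plant')
#     count = value(cur) # first column of the first row; fetches the rest too
#     # or, to stream rows lazily instead:
#     #     for row_ in rows(cur): ...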

##### Input validation

def clean_name(name): return re.sub(r'\W', r'', name.replace('.', '_'))

def check_name(name):
    if re.search(r'\W', name) != None: raise NameException('Name "'+name
        +'" may contain only alphanumeric characters and _')

def esc_name_by_module(module, name, ignore_case=False):
    if module == 'psycopg2' or module == None:
        if ignore_case:
            # Don't enclose in quotes because this disables case-insensitivity
            check_name(name)
            return name
        else: quote = '"'
    elif module == 'MySQLdb': quote = '`'
    else: raise NotImplementedError("Can't escape name for "+module+' database')
    return quote + name.replace(quote, '') + quote
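
# Example of the quoting behavior (a sketch; the results follow from the logic
# above and assume a name containing no quote characters):
#     esc_name_by_module('psycopg2', 'myTable') # -> '"myTable"'
#     esc_name_by_module('MySQLdb', 'myTable') # -> '`myTable`'
#     esc_name_by_module('psycopg2', 'mytable', ignore_case=True) # -> 'mytable'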

def esc_name_by_engine(engine, name, **kw_args):
    return esc_name_by_module(db_engines[engine][0], name, **kw_args)

def esc_name(db, name, **kw_args):
    return esc_name_by_module(util.root_module(db.db), name, **kw_args)

def qual_name(db, schema, table):
    def esc_name_(name): return esc_name(db, name)
    table = esc_name_(table)
    if schema != None: return esc_name_(schema)+'.'+table
    else: return table

##### Database connections

db_config_names = ['engine', 'host', 'user', 'password', 'database', 'schemas']

db_engines = {
    'MySQL': ('MySQLdb', {'password': 'passwd', 'database': 'db'}),
    'PostgreSQL': ('psycopg2', {}),
}
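
# Each entry maps an engine name to (driver module name, db_config key renames).
# The renames adapt db_config keys to the driver's connect() keywords, e.g. for
# MySQL, 'password' -> 'passwd' and 'database' -> 'db' (applied in DbConn._db()).
#     db_engines['PostgreSQL'][0] # -> 'psycopg2'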

DatabaseErrors_set = set([DbException])
DatabaseErrors = tuple(DatabaseErrors_set)

def _add_module(module):
    DatabaseErrors_set.add(module.DatabaseError)
    global DatabaseErrors
    DatabaseErrors = tuple(DatabaseErrors_set)

def db_config_str(db_config):
    return db_config['engine']+' database '+db_config['database']

def _query_lookup(query, params): return (query, dicts.make_hashable(params))

log_debug_none = lambda msg: None

class DbConn:
    def __init__(self, db_config, serializable=True, autocommit=False,
        caching=True, log_debug=log_debug_none):
        self.db_config = db_config
        self.serializable = serializable
        self.autocommit = autocommit
        self.caching = caching
        self.log_debug = log_debug
        self.debug = log_debug != log_debug_none
        
        self.__db = None
        self.query_results = {}
        self._savepoint = 0
    
    def __getattr__(self, name):
        if name == '__dict__': raise Exception('getting __dict__')
        if name == 'db': return self._db()
        else: raise AttributeError()
    
    def __getstate__(self):
        state = copy.copy(self.__dict__) # shallow copy
        state['log_debug'] = None # don't pickle the debug callback
        state['_DbConn__db'] = None # don't pickle the connection
        return state
    
    def connected(self): return self.__db != None
    
    def _db(self):
        if self.__db == None:
            # Process db_config
            db_config = self.db_config.copy() # don't modify input!
            schemas = db_config.pop('schemas', None)
            module_name, mappings = db_engines[db_config.pop('engine')]
            module = __import__(module_name)
            _add_module(module)
            for orig, new in mappings.iteritems():
                try: util.rename_key(db_config, orig, new)
                except KeyError: pass
            
            # Connect
            self.__db = module.connect(**db_config)
            
            # Configure connection
            if self.serializable and not self.autocommit: run_raw_query(self,
                'SET TRANSACTION ISOLATION LEVEL SERIALIZABLE')
            if schemas != None:
                schemas_ = ''.join((esc_name(self, s)+', '
                    for s in schemas.split(',')))
                run_raw_query(self, "SELECT set_config('search_path', \
%s || current_setting('search_path'), false)", [schemas_])
        
        return self.__db
    
    class DbCursor(Proxy):
        def __init__(self, outer):
            Proxy.__init__(self, outer.db.cursor())
            self.outer = outer
            self.query_results = outer.query_results
            self.query_lookup = None
            self.result = []
        
        def execute(self, query, params=None):
            self._is_insert = query.upper().find('INSERT') >= 0
            self.query_lookup = _query_lookup(query, params)
            try:
                try:
                    return_value = self.inner.execute(query, params)
                    self.outer.do_autocommit()
                finally: self.query = get_cur_query(self.inner)
            except Exception, e:
                _add_cursor_info(e, self, query, params)
                self.result = e # cache the exception as the result
                self._cache_result()
                raise
            # Fetch all rows so result will be cached
            if self.rowcount == 0 and not self._is_insert: consume_rows(self)
            return return_value
        
        def fetchone(self):
            row = self.inner.fetchone()
            if row != None: self.result.append(row)
            # otherwise, fetched all rows
            else: self._cache_result()
            return row
        
        def _cache_result(self):
            # For inserts, only cache exceptions since inserts are not
            # idempotent, but an invalid insert will always be invalid
            if self.query_results != None and (not self._is_insert
                or isinstance(self.result, Exception)):
                
                assert self.query_lookup != None
                self.query_results[self.query_lookup] = self.CacheCursor(
                    util.dict_subset(dicts.AttrsDictView(self),
                    ['query', 'result', 'rowcount', 'description']))
        
        class CacheCursor:
            def __init__(self, cached_result): self.__dict__ = cached_result
            
            def execute(self, *args, **kw_args):
                if isinstance(self.result, Exception): raise self.result
                # otherwise, result is a rows list
                self.iter = iter(self.result)
            
            def fetchone(self):
                try: return self.iter.next()
                except StopIteration: return None
    
    def esc_value(self, value):
        module = util.root_module(self.db)
        if module == 'psycopg2': str_ = self.db.cursor().mogrify('%s', [value])
        elif module == 'MySQLdb':
            import _mysql
            str_ = _mysql.escape_string(value)
        else: raise NotImplementedError("Can't escape value for "+module
            +' database')
        return strings.to_unicode(str_)
    
    def esc_name(self, name): return esc_name(self, name) # calls global func
    
    def run_query(self, query, params=None, cacheable=False):
        '''Translates known DB errors to typed exceptions:
        See self.DbCursor.execute().'''
        assert query != None
        
        if not self.caching: cacheable = False
        used_cache = False
        try:
            # Get cursor
            if cacheable:
                query_lookup = _query_lookup(query, params)
                try:
                    cur = self.query_results[query_lookup]
                    used_cache = True
                except KeyError: cur = self.DbCursor(self)
            else: cur = self.db.cursor()
            
            # Run query
            cur.execute(query, params)
        finally:
            if self.debug: # only compute msg if needed
                if used_cache: cache_status = 'Cache hit'
                elif cacheable: cache_status = 'Cache miss'
                else: cache_status = 'Non-cacheable'
                self.log_debug(cache_status+': '
                    +strings.one_line(str(get_cur_query(cur, query, params))))
        
        return cur
    
    def is_cached(self, query, params=None):
        return _query_lookup(query, params) in self.query_results
    
    def with_savepoint(self, func):
        savepoint = 'level_'+str(self._savepoint)
        self.run_query('SAVEPOINT '+savepoint)
        self._savepoint += 1
        try:
            try: return_val = func()
            finally:
                self._savepoint -= 1
                assert self._savepoint >= 0
        except:
            self.run_query('ROLLBACK TO SAVEPOINT '+savepoint)
            raise
        else:
            self.run_query('RELEASE SAVEPOINT '+savepoint)
            self.do_autocommit()
            return return_val
    
    def do_autocommit(self):
        '''Autocommits if outside savepoint'''
        assert self._savepoint >= 0
        if self.autocommit and self._savepoint == 0:
            self.log_debug('Autocommitting')
            self.db.commit()

connect = DbConn
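
# Hedged example of opening a connection (the config values are placeholders
# and a reachable PostgreSQL server is assumed; log_debug is optional):
#     db = connect(dict(engine='PostgreSQL', host='localhost', user='bien',
#         password='...', database='vegbien', schemas='public'),
#         autocommit=True)
#     print value(db.run_query('SELECT 1', cacheable=True))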

##### Querying

def run_raw_query(db, *args, **kw_args):
    '''For params, see DbConn.run_query()'''
    return db.run_query(*args, **kw_args)

def mogrify(db, query, params):
    module = util.root_module(db.db)
    if module == 'psycopg2': return db.db.cursor().mogrify(query, params)
    else: raise NotImplementedError("Can't mogrify query for "+module+
        ' database')

##### Recoverable querying

def with_savepoint(db, func): return db.with_savepoint(func)

def run_query(db, query, params=None, recover=None, cacheable=False):
    if recover == None: recover = False
    
    try:
        def run(): return run_raw_query(db, query, params, cacheable)
        if recover and not db.is_cached(query, params):
            return with_savepoint(db, run)
        else: return run() # don't need savepoint if cached
    except Exception, e:
        if not recover: raise # need savepoint to run index_cols()
        msg = exc.str_(e)
        
        match = re.search(r'duplicate key value violates unique constraint '
            r'"((_?[^\W_]+)_[^"]+?)"', msg)
        if match:
            constraint, table = match.groups()
            try: cols = index_cols(db, table, constraint)
            except NotImplementedError: raise e
            else: raise DuplicateKeyException(constraint, cols, e)
        
        match = re.search(r'null value in column "(\w+?)" violates not-null '
            r'constraint', msg)
        if match: raise NullValueException('NOT NULL', [match.group(1)], e)
        
        match = re.search(r'\b(?:invalid input (?:syntax|value)\b.*?'
            r'|date/time field value out of range): "(.+?)"\n'
            r'(?:(?s).*?)\bfunction "(\w+?)".*?\bat assignment', msg)
        if match:
            value, name = match.groups()
            raise FunctionValueException(name, strings.to_unicode(value), e)
        
        match = re.search(r'relation "(\w+?)" already exists', msg)
        if match: raise DuplicateTableException(match.group(1), e)
        
        match = re.search(r'function "(\w+?)" already exists', msg)
        if match: raise DuplicateFunctionException(match.group(1), e)
        
        raise # no specific exception raised
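
# Sketch of how the typed exceptions above are meant to be consumed (the table
# and column names are placeholders):
#     try: run_query(db, 'INSERT INTO plant (name) VALUES (%s)', ['x'],
#         recover=True)
#     except DuplicateKeyException, e: print 'duplicate row on columns', e.cols
#     except NullValueException, e: print 'missing required column', e.cols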

##### Basic queries

def next_version(name):
    '''Prepends the version # so it won't be removed if the name is truncated'''
    version = 1 # first existing name was version 0
    match = re.match(r'^v(\d+)_(.*)$', name)
    if match:
        version = int(match.group(1))+1
        name = match.group(2)
    return 'v'+str(version)+'_'+name
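
# For example (follows directly from the code above):
#     next_version('foo') # -> 'v1_foo'
#     next_version('v1_foo') # -> 'v2_foo'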

def run_query_into(db, query, params, into=None, *args, **kw_args):
    '''Outputs a query to a temp table.
    For params, see run_query().
    '''
    if into == None: return run_query(db, query, params, *args, **kw_args)
    else: # place rows in temp table
        assert isinstance(into, sql_gen.Table)
        
        kw_args['recover'] = True
        while True:
            try:
                create_query = 'CREATE'
                if not db.debug: create_query += ' TEMP'
                create_query += ' TABLE '+into.to_str(db)+' AS '+query
                
                return run_query(db, create_query, params, *args, **kw_args)
                    # CREATE TABLE AS sets rowcount to # rows in query
            except DuplicateTableException, e:
                into.name = next_version(into.name)
                # try again with next version of name

order_by_pkey = object() # tells mk_select() to order by the pkey

distinct_on_all = object() # tells mk_select() to SELECT DISTINCT ON all columns

def mk_select(db, tables, fields=None, conds=None, distinct_on=[], limit=None,
    start=None, order_by=order_by_pkey, default_table=None):
    '''
    @param tables The single table to select from, or a list of tables to join
        together, with tables after the first being sql_gen.Join objects
    @param fields Use None to select all fields in the table
    @param conds WHERE conditions: [(compare_left_side, compare_right_side),...]
        * container can be any iterable type
        * compare_left_side: sql_gen.Code|str (for col name)
        * compare_right_side: sql_gen.ValueCond|literal value
    @param distinct_on The columns to SELECT DISTINCT ON, or distinct_on_all to
        use all columns
    @return tuple(query, params)
    '''
    # Parse tables param
    if not lists.is_seq(tables): tables = [tables]
    tables = list(tables) # don't modify input! (list() copies input)
    table0 = sql_gen.as_Table(tables.pop(0)) # first table is separate
    
    # Parse other params
    if conds == None: conds = []
    elif isinstance(conds, dict): conds = conds.items()
    conds = list(conds) # don't modify input! (list() copies input)
    assert limit == None or type(limit) == int
    assert start == None or type(start) == int
    if order_by is order_by_pkey:
        if distinct_on != []: order_by = None
        else: order_by = pkey(db, table0, recover=True)
    
    query = 'SELECT'
    
    def parse_col(col): return sql_gen.as_Col(col, default_table).to_str(db)
    
    # DISTINCT ON columns
    if distinct_on != []:
        query += ' DISTINCT'
        if distinct_on is not distinct_on_all:
            query += ' ON ('+(', '.join(map(parse_col, distinct_on)))+')'
    
    # Columns
    query += ' '
    if fields == None: query += '*'
    else: query += ', '.join(map(parse_col, fields))
    
    # Main table
    query += ' FROM '+table0.to_str(db)
    
    # Add joins
    left_table = table0
    for join_ in tables:
        table = join_.table
        
        # Parse special values
        if join_.type_ is sql_gen.filter_out: # filter no match
            conds.append((sql_gen.Col(table_not_null_col(db, table), table),
                None))
        
        query += ' '+join_.to_str(db, left_table)
        
        left_table = table
    
    missing = True
    if conds != []:
        query += ' WHERE '+(' AND '.join(('('+sql_gen.ColValueCond(l, r)
            .to_str(db)+')' for l, r in conds)))
        missing = False
    if order_by != None:
        query += ' ORDER BY '+sql_gen.as_Col(order_by, table0).to_str(db)
    if limit != None: query += ' LIMIT '+str(limit); missing = False
    if start != None:
        if start != 0: query += ' OFFSET '+str(start)
        missing = False
    if missing: warnings.warn(DbWarning(
        'SELECT statement missing a WHERE, LIMIT, or OFFSET clause: '+query))
    
    return (query, [])
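
# Hedged example (the table and column names are placeholders; literal values
# in conds are rendered through sql_gen as above, so the output is approximate):
#     query, params = mk_select(db, 'plant', ['name'], {'genus': 'Quercus'},
#         limit=10, order_by=None)
#     # query is roughly: SELECT name FROM "plant" WHERE ("genus" = ...) LIMIT 10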

def select(db, *args, **kw_args):
    '''For params, see mk_select() and run_query()'''
    recover = kw_args.pop('recover', None)
    cacheable = kw_args.pop('cacheable', True)
    
    query, params = mk_select(db, *args, **kw_args)
    return run_query(db, query, params, recover, cacheable)

def mk_insert_select(db, table, cols=None, select_query=None, params=None,
    returning=None, embeddable=False):
    '''
    @param returning str|None An inserted column (such as pkey) to return
    @param embeddable Whether the query should be embeddable as a nested SELECT.
        Warning: If you set this and cacheable=True when the query is run, the
        query will be fully cached, not just if it raises an exception.
    '''
    table = sql_gen.as_Table(table)
    if cols == []: cols = None # no cols (all defaults) = unknown col names
    if cols != None: cols = [sql_gen.as_Col(v).to_str(db) for v in cols]
    if select_query == None: select_query = 'DEFAULT VALUES'
    if returning != None: returning = sql_gen.as_Col(returning, table)
    
    # Build query
    query = 'INSERT INTO '+table.to_str(db)
    if cols != None: query += ' ('+', '.join(cols)+')'
    query += ' '+select_query
    
    if returning != None:
        returning_name = copy.copy(returning)
        returning_name.table = None
        returning_name = returning_name.to_str(db)
        query += ' RETURNING '+returning_name
    
    if embeddable:
        assert returning != None
        
        # Create function
        function_name = '_'.join(['insert', table.name] + cols)
        return_type = 'SETOF '+returning.to_str(db)+'%TYPE'
        while True:
            try:
                func_schema = None
                if not db.debug: func_schema = 'pg_temp'
                function = sql_gen.Table(function_name, func_schema).to_str(db)
                
                function_query = '''\
CREATE FUNCTION '''+function+'''() RETURNS '''+return_type+'''
    LANGUAGE sql
    AS $$'''+mogrify(db, query, params)+''';$$;
'''
                run_query(db, function_query, recover=True, cacheable=True)
                break # this version was successful
            except DuplicateFunctionException, e:
                function_name = next_version(function_name)
                # try again with next version of name
        
        # Return query that uses function
        func_table = sql_gen.NamedTable('f', sql_gen.CustomCode(function+'()'),
            [returning_name]) # AS clause requires function alias
        return mk_select(db, func_table, start=0, order_by=None)
    
    return (query, params)

def insert_select(db, *args, **kw_args):
    '''For params, see mk_insert_select() and run_query_into()
    @param into sql_gen.Table with suggested name of temp table to put RETURNING
        values in
    '''
    into = kw_args.pop('into', None)
    if into != None: kw_args['embeddable'] = True
    recover = kw_args.pop('recover', None)
    cacheable = kw_args.pop('cacheable', True)
    
    query, params = mk_insert_select(db, *args, **kw_args)
    return run_query_into(db, query, params, into, recover=recover,
        cacheable=cacheable)

default = object() # tells insert() to use the default value for a column

def insert(db, table, row, *args, **kw_args):
    '''For params, see insert_select()'''
    if lists.is_seq(row): cols = None
    else:
        cols = row.keys()
        row = row.values()
    row = list(row) # ensure that "!= []" works
    
    # Check for special values
    labels = []
    values = []
    for value in row:
        if value is default: labels.append('DEFAULT')
        else:
            labels.append('%s')
            values.append(value)
    
    # Build query
    if values != []: query = ' VALUES ('+(', '.join(labels))+')'
    else: query = None
    
    return insert_select(db, table, cols, query, values, *args, **kw_args)
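
# Hedged examples (the table and its columns are placeholders):
#     insert(db, 'plant', {'name': 'Quercus alba', 'genus': default})
#         # dict row: keys are the column names; `default` emits DEFAULT
#     insert(db, 'plant', ['Quercus alba']) # sequence row: values in column order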

def mk_update(db, table, changes=None, cond=None):
    '''
    @param changes [(col, new_value),...]
        * container can be any iterable type
        * col: sql_gen.Code|str (for col name)
        * new_value: sql_gen.Code|literal value
    @param cond sql_gen.Code WHERE condition. e.g. use sql_gen.*Cond objects.
    @return str query
    '''
    query = 'UPDATE '+sql_gen.as_Table(table).to_str(db)+'\nSET\n'
    query += ',\n'.join((sql_gen.to_name_only_col(col, table).to_str(db)+' = '
        +sql_gen.as_Value(new_value).to_str(db) for col, new_value in changes))
    if cond != None: query += ' WHERE '+cond.to_str(db)
    
    return query
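
# Hedged example (names are placeholders; values are rendered by sql_gen as
# above, so the generated SQL is approximate):
#     mk_update(db, 'plant', [('genus', 'Quercus')],
#         sql_gen.ColValueCond('genus', 'QUERCUS'))
#     # -> roughly: UPDATE "plant" SET "genus" = ... WHERE ("genus" = ...)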

def update(db, *args, **kw_args):
    '''For params, see mk_update() and run_query()'''
    recover = kw_args.pop('recover', None)
    
    return run_query(db, mk_update(db, *args, **kw_args), [], recover)

def last_insert_id(db):
    module = util.root_module(db.db)
    if module == 'psycopg2': return value(run_query(db, 'SELECT lastval()'))
    elif module == 'MySQLdb': return db.insert_id()
    else: return None

def truncate(db, table, schema='public'):
    return run_query(db, 'TRUNCATE '+qual_name(db, schema, table)+' CASCADE')

def mk_flatten_mapping(db, into, cols, preserve=[], as_items=False):
    '''Creates a mapping from original column names (which may have collisions)
    to names that will be distinct among the columns' tables.
    This is meant to be used for several tables that are being joined together.
    @param cols The columns to combine. Duplicates will be removed.
    @param into The table for the new columns.
    @param preserve [sql_gen.Col...] Columns not to rename. Note that these
        columns will be included in the mapping even if they are not in cols.
        The tables of the provided Col objects will be changed to into, so make
        copies of them if you want to keep the original tables.
    @param as_items Whether to return a list of dict items instead of a dict
    @return dict(orig_col=new_col, ...)
        * orig_col: sql_gen.Col(orig_col_name, orig_table)
        * new_col: sql_gen.Col(orig_col_name, into)
        * All mappings use the into table so its name can easily be
          changed for all columns at once
    '''
    cols = lists.uniqify(cols)
    
    items = []
    for col in preserve:
        orig_col = copy.copy(col)
        col.table = into
        items.append((orig_col, col))
    preserve = set(preserve)
    for col in cols:
        if col not in preserve:
            items.append((col, sql_gen.Col(clean_name(str(col)), into)))
    
    if not as_items: items = dict(items)
    return items

def flatten(db, into, joins, cols, limit=None, start=None, **kw_args):
    '''For params, see mk_flatten_mapping()
    @return See return value of mk_flatten_mapping()
    '''
    items = mk_flatten_mapping(db, into, cols, as_items=True, **kw_args)
    cols = [sql_gen.NamedCol(new.name, old) for old, new in items]
    run_query_into(db, *mk_select(db, joins, cols, limit=limit, start=start),
        into=into)
    return dict(items)

##### Database structure queries

def table_cols(db, table, recover=None):
    return list(col_names(select(db, table, limit=0, order_by=None,
        recover=recover)))

def pkey(db, table, recover=None):
    '''Assumed to be first column in table'''
    return table_cols(db, table, recover)[0]

not_null_col = 'not_null'

def table_not_null_col(db, table, recover=None):
    '''Name assumed to be the value of not_null_col. If not found, uses pkey.'''
    if not_null_col in table_cols(db, table, recover): return not_null_col
    else: return pkey(db, table, recover)

def index_cols(db, table, index):
    '''Can also use this for UNIQUE constraints, because a UNIQUE index is
    automatically created. When you don't know whether something is a UNIQUE
    constraint or a UNIQUE index, use this function.'''
    module = util.root_module(db.db)
    if module == 'psycopg2':
        return list(values(run_query(db, '''\
SELECT attname
FROM
(
        SELECT attnum, attname
        FROM pg_index
        JOIN pg_class index ON index.oid = indexrelid
        JOIN pg_class table_ ON table_.oid = indrelid
        JOIN pg_attribute ON attrelid = indrelid AND attnum = ANY (indkey)
        WHERE
            table_.relname = %(table)s
            AND index.relname = %(index)s
    UNION
        SELECT attnum, attname
        FROM
        (
            SELECT
                indrelid
                , (regexp_matches(indexprs, E':varattno (\\\\d+)', 'g'))[1]::int
                    AS indkey
            FROM pg_index
            JOIN pg_class index ON index.oid = indexrelid
            JOIN pg_class table_ ON table_.oid = indrelid
            WHERE
                table_.relname = %(table)s
                AND index.relname = %(index)s
        ) s
        JOIN pg_attribute ON attrelid = indrelid AND attnum = indkey
) s
ORDER BY attnum
''',
            {'table': table, 'index': index}, cacheable=True)))
    else: raise NotImplementedError("Can't list index columns for "+module+
        ' database')

def constraint_cols(db, table, constraint):
    module = util.root_module(db.db)
    if module == 'psycopg2':
        return list(values(run_query(db, '''\
SELECT attname
FROM pg_constraint
JOIN pg_class ON pg_class.oid = conrelid
JOIN pg_attribute ON attrelid = conrelid AND attnum = ANY (conkey)
WHERE
    relname = %(table)s
    AND conname = %(constraint)s
ORDER BY attnum
''',
            {'table': table, 'constraint': constraint})))
    else: raise NotImplementedError("Can't list constraint columns for "+module+
        ' database')

row_num_col = '_row_num'

def index_col(db, col):
    '''Adds an index on a column if it doesn't already exist.'''
    assert sql_gen.is_table_col(col)
    
    table = col.table
    index = sql_gen.as_Table(clean_name(str(col)))
    col = sql_gen.to_name_only_col(col)
    try: run_query(db, 'CREATE INDEX '+index.to_str(db)+' ON '+table.to_str(db)
        +' ('+col.to_str(db)+')', recover=True, cacheable=True)
    except DuplicateTableException: pass # index already existed

def index_pkey(db, table, recover=None):
    '''Makes the first column in a table the primary key.
    @pre The table must not already have a primary key.
    '''
    table = sql_gen.as_Table(table)
    
    index = sql_gen.as_Table(table.name+'_pkey')
    col = sql_gen.to_name_only_col(pkey(db, table, recover))
    run_query(db, 'ALTER TABLE '+table.to_str(db)+' ADD CONSTRAINT '
        +index.to_str(db)+' PRIMARY KEY('+col.to_str(db)+')', recover=recover)

def add_row_num(db, table):
    '''Adds a row number column to a table. Its name is in row_num_col. It will
    be the primary key.'''
    table = sql_gen.as_Table(table).to_str(db)
    run_query(db, 'ALTER TABLE '+table+' ADD COLUMN '+row_num_col
        +' serial NOT NULL PRIMARY KEY')

def tables(db, schema='public', table_like='%'):
    module = util.root_module(db.db)
    params = {'schema': schema, 'table_like': table_like}
    if module == 'psycopg2':
        return values(run_query(db, '''\
SELECT tablename
FROM pg_tables
WHERE
    schemaname = %(schema)s
    AND tablename LIKE %(table_like)s
ORDER BY tablename
''',
            params, cacheable=True))
    elif module == 'MySQLdb':
        return values(run_query(db, 'SHOW TABLES LIKE %(table_like)s', params,
            cacheable=True))
    else: raise NotImplementedError("Can't list tables for "+module+' database')
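
# Example (engine-specific, as above; under PostgreSQL this reads pg_tables):
#     plant_tables = list(tables(db, schema='public', table_like='plant%'))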

##### Database management

def empty_db(db, schema='public', **kw_args):
    '''For kw_args, see tables()'''
    for table in tables(db, schema, **kw_args): truncate(db, table, schema)

##### Heuristic queries

def put(db, table, row, pkey_=None, row_ct_ref=None):
    '''Recovers from errors.
    Only works under PostgreSQL (uses INSERT RETURNING).
    '''
    if pkey_ == None: pkey_ = pkey(db, table, recover=True)
    
    try:
        cur = insert(db, table, row, pkey_, recover=True)
        if row_ct_ref != None and cur.rowcount >= 0:
            row_ct_ref[0] += cur.rowcount
        return value(cur)
    except DuplicateKeyException, e:
        return value(select(db, table, [pkey_],
            util.dict_subset_right_join(row, e.cols), recover=True))

def get(db, table, row, pkey, row_ct_ref=None, create=False):
    '''Recovers from errors'''
    try: return value(select(db, table, [pkey], row, limit=1, recover=True))
    except StopIteration:
        if not create: raise
        return put(db, table, row, pkey, row_ct_ref) # insert new row
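
# Hedged example of get-or-create (the table, row, and pkey name are
# placeholders; PostgreSQL is assumed, as noted in put()):
#     plant_id = get(db, 'plant', {'plantname': 'Quercus alba'}, 'plant_id',
#         create=True)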

def put_table(db, out_table, in_tables, mapping, row_ct_ref=None):
    '''Recovers from errors.
    Only works under PostgreSQL (uses INSERT RETURNING).
    @param in_tables The main input table to select from, followed by a list of
        tables to join with it using the main input table's pkey
    @param mapping dict(out_table_col=in_table_col, ...)
        * out_table_col: sql_gen.Col|str
        * in_table_col: sql_gen.Col Wrap literal values in a sql_gen.NamedCol
    @return sql_gen.Col Where the output pkeys are made available
    '''
    out_table = sql_gen.as_Table(out_table)
    for in_table_col in mapping.itervalues():
        assert isinstance(in_table_col, sql_gen.Col)
    
    temp_prefix = out_table.name
    pkeys = sql_gen.Table(temp_prefix+'_pkeys')
    
    # Create input joins from list of input tables
    in_tables_ = in_tables[:] # don't modify input!
    in_tables0 = in_tables_.pop(0) # first table is separate
    in_pkey = pkey(db, in_tables0, recover=True)
    in_pkey_col = sql_gen.as_Col(in_pkey, in_tables0)
    input_joins = [in_tables0]+[sql_gen.Join(v, {in_pkey: sql_gen.join_same})
        for v in in_tables_]
    
    db.log_debug('Joining together input tables')
    # Place in new table for speed and so don't modify input if values edited
    in_table = sql_gen.Table(temp_prefix+'_in')
    flatten_cols = filter(sql_gen.is_table_col, mapping.values())
    mapping = dicts.join(mapping, flatten(db, in_table, input_joins,
        flatten_cols, preserve=[in_pkey_col], start=0))
    input_joins = [in_table]
    
    out_pkey = pkey(db, out_table, recover=True)
    out_pkey_col = sql_gen.as_Col(out_pkey, out_table)
    
    pkeys_names = [in_pkey, out_pkey]
    pkeys_cols = [in_pkey_col, out_pkey_col]
    
    pkeys_table_exists_ref = [False]
    def run_query_into_pkeys(query, params):
        if pkeys_table_exists_ref[0]:
            insert_select(db, pkeys, pkeys_names, query, params)
        else:
            run_query_into(db, query, params, into=pkeys)
            pkeys_table_exists_ref[0] = True
    
    limit = None
    conds = set()
    distinct_on = []
    def mk_main_select(joins, cols):
        return mk_select(db, joins, cols, conds, distinct_on, limit=limit,
            start=0)
    
    def log_exc(e):
        db.log_debug('Caught exception: '+exc.str_(e, first_line_only=True))
    def ignore(in_col, value):
        in_col_str = str(in_col)
        db.log_debug('Adding index on '+in_col_str+' to enable fast filtering')
        index_col(db, in_col)
        db.log_debug('Ignoring rows with '+in_col_str+' = '+repr(value))
    def remove_rows(in_col, value):
        ignore(in_col, value)
        cond = (in_col, sql_gen.CompareCond(value, '!='))
        assert cond not in conds # avoid infinite loops
        conds.add(cond)
    def invalid2null(in_col, value):
        ignore(in_col, value)
        update(db, in_table, [(in_col, None)],
            sql_gen.ColValueCond(in_col, value))
    
    # Do inserts and selects
    join_cols = {}
    insert_out_pkeys = sql_gen.Table(temp_prefix+'_insert_out_pkeys')
    insert_in_pkeys = sql_gen.Table(temp_prefix+'_insert_in_pkeys')
    while True:
        has_joins = join_cols != {}
        
        # Prepare to insert new rows
        insert_joins = input_joins[:] # don't modify original!
        insert_args = dict(recover=True, cacheable=False)
        if has_joins:
            distinct_on = [v.to_Col() for v in join_cols.values()]
            insert_joins.append(sql_gen.Join(out_table, join_cols,
                sql_gen.filter_out))
        else:
            insert_args.update(dict(returning=out_pkey, into=insert_out_pkeys))
        
        db.log_debug('Inserting new rows')
        try:
            cur = insert_select(db, out_table, mapping.keys(),
                *mk_main_select(insert_joins, mapping.values()), **insert_args)
            break # insert successful
        except DuplicateKeyException, e:
            log_exc(e)
            
            old_join_cols = join_cols.copy()
            join_cols.update(util.dict_subset(mapping, e.cols))
            db.log_debug('Ignoring existing rows, comparing on '+str(join_cols))
            assert join_cols != old_join_cols # avoid infinite loops
        except NullValueException, e:
            log_exc(e)
            
            out_col, = e.cols
            try: in_col = mapping[out_col]
            except KeyError:
                db.log_debug('Missing mapping for NOT NULL '+out_col)
                limit = 0 # just create an empty pkeys table
            else: remove_rows(in_col, None)
        except FunctionValueException, e:
            log_exc(e)
            
            assert e.name == out_table.name
            out_col = 'value' # assume function param was named "value"
            invalid2null(mapping[out_col], e.value)
        # after exception handled, rerun loop with additional constraints
    
    if row_ct_ref != None and cur.rowcount >= 0:
        row_ct_ref[0] += cur.rowcount
    
    if has_joins:
        select_joins = input_joins+[sql_gen.Join(out_table, join_cols)]
        db.log_debug('Getting output pkeys of existing/inserted rows')
        run_query_into_pkeys(*mk_select(db, select_joins, pkeys_cols,
            order_by=None, start=0))
    else:
        add_row_num(db, insert_out_pkeys) # for joining with input pkeys
        
        db.log_debug('Getting input pkeys for rows in insert')
        run_query_into(db, *mk_main_select(input_joins, [in_pkey]),
            into=insert_in_pkeys)
        add_row_num(db, insert_in_pkeys) # for joining with output pkeys
        
        db.log_debug('Joining together output and input pkeys')
        pkey_joins = [insert_in_pkeys, sql_gen.Join(insert_out_pkeys,
            {row_num_col: sql_gen.join_same_not_null})]
        run_query_into_pkeys(*mk_select(db, pkey_joins, pkeys_names,
            order_by=None, start=0))
    
    db.log_debug('Adding pkey on returned pkeys table to enable fast joins')
    index_pkey(db, pkeys)
    
    db.log_debug("Setting missing rows' pkeys to NULL")
    missing_rows_joins = input_joins+[sql_gen.Join(pkeys,
        {in_pkey: sql_gen.join_same_not_null}, sql_gen.filter_out)]
        # must use join_same_not_null or query will take forever
    run_query_into_pkeys(*mk_select(db, missing_rows_joins,
        [in_pkey_col, sql_gen.NamedCol(out_pkey, None)], order_by=None,
        start=0))
    
    return sql_gen.Col(out_pkey, pkeys)
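
# Hedged example call (table and column names are placeholders; the staging
# table must already exist and hold the input rows, with its pkey first):
#     in_table = 'plant_staging'
#     out_pkeys_col = put_table(db, 'plant', [in_table],
#         {'plantname': sql_gen.Col('verbatimname', in_table)})
#     # out_pkeys_col points at the generated plant_pkeys temp table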

##### Data cleanup

def cleanup_table(db, table, cols):
    def esc_name_(name): return esc_name(db, name)
    
    table = sql_gen.as_Table(table).to_str(db)
    cols = map(esc_name_, cols)
    
    run_query(db, 'UPDATE '+table+' SET\n'+(',\n'.join(('\n'+col
        +' = nullif(nullif(trim(both from '+col+"), %(null0)s), %(null1)s)"
            for col in cols))),
        dict(null0='', null1=r'\N'))
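
# Hedged example (names are placeholders): trims the listed columns and
# replaces empty strings and the \N marker with NULL.
#     cleanup_table(db, 'plant_staging', ['plantname', 'genus'])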