# Database import/export

import operator

import exc
import dicts
import sql
import sql_gen
import strings
import util

##### Data cleanup

def cleanup_table(db, table, cols):
    '''Trims whitespace in the given columns and maps the empty string and the
    r'\N' marker to NULL, in place.'''
    table = sql_gen.as_Table(table)
    cols = map(sql_gen.as_Col, cols)
    
    expr = ('nullif(nullif(trim(both from %s), '+db.esc_value('')+'), '
        +db.esc_value(r'\N')+')')
    changes = [(v, sql_gen.CustomCode(expr % v.to_str(db)))
        for v in cols]
    
    sql.update(db, table, changes, in_place=True)
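# Example usage (a minimal sketch; the table and column names are hypothetical,
# and `db` is the connection object used throughout this module):
#     cleanup_table(db, 'plots', ['site_name', 'notes'])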

##### Error tracking

def track_data_error(db, errors_table, cols, value, error_code, error):
    '''
    @param errors_table If None, does nothing.
    @param cols The source columns (sql_gen.Col) the value came from.
        If empty, does nothing.
    '''
    if errors_table == None or cols == (): return
    
    for col in cols:
        try:
            sql.insert(db, errors_table, dict(column=col.name, value=value,
                error_code=error_code, error=error), recover=True,
                cacheable=True, log_level=4)
        except sql.DuplicateKeyException: pass
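# Example usage (a minimal sketch; the column, value, and error text are
# hypothetical, and errors_table_ would come from errors_table() below):
#     track_data_error(db, errors_table_, [sql_gen.Col('lat')], '91x',
#         '22P02', 'invalid input syntax for type double precision')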

def data_exception_handler(db, srcs=[], errors_table=None):
    '''Handles a data_exception by saving the error or converting it to a
    warning, and returning NULL.
    @param srcs The source columns (sql_gen.Col) to record in the errors table
    @param errors_table None|sql_gen.Table
    '''
    save_errors = errors_table != None and srcs
    handler = ''
    if save_errors:
        errors_table_cols = map(sql_gen.Col,
            ['column', 'value', 'error_code', 'error'])
        col_names_query = sql.mk_select(db, sql_gen.NamedValues('c', None,
            [[c.name] for c in srcs]), order_by=None)
        insert_query = sql.mk_insert_select(db, errors_table, errors_table_cols,
            sql_gen.Values(errors_table_cols).to_str(db))+';\n'
        handler += '''\
-- Save error in errors table.
DECLARE
    error_code text := SQLSTATE;
    error text := SQLERRM;
BEGIN
    -- Insert the value and error for *each* source column.
'''+strings.indent(sql_gen.RowExcIgnore('text', col_names_query, insert_query,
    row_var=errors_table_cols[0]).to_str(db))+'''
END;
'''
    else:
        handler += '''\
RAISE WARNING '%', SQLERRM;
'''
    handler += '''\
RETURN NULL;
'''
    return sql_gen.ExcHandler('data_exception', handler)
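# The returned ExcHandler wraps a plpgsql function body; see cast() below for
# the intended usage (handler.to_str(db, <body>)).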

def cast(db, type_, col, errors_table=None):
    '''Casts an (unrenamed) column or value.
    If errors_table set and col has srcs, saves errors in errors_table (using
    col's srcs attr as source columns). Otherwise, converts errors to warnings.
    @param col str|sql_gen.Col|sql_gen.Literal
    @param errors_table None|sql_gen.Table|str
    '''
    col = sql_gen.as_Col(col)
    
    # Don't convert exceptions to warnings for user-supplied constants
    if isinstance(col, sql_gen.Literal): return sql_gen.Cast(type_, col)
    
    assert not isinstance(col, sql_gen.NamedCol)
    
    function_name = strings.first_word(type_)
    srcs = col.srcs
    save_errors = (errors_table != None and isinstance(col, sql_gen.Col)
        and col.srcs != ())
    if save_errors:
        srcs = map(sql_gen.to_name_only_col, col.srcs)
        function_name = str(sql_gen.FunctionCall(function_name, *srcs))
    function = db.TempFunction(function_name)
    
    # Create function definition
    modifiers = 'STRICT'
    if not save_errors: modifiers = 'IMMUTABLE '+modifiers
    handler = data_exception_handler(db, srcs, errors_table)
    body = sql_gen.CustomCode(handler.to_str(db, '''\
/* The explicit cast to the return type is needed to make the cast happen
inside the try block. (Implicit casts to the return type happen at the end
of the function, outside any block.) */
RETURN value::'''+type_+''';
'''))
    body.lang='plpgsql'
    def_ = sql_gen.FunctionDef(function, type_, body,
        [sql_gen.FunctionParam('value', 'text')], modifiers)
    
    # Create function
    while True:
        try:
            sql.run_query(db, def_.to_str(db), recover=True, cacheable=True,
                log_ignore_excs=(sql.DuplicateException,))
            break # successful
        except sql.DuplicateException:
            function.name = sql.next_version(function.name)
            # try again with next version of name
    
    return sql_gen.FunctionCall(function, col)
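# Example usage (a minimal sketch; the type, column, and table are hypothetical):
#     lat_expr = cast(db, 'double precision', sql_gen.Col('lat', in_table),
#         errors_table_)
# The result is a FunctionCall expression; invalid values become NULL and are
# saved to errors_table_ (or converted to warnings if the column has no srcs).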

def cast_temp_col(db, type_, col, errors_table=None):
    '''Like cast(), but creates a new column with the cast values if the input
    is a column.
    @return The new column or cast value
    '''
    def cast_(col): return cast(db, type_, col, errors_table)
    
    try: col = sql_gen.underlying_col(col)
    except sql_gen.NoUnderlyingTableException: return sql_gen.wrap(cast_, col)
    
    table = col.table
    new_col = sql_gen.suffixed_col(col, '::'+strings.first_word(type_))
    expr = cast_(col)
    
    # Add column
    new_typed_col = sql_gen.TypedCol(new_col.name, type_)
    sql.add_col(db, table, new_typed_col, comment=repr(col)+'::'+type_)
    new_col.name = new_typed_col.name # propagate any renaming
    
    sql.update(db, table, [(new_col, expr)], in_place=True, recover=True)
    
    return new_col
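# Example usage (a minimal sketch; the type, column, and table are hypothetical):
#     lat_cast_col = cast_temp_col(db, 'double precision',
#         sql_gen.Col('lat', in_table), errors_table_)
# Adds a new column to lat's table containing the cast values and returns it.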

def errors_table(db, table, if_exists=True):
    '''
    @param if_exists If set, returns None if the errors table doesn't exist
    @return None|sql_gen.Table
    '''
    table = sql_gen.as_Table(table)
    if table.srcs != (): table = table.srcs[0]
    
    errors_table = sql_gen.suffixed_table(table, '.errors')
    if if_exists and not sql.table_exists(db, errors_table): return None
    return errors_table
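# Example usage (a minimal sketch; the input table name is hypothetical):
#     errors_table_ = errors_table(db, 'specimens')
# Returns the table's '.errors'-suffixed companion table, or None if it
# doesn't exist.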

##### Import

def put(db, table, row, pkey_=None, row_ct_ref=None):
    '''Recovers from errors.
    Only works under PostgreSQL (uses INSERT RETURNING).
    '''
    row = sql_gen.ColDict(db, table, row)
    if pkey_ == None: pkey_ = sql.pkey(db, table, recover=True)
    
    try:
        cur = sql.insert(db, table, row, pkey_, recover=True, log_level=3.5)
        if row_ct_ref != None and cur.rowcount >= 0:
            row_ct_ref[0] += cur.rowcount
        return sql.value(cur)
    except sql.DuplicateKeyException, e:
        row = sql_gen.ColDict(db, table,
            util.dict_subset_right_join(row, e.cols))
        return sql.value(sql.select(db, table, [pkey_], row, recover=True,
            log_level=3.5))
    except sql.NullValueException: return None
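# Example usage (a minimal sketch; the table, column, and value are hypothetical):
#     row_ct = [0]
#     pkey_value = put(db, 'party', dict(organizationname='NYBG'),
#         row_ct_ref=row_ct)
# Inserts the row (or, on a duplicate key, looks up the existing row) and
# returns its pkey value; row_ct[0] is incremented by the number of rows added.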

def get(db, table, row, pkey, row_ct_ref=None, create=False):
    '''Recovers from errors'''
    try:
        return sql.value(sql.select(db, table, [pkey], row, limit=1,
            recover=True))
    except StopIteration:
        if not create: raise
        return put(db, table, row, pkey, row_ct_ref) # insert new row
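# Example usage (a minimal sketch; the table, column, and pkey names are
# hypothetical): look up a row's pkey, inserting the row first if it's missing:
#     pkey_value = get(db, 'party', dict(organizationname='NYBG'), 'party_id',
#         create=True)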

def is_func_result(col):
    return col.table.name.find('(') >= 0 and col.name == 'result'

def into_table_name(out_table, in_tables0, mapping, is_func):
    '''Generates the default name of the pkeys table that put_table() creates'''
    def in_col_str(in_col):
        in_col = sql_gen.remove_col_rename(in_col)
        if isinstance(in_col, sql_gen.Col):
            table = in_col.table
            if table == in_tables0:
                in_col = sql_gen.to_name_only_col(in_col)
            elif is_func_result(in_col): in_col = table # omit col name
        return str(in_col)
    
    str_ = str(out_table)
    if is_func:
        str_ += '('
        
        try: value_in_col = mapping['value']
        except KeyError:
            str_ += ', '.join((str(k)+'='+in_col_str(v)
                for k, v in mapping.iteritems()))
        else: str_ += in_col_str(value_in_col)
        
        str_ += ')'
    else:
        out_col = 'rank'
        try: in_col = mapping[out_col]
        except KeyError: str_ += '_pkeys'
        else: # has a rank column, so hierarchical
            str_ += '['+str(out_col)+'='+in_col_str(in_col)+']'
    return str_
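# Example (a minimal sketch; the table name is hypothetical): with no 'rank'
# column mapped, output table 'taxonoccurrence' yields a pkeys table named
# 'taxonoccurrence_pkeys'; for functions, the mapped input columns are embedded
# in parentheses instead.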

def put_table(db, out_table, in_tables, mapping, row_ct_ref=None, into=None,
    default=None, is_func=False, on_error=exc.raise_):
    '''Recovers from errors.
    Only works under PostgreSQL (uses INSERT RETURNING).
    IMPORTANT: Must be run at the *beginning* of a transaction.
    @param in_tables A list whose first element is the main input table to
        select from; any remaining tables are joined to it on the main input
        table's pkey
    @param mapping dict(out_table_col=in_table_col, ...)
        * out_table_col: str (*not* sql_gen.Col)
        * in_table_col: sql_gen.Col|literal-value
    @param into The table to contain the output and input pkeys.
        Defaults to `out_table.name+'_pkeys'`.
    @param default The *output* column to use as the pkey for missing rows.
        If this output column does not exist in the mapping, uses None.
    @param is_func Whether out_table is the name of a SQL function, not a table
    @return sql_gen.Col Where the output pkeys are made available
    '''
    import psycopg2.extensions
239
    
240
    out_table = sql_gen.as_Table(out_table)
241
    
242
    def log_debug(msg): db.log_debug(msg, level=1.5)
243
    def col_ustr(str_):
244
        return strings.repr_no_u(sql_gen.remove_col_rename(str_))
245
    
246
    log_debug('********** New iteration **********')
247
    log_debug('Inserting these input columns into '+strings.as_tt(
248
        out_table.to_str(db))+':\n'+strings.as_table(mapping, ustr=col_ustr))
249
    
250
    is_function = sql.function_exists(db, out_table)
251
    
252
    if is_function: out_pkey = 'result'
253
    else: out_pkey = sql.pkey(db, out_table, recover=True)
254
    out_pkey_col = sql_gen.as_Col(out_pkey, out_table)
255
    
256
    in_tables_ = in_tables[:] # don't modify input!
257
    try: in_tables0 = in_tables_.pop(0) # first table is separate
258
    except IndexError: in_tables0 = None
259
    else:
260
        in_pkey = sql.pkey(db, in_tables0, recover=True)
261
        in_pkey_col = sql_gen.as_Col(in_pkey, in_tables0)
262
    
263
    # Determine if can use optimization for only literal values
264
    is_literals = not reduce(operator.or_, map(sql_gen.is_table_col,
265
        mapping.values()), False)
266
    is_literals_or_function = is_literals or is_function
267
    
268
    if in_tables0 == None: errors_table_ = None
269
    else: errors_table_ = errors_table(db, in_tables0)
270
    
271
    # Create input joins from list of input tables
272
    input_joins = [in_tables0]+[sql_gen.Join(v,
273
        {in_pkey: sql_gen.join_same_not_null}) for v in in_tables_]
274
    
275
    if mapping == {} and not is_function: # need >= one column for INSERT SELECT
276
        mapping = {out_pkey: None} # ColDict will replace with default value
277
    
278
    if not is_literals:
279
        if into == None:
280
            into = into_table_name(out_table, in_tables0, mapping, is_func)
281
        into = sql_gen.as_Table(into)
282
        
283
        # Set column sources
284
        in_cols = filter(sql_gen.is_table_col, mapping.values())
285
        for col in in_cols:
286
            if col.table == in_tables0: col.set_srcs(sql_gen.src_self)
287
        
288
        log_debug('Joining together input tables into temp table')
289
        # Place in new table so don't modify input and for speed
290
        in_table = sql_gen.Table('in')
291
        mapping = dicts.join(mapping, sql.flatten(db, in_table, input_joins,
292
            in_cols, preserve=[in_pkey_col]))
293
        input_joins = [in_table]
294
        db.log_debug('Temp table: '+strings.as_tt(in_table.to_str(db)), level=2)
295
    
296
    mapping = sql_gen.ColDict(db, out_table, mapping)
297
        # after applying dicts.join() because that returns a plain dict
298
    
299
    # Resolve default value column
300
    if default != None:
301
        try: default = mapping[default]
302
        except KeyError:
303
            db.log_debug('Default value column '
304
                +strings.as_tt(strings.repr_no_u(default))
305
                +' does not exist in mapping, falling back to None', level=2.1)
306
            default = None
307
    
308
    # Save default values for all rows since in_table may have rows deleted
309
    if is_literals: pass
310
    elif is_function: full_in_table = in_table
311
    else:
312
        full_in_table = sql_gen.suffixed_table(in_table, '_full')
313
        full_in_table_cols = [in_pkey_col]
314
        if default != None:
315
            full_in_table_cols.append(default)
316
            default = sql_gen.with_table(default, full_in_table)
317
        sql.run_query_into(db, sql.mk_select(db, in_table, full_in_table_cols,
318
            order_by=None), into=full_in_table, add_pkey_=True)
319
    
320
    if not is_literals:
321
        pkeys_names = [in_pkey, out_pkey]
322
        pkeys_cols = [in_pkey_col, out_pkey_col]
323
    
324
    pkeys_table_exists_ref = [False]
325
    def insert_into_pkeys(joins, cols, limit=None, **kw_args):
326
        query = sql.mk_select(db, joins, cols, order_by=None, limit=limit)
327
        if pkeys_table_exists_ref[0]:
328
            sql.insert_select(db, into, pkeys_names, query, **kw_args)
329
        else:
330
            sql.run_query_into(db, query, into=into, add_pkey_=True, **kw_args)
331
            pkeys_table_exists_ref[0] = True
332
    
333
    limit_ref = [None]
334
    def mk_main_select(joins, cols):
335
        return sql.mk_select(db, joins, cols, limit=limit_ref[0], order_by=None)
336
    
337
    if is_literals: insert_in_table = None
338
    else:
339
        insert_in_table = in_table
340
        insert_in_tables = [insert_in_table]
341
    join_cols = sql_gen.ColDict(db, out_table)
342
    
343
    exc_strs = set()
344
    def log_exc(e):
345
        e_str = exc.str_(e, first_line_only=True)
346
        log_debug('Caught exception: '+e_str)
347
        assert e_str not in exc_strs # avoid infinite loops
348
        exc_strs.add(e_str)
349
    
350
    def remove_all_rows():
351
        log_debug('Ignoring all rows')
352
        limit_ref[0] = 0 # just create an empty pkeys table
353
    
354
    def ignore_cond(cond, e):
355
        out_table_cols = sql_gen.ColDict(db, out_table)
356
        out_table_cols.update(util.dict_subset_right_join({},
357
            sql.table_cols(db, out_table)))
358
        
359
        in_cols = []
360
        cond = sql.map_expr(db, cond, mapping, in_cols)
361
        cond = sql.map_expr(db, cond, out_table_cols)
362
        
363
        track_data_error(db, errors_table_, sql_gen.cols_srcs(in_cols), None,
364
            e.cause.pgcode,
365
            strings.ensure_newl(e.cause.pgerror)+'condition: '+cond)
366
        
367
        not_cond = sql_gen.NotCond(sql_gen.CustomCode(cond))
368
        log_debug('Ignoring rows where '+strings.as_tt(not_cond.to_str(db)))
369
        sql.delete(db, insert_in_table, not_cond)
370
    
371
    not_null_cols = set()
372
    def ignore(in_col, value, e):
373
        in_col = sql_gen.with_table(in_col, insert_in_table)
374
        
375
        track_data_error(db, errors_table_, in_col.srcs, value,
376
            e.cause.pgcode, e.cause.pgerror)
377
        log_debug('Ignoring rows with '+strings.as_tt(repr(in_col))+' = '
378
            +strings.as_tt(repr(value)))
379
        
380
        sql.add_index(db, in_col, insert_in_table) # enable fast filtering
381
        if value != None and in_col not in not_null_cols:
382
            # Try just mapping the value to NULL
383
            sql.update(db, insert_in_table, [(in_col, None)],
384
                sql_gen.ColValueCond(in_col, value))
385
        else:
386
            sql.delete(db, insert_in_table, sql_gen.ColValueCond(in_col, value))
387
            if value == None: not_null_cols.add(in_col)
388
    
389
    if not is_literals:
390
        def insert_pkeys_table(which):
391
            return sql_gen.Table(sql_gen.concat(in_table.name,
392
                '_insert_'+which+'_pkeys'))
393
        insert_out_pkeys = insert_pkeys_table('out')
394
        insert_in_pkeys = insert_pkeys_table('in')
395
    
396
    # Do inserts and selects
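    # Retry strategy: attempt the INSERT SELECT (or function call); when it
    # fails with a recognized data error, fix or remove the offending input rows
    # (or add a cast, or switch to comparing on the duplicated key's columns)
    # and try again. log_exc() asserts that the same error is never seen twice,
    # which would otherwise mean an infinite loop.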
    while True:
        has_joins = join_cols != {}
        
        if limit_ref[0] == 0: # special case
            assert not has_joins
            
            if is_literals: return None
            log_debug('Creating an empty output pkeys table')
            cur = sql.run_query_into(db, sql.mk_select(db, out_table,
                [out_pkey], order_by=None, limit=0), into=insert_out_pkeys)
            break # don't do main case
        
        log_debug('Trying to insert new rows')
        
        # Prepare to insert new rows
        if is_function:
            log_debug('Calling function on input rows')
            args = dict(((k.name, v) for k, v in mapping.iteritems()))
            func_call = sql_gen.NamedCol(out_pkey,
                sql_gen.FunctionCall(out_table, **args))
            def insert_func_call(limit=None):
                insert_into_pkeys(input_joins, [in_pkey_col, func_call],
                    limit=limit, recover=True)
            
            if not is_literals:
                # Create empty pkeys table so its row type can be used
                insert_func_call(limit=0)
        else:
            insert_args = dict(recover=True, cacheable=False)
            if has_joins:
                insert_args.update(dict(ignore=True))
            else:
                insert_args.update(dict(returning=out_pkey))
                if not is_literals:
                    insert_args.update(dict(into=insert_out_pkeys))
            main_select = mk_main_select([insert_in_table], [sql_gen.with_table(
                c, insert_in_table) for c in mapping.values()])
        
        try:
            cur = None
            if is_function:
                if is_literals: cur = sql.select(db, fields=[func_call])
                else: insert_func_call()
            else:
                cur = sql.insert_select(db, out_table, mapping.keys(),
                    main_select, **insert_args)
            break # insert successful
        except sql.MissingCastException, e:
            log_exc(e)
            
            out_col = e.col
            type_ = e.type
            
            log_debug('Casting '+strings.as_tt(out_col)+' input to '
                +strings.as_tt(type_))
            in_col = mapping[out_col]
            while True:
                try:
                    mapping[out_col] = cast_temp_col(db, type_, in_col,
                        errors_table_)
                    break # cast successful
                except sql.InvalidValueException, e:
                    log_exc(e)
                    
                    ignore(in_col, e.value, e)
        except sql.DuplicateKeyException, e:
            log_exc(e)
            
            # Different rows violating different unique constraints not
            # supported
            assert not join_cols
            
            join_cols.update(util.dict_subset_right_join(mapping, e.cols))
            log_debug('Ignoring existing rows, comparing on these columns:\n'
                +strings.as_inline_table(join_cols, ustr=col_ustr))
            
            if is_literals:
                return sql.value(sql.select(db, out_table, [out_pkey_col],
                    mapping, order_by=None))
            
            # Uniquify input table to avoid internal duplicate keys
            insert_in_table = sql.distinct_table(db, insert_in_table,
                join_cols.values())
            insert_in_tables.append(insert_in_table)
        except sql.NullValueException, e:
            log_exc(e)
            
            out_col, = e.cols
            try: in_col = mapping[out_col]
            except KeyError:
                msg = 'Missing mapping for NOT NULL column '+out_col
                log_debug(msg)
                if default == None: on_error(SyntaxError(msg)) # required col
                remove_all_rows()
            else: ignore(in_col, None, e)
        except sql.CheckException, e:
            log_exc(e)
            
            ignore_cond(e.cond, e)
        except sql.InvalidValueException, e:
            log_exc(e)
            
            for in_col in mapping.values(): ignore(in_col, e.value, e)
        except psycopg2.extensions.TransactionRollbackError, e:
            log_exc(e)
            # retry
        except sql.DatabaseErrors, e:
            log_exc(e)
            
            log_debug('No handler for exception')
            on_error(e)
            remove_all_rows()
        # after exception handled, rerun loop with additional constraints
    
    if cur != None and row_ct_ref != None and cur.rowcount >= 0:
        row_ct_ref[0] += cur.rowcount
    
    if is_literals_or_function: pass # pkeys table already created
    elif has_joins:
        select_joins = input_joins+[sql_gen.Join(out_table, join_cols)]
        log_debug('Getting output table pkeys of existing/inserted rows')
        insert_into_pkeys(select_joins, pkeys_cols)
    else:
        sql.add_row_num(db, insert_out_pkeys) # for joining with input pkeys
        
        log_debug('Getting input table pkeys of inserted rows')
        # Note that mk_main_select() does not use ORDER BY. Instead, assume that
        # since the SELECT query is identical to the one used in INSERT SELECT,
        # its rows will be retrieved in the same order.
        sql.run_query_into(db, mk_main_select(input_joins, [in_pkey]),
            into=insert_in_pkeys)
        sql.add_row_num(db, insert_in_pkeys) # for joining with output pkeys
        
        assert sql.table_row_count(db, insert_out_pkeys) == sql.table_row_count(
            db, insert_in_pkeys)
        
        log_debug('Combining output and input pkeys in inserted order')
        pkey_joins = [insert_in_pkeys, sql_gen.Join(insert_out_pkeys,
            {sql.row_num_col: sql_gen.join_same_not_null})]
        insert_into_pkeys(pkey_joins, pkeys_names)
        
        sql.empty_temp(db, [insert_out_pkeys, insert_in_pkeys])
    
    if not is_literals_or_function:
        log_debug('Setting pkeys of missing rows to '
            +strings.as_tt(repr(default)))
        missing_rows_joins = [full_in_table, sql_gen.Join(into,
            {in_pkey: sql_gen.join_same_not_null}, sql_gen.filter_out)]
            # must use join_same_not_null or query will take forever
        insert_into_pkeys(missing_rows_joins,
            [sql_gen.Col(in_pkey, full_in_table),
            sql_gen.NamedCol(out_pkey, default)])
    # otherwise, there is already an entry for every row
    
    if is_literals: return sql.value(cur)
    else:
        assert (sql.table_row_count(db, into)
            == sql.table_row_count(db, full_in_table))
        
        sql.empty_temp(db, insert_in_tables+[full_in_table])
        
        srcs = []
        if is_func: srcs = sql_gen.cols_srcs(in_cols)
        return sql_gen.Col(out_pkey, into, srcs)
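# Example usage (a minimal sketch; all table and column names are hypothetical):
#     in_table = sql_gen.Table('specimens_staging')
#     row_ct = [0]
#     pkeys_col = put_table(db, 'taxonoccurrence', [in_table],
#         {'catalognumber': sql_gen.Col('catalog_no', in_table)},
#         row_ct_ref=row_ct)
# pkeys_col references the pkeys table mapping each input row's pkey to the
# pkey of the inserted (or matching existing) taxonoccurrence row.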