# Database import/export

import operator

import exc
import dicts
import sql
import sql_gen
import strings
import util

##### Data cleanup

def cleanup_table(db, table, cols):
    table = sql_gen.as_Table(table)
    cols = map(sql_gen.as_Col, cols)
    
    # Trim whitespace, then map both the empty string and the literal '\N'
    # (the NULL marker in text-format dumps) to NULL
    expr = ('nullif(nullif(trim(both from %s), '+db.esc_value('')+'), '
        +db.esc_value(r'\N')+')')
    changes = [(v, sql_gen.CustomCode(expr % v.to_str(db)))
        for v in cols]
    
    sql.update(db, table, changes, in_place=True)
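
# Minimal usage sketch of cleanup_table() (table and column names are
# hypothetical; assumes `db` is an open connection wrapper as used throughout
# this module):
#
#     cleanup_table(db, 'specimens', ['collector', 'locality'])
#
# Each listed column is rewritten in place: surrounding whitespace is trimmed
# and values equal to '' or '\N' become NULL.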

##### Error tracking

def track_data_error(db, errors_table, cols, value, error_code, error):
    '''Saves the value and error to errors_table, once per source column.
    @param errors_table If None, does nothing.
    '''
    if errors_table == None or cols == (): return
    
    for col in cols:
        try:
            sql.insert(db, errors_table, dict(column=col.name, value=value,
                error_code=error_code, error=error), recover=True,
                cacheable=True, log_level=4)
        except sql.DuplicateKeyException: pass
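
# Sketch (hypothetical values; assumes sql_gen.Col objects as used elsewhere in
# this module): record one bad value against its two source columns.
#
#     track_data_error(db, errors_table_,
#         (sql_gen.Col('lat_deg'), sql_gen.Col('long_deg')),
#         '999', '22003', 'value out of range')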

def data_exception_handler(db, srcs=[], errors_table=None):
    '''Handles a data_exception by saving the error or converting it to a
    warning, and returning NULL.
    @param srcs The column names for the errors table
    @param errors_table None|sql_gen.Table
    @pre The invalid value must be in a local variable of type text.
    '''
    save_errors = errors_table != None and srcs
    handler = ''
    if save_errors:
        errors_table_cols = map(sql_gen.Col,
            ['column', 'value', 'error_code', 'error'])
        col_names_query = sql.mk_select(db, sql_gen.NamedValues('c', None,
            [[c.name] for c in srcs]), order_by=None)
        insert_query = sql.mk_insert_select(db, errors_table, errors_table_cols,
            sql_gen.Values(errors_table_cols).to_str(db))+';\n'
        handler += '''\
-- Save error in errors table.
DECLARE
    error_code text := SQLSTATE;
    error text := SQLERRM;
BEGIN
    -- Insert the value and error for *each* source column.
'''+strings.indent(sql_gen.RowExcIgnore('text', col_names_query, insert_query,
    row_var=errors_table_cols[0]).to_str(db))+'''
END;
'''
    else:
        handler += '''\
RAISE WARNING '%', SQLERRM;
'''
    handler += '''\
RETURN NULL;
'''
    return sql_gen.ExcHandler('data_exception', handler)
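
# The returned handler is consumed by cast() below, roughly as:
#
#     handler = data_exception_handler(db, srcs, errors_table)
#     body = handler.to_str(db, 'RETURN value::text;\n')
#
# i.e. the ExcHandler wraps the given body in a plpgsql block whose
# data_exception branch either logs to the errors table or raises a warning,
# and then returns NULL.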

def cast(db, type_, col, errors_table=None):
    '''Casts an (unrenamed) column or value.
    If errors_table set and col has srcs, saves errors in errors_table (using
    col's srcs attr as source columns). Otherwise, converts errors to warnings.
    @param col str|sql_gen.Col|sql_gen.Literal
    @param errors_table None|sql_gen.Table|str
    '''
    col = sql_gen.as_Col(col)
    
    # Don't convert exceptions to warnings for user-supplied constants
    if isinstance(col, sql_gen.Literal): return sql_gen.Cast(type_, col)
    
    assert not isinstance(col, sql_gen.NamedCol)
    
    function_name = strings.first_word(type_)
    srcs = col.srcs
    save_errors = (errors_table != None and isinstance(col, sql_gen.Col)
        and col.srcs != ())
    if save_errors:
        srcs = map(sql_gen.to_name_only_col, col.srcs)
        function_name = str(sql_gen.FunctionCall(function_name, *srcs))
    function = db.TempFunction(function_name)
    
    # Create function definition
    modifiers = 'STRICT'
    if not save_errors: modifiers = 'IMMUTABLE '+modifiers
    handler = data_exception_handler(db, srcs, errors_table)
    body = sql_gen.CustomCode(handler.to_str(db, '''\
/* The explicit cast to the return type is needed to make the cast happen
inside the try block. (Implicit casts to the return type happen at the end
of the function, outside any block.) */
RETURN value::'''+type_+''';
'''))
    body.lang='plpgsql'
    def_ = sql_gen.FunctionDef(function, type_, body,
        [sql_gen.FunctionParam('value', 'text')], modifiers)
    
    # Create function
    while True:
        try:
            sql.run_query(db, def_.to_str(db), recover=True, cacheable=True,
                log_ignore_excs=(sql.DuplicateException,))
            break # successful
        except sql.DuplicateException:
            function.name = sql.next_version(function.name)
            # try again with next version of name
    
    return sql_gen.FunctionCall(function, col)
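
# Usage sketch (hypothetical table/column; assumes col.srcs has been set, as
# put_table() does below): cast an input column to double precision, sending
# unparseable values to the errors table instead of aborting the insert.
#
#     in_col = sql_gen.Col('elevation_verbatim', in_table)
#     expr = cast(db, 'double precision', in_col, errors_table_)
#     # expr is a sql_gen.FunctionCall wrapping a temp plpgsql cast function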

def cast_temp_col(db, type_, col, errors_table=None):
    '''Like cast(), but creates a new column with the cast values if the input
    is a column.
    @return The new column or cast value
    '''
    def cast_(col): return cast(db, type_, col, errors_table)
    
    try: col = sql_gen.underlying_col(col)
    except sql_gen.NoUnderlyingTableException: return sql_gen.wrap(cast_, col)
    
    table = col.table
    new_col = sql_gen.suffixed_col(col, '::'+strings.first_word(type_))
    expr = cast_(col)
    
    # Add column
    new_typed_col = sql_gen.TypedCol(new_col.name, type_)
    sql.add_col(db, table, new_typed_col, comment=repr(col)+'::'+type_)
    new_col.name = new_typed_col.name # propagate any renaming
    
    sql.update(db, table, [(new_col, expr)], in_place=True, recover=True)
    
    return new_col
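
# cast_temp_col() is what put_table() falls back to on a MissingCastException:
# it adds a '::'-suffixed copy of the column to col's table, fills it using
# cast(), and returns the new column so the mapping can point at it, e.g.
#
#     mapping[out_col] = cast_temp_col(db, type_, in_col, errors_table_)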

def errors_table(db, table, if_exists=True):
    '''
    @param if_exists If set, returns None if the errors table doesn't exist
    @return None|sql_gen.Table
    '''
    table = sql_gen.as_Table(table)
    if table.srcs != (): table = table.srcs[0]
    
    errors_table = sql_gen.suffixed_table(table, '.errors')
    if if_exists and not sql.table_exists(db, errors_table): return None
    return errors_table
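
# Sketch: the errors table is the input table's name with a '.errors' suffix
# (via sql_gen.suffixed_table()); None is returned if it doesn't exist yet.
#
#     errors_table_ = errors_table(db, 'specimens')  # hypothetical table name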

##### Import

def put(db, table, row, pkey_=None, row_ct_ref=None):
    '''Recovers from errors.
    Only works under PostgreSQL (uses INSERT RETURNING).
    '''
    row = sql_gen.ColDict(db, table, row)
    if pkey_ == None: pkey_ = sql.pkey(db, table, recover=True)
    
    try:
        cur = sql.insert(db, table, row, pkey_, recover=True, log_level=3.5)
        if row_ct_ref != None and cur.rowcount >= 0:
            row_ct_ref[0] += cur.rowcount
        return sql.value(cur)
    except sql.DuplicateKeyException, e:
        row = sql_gen.ColDict(db, table,
            util.dict_subset_right_join(row, e.cols))
        return sql.value(sql.select(db, table, [pkey_], row, recover=True,
            log_level=3.5))
    except sql.NullValueException: return None

def get(db, table, row, pkey, row_ct_ref=None, create=False):
    '''Recovers from errors'''
    try:
        return sql.value(sql.select(db, table, [pkey], row, limit=1,
            recover=True))
    except StopIteration:
        if not create: raise
        return put(db, table, row, pkey, row_ct_ref) # insert new row
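
# Sketch of put()/get() (table and column names hypothetical): insert a row and
# get its pkey back, or look it up and insert only if missing.
#
#     row_ct = [0]
#     pkey_val = put(db, 'party', {'organizationname': 'NYBG'},
#         row_ct_ref=row_ct)
#     pkey_val = get(db, 'party', {'organizationname': 'NYBG'}, 'party_id',
#         create=True)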

def is_func_result(col):
    return col.table.name.find('(') >= 0 and col.name == 'result'

def into_table_name(out_table, in_tables0, mapping, is_func):
    def in_col_str(in_col):
        in_col = sql_gen.remove_col_rename(in_col)
        if isinstance(in_col, sql_gen.Col):
            table = in_col.table
            if table == in_tables0:
                in_col = sql_gen.to_name_only_col(in_col)
            elif is_func_result(in_col): in_col = table # omit col name
        return str(in_col)
    
    str_ = str(out_table)
    if is_func:
        str_ += '('
        
        try: value_in_col = mapping['value']
        except KeyError:
            str_ += ', '.join((str(k)+'='+in_col_str(v)
                for k, v in mapping.iteritems()))
        else: str_ += in_col_str(value_in_col)
        
        str_ += ')'
    else:
        out_col = 'rank'
        try: in_col = mapping[out_col]
        except KeyError: str_ += '_pkeys'
        else: # has a rank column, so hierarchical
            str_ += '['+str(out_col)+'='+in_col_str(in_col)+']'
    return str_
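
# Naming sketch (hypothetical names): for a function, the pkeys table is named
# like 'myfunc(<value column>)'; for a plain table it is '<out_table>_pkeys',
# or '<out_table>[rank=<col>]' when the mapping has a 'rank' column.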

def put_table(db, out_table, in_tables, mapping, row_ct_ref=None, into=None,
    default=None, is_func=False, on_error=exc.raise_):
    '''Recovers from errors.
    Only works under PostgreSQL (uses INSERT RETURNING).
    IMPORTANT: Must be run at the *beginning* of a transaction.
    @param in_tables The main input table to select from, followed by a list of
        tables to join with it using the main input table's pkey
    @param mapping dict(out_table_col=in_table_col, ...)
        * out_table_col: str (*not* sql_gen.Col)
        * in_table_col: sql_gen.Col|literal-value
    @param into The table to contain the output and input pkeys.
        Defaults to `out_table.name+'_pkeys'`.
    @param default The *output* column to use as the pkey for missing rows.
        If this output column does not exist in the mapping, uses None.
    @param is_func Whether out_table is the name of a SQL function, not a table
    @return sql_gen.Col Where the output pkeys are made available
    '''
    import psycopg2.extensions
    
    out_table = sql_gen.as_Table(out_table)
    
    def log_debug(msg): db.log_debug(msg, level=1.5)
    def col_ustr(str_):
        return strings.repr_no_u(sql_gen.remove_col_rename(str_))
    
    log_debug('********** New iteration **********')
    log_debug('Inserting these input columns into '+strings.as_tt(
        out_table.to_str(db))+':\n'+strings.as_table(mapping, ustr=col_ustr))
    
    is_function = sql.function_exists(db, out_table)
    
    if is_function: out_pkey = 'result'
    else: out_pkey = sql.pkey(db, out_table, recover=True)
    out_pkey_col = sql_gen.as_Col(out_pkey, out_table)
    
    in_tables_ = in_tables[:] # don't modify input!
    try: in_tables0 = in_tables_.pop(0) # first table is separate
    except IndexError: in_tables0 = None
    else:
        in_pkey = sql.pkey(db, in_tables0, recover=True)
        in_pkey_col = sql_gen.as_Col(in_pkey, in_tables0)
    
    # Determine if can use optimization for only literal values
    is_literals = not reduce(operator.or_, map(sql_gen.is_table_col,
        mapping.values()), False)
    is_literals_or_function = is_literals or is_function
    
    if in_tables0 == None: errors_table_ = None
    else: errors_table_ = errors_table(db, in_tables0)
    
    # Create input joins from list of input tables
    input_joins = [in_tables0]+[sql_gen.Join(v,
        {in_pkey: sql_gen.join_same_not_null}) for v in in_tables_]
    
    if mapping == {} and not is_function: # need >= one column for INSERT SELECT
        mapping = {out_pkey: None} # ColDict will replace with default value
    
    if not is_literals:
        if into == None:
            into = into_table_name(out_table, in_tables0, mapping, is_func)
        into = sql_gen.as_Table(into)
        
        # Set column sources
        in_cols = filter(sql_gen.is_table_col, mapping.values())
        for col in in_cols:
            if col.table == in_tables0: col.set_srcs(sql_gen.src_self)
        
        log_debug('Joining together input tables into temp table')
        # Place in new table so don't modify input and for speed
        in_table = sql_gen.Table('in')
        mapping = dicts.join(mapping, sql.flatten(db, in_table, input_joins,
            in_cols, preserve=[in_pkey_col]))
        input_joins = [in_table]
        db.log_debug('Temp table: '+strings.as_tt(in_table.to_str(db)), level=2)
    
    mapping = sql_gen.ColDict(db, out_table, mapping)
        # after applying dicts.join() because that returns a plain dict
    
    # Resolve default value column
    if default != None:
        try: default = mapping[default]
        except KeyError:
            db.log_debug('Default value column '
                +strings.as_tt(strings.repr_no_u(default))
                +' does not exist in mapping, falling back to None', level=2.1)
            default = None
    
    # Save default values for all rows since in_table may have rows deleted
    if is_literals: pass
    elif is_function: full_in_table = in_table
    else:
        full_in_table = sql_gen.suffixed_table(in_table, '_full')
        full_in_table_cols = [in_pkey_col]
        if default != None:
            full_in_table_cols.append(default)
            default = sql_gen.with_table(default, full_in_table)
        sql.run_query_into(db, sql.mk_select(db, in_table, full_in_table_cols,
            order_by=None), into=full_in_table, add_pkey_=True)
    
    if not is_literals:
        pkeys_names = [in_pkey, out_pkey]
        pkeys_cols = [in_pkey_col, out_pkey_col]
    
    pkeys_table_exists_ref = [False]
    def insert_into_pkeys(joins, cols, limit=None, **kw_args):
        query = sql.mk_select(db, joins, cols, order_by=None, limit=limit)
        if pkeys_table_exists_ref[0]:
            sql.insert_select(db, into, pkeys_names, query, **kw_args)
        else:
            sql.run_query_into(db, query, into=into, add_pkey_=True, **kw_args)
            pkeys_table_exists_ref[0] = True
    
    limit_ref = [None]
    def mk_main_select(joins, cols):
        return sql.mk_select(db, joins, cols, limit=limit_ref[0], order_by=None)
    
    if is_literals: insert_in_table = None
    else:
        insert_in_table = in_table
        insert_in_tables = [insert_in_table]
    join_cols = sql_gen.ColDict(db, out_table)
    
    exc_strs = set()
    def log_exc(e):
        e_str = exc.str_(e, first_line_only=True)
        log_debug('Caught exception: '+e_str)
        assert e_str not in exc_strs # avoid infinite loops
        exc_strs.add(e_str)
    
    def remove_all_rows():
        log_debug('Ignoring all rows')
        limit_ref[0] = 0 # just create an empty pkeys table
    
    def ignore_cond(cond, e):
        out_table_cols = sql_gen.ColDict(db, out_table)
        out_table_cols.update(util.dict_subset_right_join({},
            sql.table_cols(db, out_table)))
        
        in_cols = []
        cond = sql.map_expr(db, cond, mapping, in_cols)
        cond = sql.map_expr(db, cond, out_table_cols)
        
        track_data_error(db, errors_table_, sql_gen.cols_srcs(in_cols), None,
            e.cause.pgcode,
            strings.ensure_newl(e.cause.pgerror)+'condition: '+cond)
        
        not_cond = sql_gen.NotCond(sql_gen.CustomCode(cond))
        log_debug('Ignoring rows where '+strings.as_tt(not_cond.to_str(db)))
        sql.delete(db, insert_in_table, not_cond)
    
    not_null_cols = set()
    def ignore(in_col, value, e):
        in_col = sql_gen.with_table(in_col, insert_in_table)
        
        track_data_error(db, errors_table_, in_col.srcs, value,
            e.cause.pgcode, e.cause.pgerror)
        log_debug('Ignoring rows with '+strings.as_tt(repr(in_col))+' = '
            +strings.as_tt(repr(value)))
        
        sql.add_index(db, in_col, insert_in_table) # enable fast filtering
        if value != None and in_col not in not_null_cols:
            # Try just mapping the value to NULL
            sql.update(db, insert_in_table, [(in_col, None)],
                sql_gen.ColValueCond(in_col, value))
        else:
            sql.delete(db, insert_in_table, sql_gen.ColValueCond(in_col, value))
            if value == None: not_null_cols.add(in_col)
    
    if not is_literals:
        def insert_pkeys_table(which):
            return sql_gen.Table(sql_gen.concat(in_table.name,
                '_insert_'+which+'_pkeys'))
        insert_out_pkeys = insert_pkeys_table('out')
        insert_in_pkeys = insert_pkeys_table('in')
    
    # Do inserts and selects
    while True:
        has_joins = join_cols != {}
        
        if limit_ref[0] == 0: # special case
            assert not has_joins
            
            if is_literals: return None
            log_debug('Creating an empty output pkeys table')
            cur = sql.run_query_into(db, sql.mk_select(db, out_table,
                [out_pkey], order_by=None, limit=0), into=insert_out_pkeys)
            break # don't do main case
        
        log_debug('Trying to insert new rows')
        
        # Prepare to insert new rows
        if is_function:
            log_debug('Calling function on input rows')
            args = dict(((k.name, v) for k, v in mapping.iteritems()))
            func_call = sql_gen.NamedCol(out_pkey,
                sql_gen.FunctionCall(out_table, **args))
            def insert_func_call(limit=None):
                insert_into_pkeys(input_joins, [in_pkey_col, func_call],
                    limit=limit, recover=True)
            
            if not is_literals:
                # Create empty pkeys table so its row type can be used
                insert_func_call(limit=0)
        else:
            insert_args = dict(recover=True, cacheable=False)
            if has_joins:
                insert_args.update(dict(ignore=True))
            else:
                insert_args.update(dict(returning=out_pkey))
                if not is_literals:
                    insert_args.update(dict(into=insert_out_pkeys))
            main_select = mk_main_select([insert_in_table], [sql_gen.with_table(
                c, insert_in_table) for c in mapping.values()])
        
        try:
            cur = None
            if is_function:
                if is_literals: cur = sql.select(db, fields=[func_call])
                else: insert_func_call()
            else:
                cur = sql.insert_select(db, out_table, mapping.keys(),
                    main_select, **insert_args)
            break # insert successful
        except sql.MissingCastException, e:
            log_exc(e)
            
            out_col = e.col
            type_ = e.type
            
            log_debug('Casting '+strings.as_tt(out_col)+' input to '
                +strings.as_tt(type_))
            in_col = mapping[out_col]
            while True:
                try:
                    mapping[out_col] = cast_temp_col(db, type_, in_col,
                        errors_table_)
                    break # cast successful
                except sql.InvalidValueException, e:
                    log_exc(e)
                    
                    ignore(in_col, e.value, e)
        except sql.DuplicateKeyException, e:
            log_exc(e)
            
            # Different rows violating different unique constraints not
            # supported
            assert not join_cols
            
            join_cols.update(util.dict_subset_right_join(mapping, e.cols))
            log_debug('Ignoring existing rows, comparing on these columns:\n'
                +strings.as_inline_table(join_cols, ustr=col_ustr))
            
            if is_literals:
                return sql.value(sql.select(db, out_table, [out_pkey_col],
                    mapping, order_by=None))
            
            # Uniquify input table to avoid internal duplicate keys
            insert_in_table = sql.distinct_table(db, insert_in_table,
                join_cols.values())
            insert_in_tables.append(insert_in_table)
        except sql.NullValueException, e:
            log_exc(e)
            
            out_col, = e.cols
            try: in_col = mapping[out_col]
            except KeyError:
                msg = 'Missing mapping for NOT NULL column '+out_col
                log_debug(msg)
                if default == None: on_error(SyntaxError(msg)) # required col
                remove_all_rows()
            else: ignore(in_col, None, e)
        except sql.CheckException, e:
            log_exc(e)
            
            ignore_cond(e.cond, e)
        except sql.InvalidValueException, e:
            log_exc(e)
            
            for in_col in mapping.values(): ignore(in_col, e.value, e)
        except psycopg2.extensions.TransactionRollbackError, e:
            log_exc(e)
            # retry
        except sql.DatabaseErrors, e:
            log_exc(e)
            
            log_debug('No handler for exception')
            on_error(e)
            remove_all_rows()
        # after exception handled, rerun loop with additional constraints
    
    if cur != None and row_ct_ref != None and cur.rowcount >= 0:
        row_ct_ref[0] += cur.rowcount
    
    if is_literals_or_function: pass # pkeys table already created
    elif has_joins:
        select_joins = input_joins+[sql_gen.Join(out_table, join_cols)]
        log_debug('Getting output table pkeys of existing/inserted rows')
        insert_into_pkeys(select_joins, pkeys_cols)
    else:
        sql.add_row_num(db, insert_out_pkeys) # for joining with input pkeys
        
        log_debug('Getting input table pkeys of inserted rows')
        # Note that mk_main_select() does not use ORDER BY. Instead, assume that
        # since the SELECT query is identical to the one used in INSERT SELECT,
        # its rows will be retrieved in the same order.
        sql.run_query_into(db, mk_main_select(input_joins, [in_pkey]),
            into=insert_in_pkeys)
        sql.add_row_num(db, insert_in_pkeys) # for joining with output pkeys
        
        assert sql.table_row_count(db, insert_out_pkeys) == sql.table_row_count(
            db, insert_in_pkeys)
        
        log_debug('Combining output and input pkeys in inserted order')
        pkey_joins = [insert_in_pkeys, sql_gen.Join(insert_out_pkeys,
            {sql.row_num_col: sql_gen.join_same_not_null})]
        insert_into_pkeys(pkey_joins, pkeys_names)
        
        sql.empty_temp(db, [insert_out_pkeys, insert_in_pkeys])
    
    if not is_literals_or_function:
        log_debug('Setting pkeys of missing rows to '
            +strings.as_tt(repr(default)))
        missing_rows_joins = [full_in_table, sql_gen.Join(into,
            {in_pkey: sql_gen.join_same_not_null}, sql_gen.filter_out)]
            # must use join_same_not_null or query will take forever
        insert_into_pkeys(missing_rows_joins,
            [sql_gen.Col(in_pkey, full_in_table),
            sql_gen.NamedCol(out_pkey, default)])
    # otherwise, there is already an entry for every row
    
    if is_literals: return sql.value(cur)
    else:
        assert (sql.table_row_count(db, into)
            == sql.table_row_count(db, full_in_table))
        
        sql.empty_temp(db, insert_in_tables+[full_in_table])
        
        srcs = []
        if is_func: srcs = sql_gen.cols_srcs(in_cols)
        return sql_gen.Col(out_pkey, into, srcs)
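
# End-to-end sketch (all names hypothetical; assumes an in_table prepared by
# earlier steps): map one input column and one literal into out_table
# "plantname", accumulating the inserted row count.
#
#     row_ct = [0]
#     pkeys_col = put_table(db, 'plantname', [in_table],
#         {'plantname': sql_gen.Col('scientificname', in_table),
#         'reference_id': 1}, row_ct_ref=row_ct)
#     # pkeys_col points into the generated pkeys table, which pairs each input
#     # pkey with the matching output pkey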