# Database import/export

import operator

import exc
import dicts
import sql
import sql_gen
import strings
import util

##### Data cleanup

def cleanup_table(db, table, cols):
    table = sql_gen.as_Table(table)
    cols = map(sql_gen.as_Col, cols)

    expr = ('nullif(nullif(trim(both from %s), '+db.esc_value('')+'), '
        +db.esc_value(r'\N')+')')
    changes = [(v, sql_gen.CustomCode(expr % v.to_str(db)))
        for v in cols]

    sql.update(db, table, changes, in_place=True)
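
# Usage sketch (hypothetical table and column names): rewrite each listed column
# in place so that whitespace-only values, empty strings, and the \N NULL marker
# all become SQL NULL:
#     cleanup_table(db, 'specimens_staging', ['collector', 'locality'])
# `db` is an open connection wrapper from this codebase's sql module.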

##### Error tracking

def track_data_error(db, errors_table, cols, value, error_code, error):
    '''
    @param errors_table If None, does nothing.
    '''
    if errors_table == None or cols == (): return

    for col in cols:
        try:
            sql.insert(db, errors_table, dict(column=col.name, value=value,
                error_code=error_code, error=error), recover=True,
                cacheable=True, log_level=4)
        except sql.DuplicateKeyException: pass
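
# Usage sketch (hypothetical values): record one bad input value against each of
# its source columns; duplicate reports are silently skipped:
#     track_data_error(db, errors_table_, (sql_gen.Col('lat', 'staging'),),
#         '91.5', '22003', 'latitude out of range')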

def data_exception_handler(db, return_, srcs=[], errors_table=None):
    '''Handles a data_exception by saving the error or converting it to a
    warning, and returning NULL.
    @param return_ Statement to return a default value in case of error
    @param srcs The column names for the errors table
    @param errors_table None|sql_gen.Table
    @pre The invalid value must be in a local variable "value" of type text.
    '''
    return_ = sql_gen.as_Code(return_)

    save_errors = errors_table != None and srcs
    handler = ''
    if save_errors:
        errors_table_cols = map(sql_gen.Col,
            ['column', 'value', 'error_code', 'error'])
        col_names_query = sql.mk_select(db, sql_gen.NamedValues('c', None,
            [[c.name] for c in srcs]), order_by=None)
        insert_query = sql.mk_insert_select(db, errors_table, errors_table_cols,
            sql_gen.Values(errors_table_cols).to_str(db))+';\n'
        handler = '''\
-- Save error in errors table.
DECLARE
    error_code text := SQLSTATE;
    error text := SQLERRM;
BEGIN
    -- Insert the value and error for *each* source column.
'''+strings.indent(sql_gen.RowExcIgnore('text', col_names_query, insert_query,
    row_var=errors_table_cols[0]).to_str(db))+'''
END;

'''+return_.to_str(db)
    else: handler = sql_gen.ExcToWarning(return_)
    return sql_gen.ExcHandler('data_exception', handler)
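
# Usage sketch: cast() below wraps a PL/pgSQL body in the returned handler, e.g.
#     handler = data_exception_handler(db, 'RETURN NULL;\n', srcs, errors_table)
#     body = handler.to_str(db, 'RETURN value::date;\n')  # hypothetical body
# so that a failed cast of the local `value` is logged per source column (when an
# errors table is given) and the function returns NULL instead of raising.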

def cast(db, type_, col, errors_table=None):
    '''Casts an (unrenamed) column or value.
    If errors_table set and col has srcs, saves errors in errors_table (using
    col's srcs attr as source columns). Otherwise, converts errors to warnings.
    @param col str|sql_gen.Col|sql_gen.Literal
    @param errors_table None|sql_gen.Table|str
    '''
    col = sql_gen.as_Col(col)

    # Don't convert exceptions to warnings for user-supplied constants
    if isinstance(col, sql_gen.Literal): return sql_gen.Cast(type_, col)

    assert not isinstance(col, sql_gen.NamedCol)

    function_name = strings.first_word(type_)
    srcs = col.srcs
    save_errors = (errors_table != None and isinstance(col, sql_gen.Col)
        and col.srcs != ())
    if save_errors:
        srcs = map(sql_gen.to_name_only_col, col.srcs)
        function_name = str(sql_gen.FunctionCall(function_name, *srcs))
    function = db.TempFunction(function_name)

    # Create function definition
    modifiers = 'STRICT'
    if not save_errors: modifiers = 'IMMUTABLE '+modifiers
    handler = data_exception_handler(db, 'RETURN NULL;\n', srcs, errors_table)
    body = sql_gen.CustomCode(handler.to_str(db, '''\
/* The explicit cast to the return type is needed to make the cast happen
inside the try block. (Implicit casts to the return type happen at the end
of the function, outside any block.) */
RETURN value::'''+type_+''';
'''))
    body.lang='plpgsql'
    sql.define_func(db, sql_gen.FunctionDef(function, type_, body,
        [sql_gen.FunctionParam('value', 'text')], modifiers))

    return sql_gen.FunctionCall(function, col)
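
# Usage sketch (hypothetical column): wrap a staging column in a NULL-on-error
# cast to double precision; failures are saved or downgraded to warnings as
# described in the docstring above:
#     expr = cast(db, 'double precision', sql_gen.Col('elevation_m', 'staging'),
#         errors_table=errors_table(db, 'staging'))
# The returned sql_gen.FunctionCall can be used wherever a column expression is
# expected.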

def cast_temp_col(db, type_, col, errors_table=None):
    '''Like cast(), but creates a new column with the cast values if the input
    is a column.
    @return The new column or cast value
    '''
    def cast_(col): return cast(db, type_, col, errors_table)

    try: col = sql_gen.underlying_col(col)
    except sql_gen.NoUnderlyingTableException: return sql_gen.wrap(cast_, col)

    table = col.table
    new_col = sql_gen.suffixed_col(col, '::'+strings.first_word(type_))
    expr = cast_(col)

    # Add column
    new_typed_col = sql_gen.TypedCol(new_col.name, type_)
    sql.add_col(db, table, new_typed_col, comment=repr(col)+'::'+type_)
    new_col.name = new_typed_col.name # propagate any renaming

    sql.update(db, table, [(new_col, expr)], in_place=True, recover=True)

    return new_col
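
# Usage sketch (hypothetical names): add a `date_collected::date` column beside
# the original text column and fill it with the cast values:
#     date_col = cast_temp_col(db, 'date',
#         sql_gen.Col('date_collected', in_table), errors_table_)
# A value with no underlying table just gets the wrapped cast value back.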

def errors_table(db, table, if_exists=True):
    '''
    @param if_exists If set, returns None if the errors table doesn't exist
    @return None|sql_gen.Table
    '''
    table = sql_gen.as_Table(table)
    if table.srcs != (): table = table.srcs[0]

    errors_table = sql_gen.suffixed_table(table, '.errors')
    if if_exists and not sql.table_exists(db, errors_table): return None
    return errors_table
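
# Usage sketch: look up the companion '.errors' table for an input table, as
# put_table() does below:
#     errors_table_ = errors_table(db, in_tables0)
# Returns None when if_exists is set and no such table has been created.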

##### Import

def put(db, table, row, pkey_=None, row_ct_ref=None):
    '''Recovers from errors.
    Only works under PostgreSQL (uses INSERT RETURNING).
    '''
    row = sql_gen.ColDict(db, table, row)
    if pkey_ == None: pkey_ = sql.pkey(db, table, recover=True)

    try:
        cur = sql.insert(db, table, row, pkey_, recover=True, log_level=3.5)
        if row_ct_ref != None and cur.rowcount >= 0:
            row_ct_ref[0] += cur.rowcount
        return sql.value(cur)
    except sql.DuplicateKeyException, e:
        row = sql_gen.ColDict(db, table,
            util.dict_subset_right_join(row, e.cols))
        return sql.value(sql.select(db, table, [pkey_], row, recover=True,
            log_level=3.5))
    except sql.NullValueException: return None
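
# Usage sketch (hypothetical table/columns): insert one row and get its pkey,
# falling back to a SELECT when an equivalent row already exists:
#     row_ct = [0]
#     party_id = put(db, 'party', {'organizationname': 'NYBG'},
#         row_ct_ref=row_ct)
# row_ct[0] is incremented by the number of rows actually inserted.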

def get(db, table, row, pkey, row_ct_ref=None, create=False):
    '''Recovers from errors'''
    try:
        return sql.value(sql.select(db, table, [pkey], row, limit=1,
            recover=True))
    except StopIteration:
        if not create: raise
        return put(db, table, row, pkey, row_ct_ref) # insert new row
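
# Usage sketch (hypothetical values): fetch a row's pkey, creating the row if it
# is missing:
#     party_id = get(db, 'party', {'organizationname': 'NYBG'}, 'party_id',
#         create=True)
# With create=False (the default), a missing row propagates StopIteration.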

def is_func_result(col):
    return col.table.name.find('(') >= 0 and col.name == 'result'

def into_table_name(out_table, in_tables0, mapping, is_func):
    def in_col_str(in_col):
        in_col = sql_gen.remove_col_rename(in_col)
        if isinstance(in_col, sql_gen.Col):
            table = in_col.table
            if table == in_tables0:
                in_col = sql_gen.to_name_only_col(in_col)
            elif is_func_result(in_col): in_col = table # omit col name
        return str(in_col)

    str_ = str(out_table)
    if is_func:
        str_ += '('

        try: value_in_col = mapping['value']
        except KeyError:
            str_ += ', '.join((str(k)+'='+in_col_str(v)
                for k, v in mapping.iteritems()))
        else: str_ += in_col_str(value_in_col)

        str_ += ')'
    else:
        out_col = 'rank'
        try: in_col = mapping[out_col]
        except KeyError: str_ += '_pkeys'
        else: # has a rank column, so hierarchical
            str_ += '['+str(out_col)+'='+in_col_str(in_col)+']'
    return str_
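
# Naming sketch (hypothetical names): a plain output table with no 'rank' mapping
# yields e.g. 'plot_pkeys'; a 'rank' mapping appends '[rank=<input col>]'; and for
# a function the call is rendered, e.g. 'taxonrank(value_col)'.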

def put_table(db, out_table, in_tables, mapping, row_ct_ref=None, into=None,
    default=None, is_func=False, on_error=exc.raise_):
    '''Recovers from errors.
    Only works under PostgreSQL (uses INSERT RETURNING).
    IMPORTANT: Must be run at the *beginning* of a transaction.
    @param in_tables The main input table to select from, followed by a list of
        tables to join with it using the main input table's pkey
    @param mapping dict(out_table_col=in_table_col, ...)
        * out_table_col: str (*not* sql_gen.Col)
        * in_table_col: sql_gen.Col|literal-value
    @param into The table to contain the output and input pkeys.
        Defaults to `out_table.name+'_pkeys'`.
    @param default The *output* column to use as the pkey for missing rows.
        If this output column does not exist in the mapping, uses None.
    @param is_func Whether out_table is the name of a SQL function, not a table
    @return sql_gen.Col Where the output pkeys are made available
    '''
    import psycopg2.extensions

    out_table = sql_gen.as_Table(out_table)

    def log_debug(msg): db.log_debug(msg, level=1.5)
    def col_ustr(str_):
        return strings.repr_no_u(sql_gen.remove_col_rename(str_))

    log_debug('********** New iteration **********')
    log_debug('Inserting these input columns into '+strings.as_tt(
        out_table.to_str(db))+':\n'+strings.as_table(mapping, ustr=col_ustr))

    is_function = sql.function_exists(db, out_table)

    if is_function: out_pkey = 'result'
    else: out_pkey = sql.pkey(db, out_table, recover=True)
    out_pkey_col = sql_gen.as_Col(out_pkey, out_table)

    in_tables_ = in_tables[:] # don't modify input!
    try: in_tables0 = in_tables_.pop(0) # first table is separate
    except IndexError: in_tables0 = None
    else:
        in_pkey = sql.pkey(db, in_tables0, recover=True)
        in_pkey_col = sql_gen.as_Col(in_pkey, in_tables0)

    # Determine if can use optimization for only literal values
    is_literals = not reduce(operator.or_, map(sql_gen.is_table_col,
        mapping.values()), False)
    is_literals_or_function = is_literals or is_function

    if in_tables0 == None: errors_table_ = None
    else: errors_table_ = errors_table(db, in_tables0)

    # Create input joins from list of input tables
    input_joins = [in_tables0]+[sql_gen.Join(v,
        {in_pkey: sql_gen.join_same_not_null}) for v in in_tables_]

    if mapping == {} and not is_function: # need >= one column for INSERT SELECT
        mapping = {out_pkey: None} # ColDict will replace with default value

    if not is_literals:
        if into == None:
            into = into_table_name(out_table, in_tables0, mapping, is_func)
        into = sql_gen.as_Table(into)

        # Set column sources
        in_cols = filter(sql_gen.is_table_col, mapping.values())
        for col in in_cols:
            if col.table == in_tables0: col.set_srcs(sql_gen.src_self)

        log_debug('Joining together input tables into temp table')
        # Place in new table so don't modify input and for speed
        in_table = sql_gen.Table('in')
        mapping = dicts.join(mapping, sql.flatten(db, in_table, input_joins,
            in_cols, preserve=[in_pkey_col]))
        input_joins = [in_table]
        db.log_debug('Temp table: '+strings.as_tt(in_table.to_str(db)), level=2)

    mapping = sql_gen.ColDict(db, out_table, mapping)
        # after applying dicts.join() because that returns a plain dict

    # Resolve default value column
    if default != None:
        try: default = mapping[default]
        except KeyError:
            db.log_debug('Default value column '
                +strings.as_tt(strings.repr_no_u(default))
                +' does not exist in mapping, falling back to None', level=2.1)
            default = None

    # Save default values for all rows since in_table may have rows deleted
    if is_literals: pass
    elif is_function: full_in_table = in_table
    else:
        full_in_table = sql_gen.suffixed_table(in_table, '_full')
        full_in_table_cols = [in_pkey_col]
        if default != None:
            full_in_table_cols.append(default)
            default = sql_gen.with_table(default, full_in_table)
        sql.run_query_into(db, sql.mk_select(db, in_table, full_in_table_cols,
            order_by=None), into=full_in_table, add_pkey_=True)

    if not is_literals:
        pkeys_names = [in_pkey, out_pkey]
        pkeys_cols = [in_pkey_col, out_pkey_col]

    pkeys_table_exists_ref = [False]
    def insert_into_pkeys(joins, cols=None, limit=None, **kw_args):
        query = sql.mk_select(db, joins, cols, order_by=None, limit=limit)
        if pkeys_table_exists_ref[0]:
            sql.insert_select(db, into, pkeys_names, query, **kw_args)
        else:
            sql.run_query_into(db, query, into=into, add_pkey_=True, **kw_args)
            pkeys_table_exists_ref[0] = True

    limit_ref = [None]
    def mk_main_select(joins, cols):
        return sql.mk_select(db, joins, cols, limit=limit_ref[0], order_by=None)

    if is_literals: insert_in_table = None
    else:
        insert_in_table = in_table
        insert_in_tables = [insert_in_table]
    join_cols = sql_gen.ColDict(db, out_table)

    exc_strs = set()
    def log_exc(e):
        e_str = exc.str_(e, first_line_only=True)
        log_debug('Caught exception: '+e_str)
        assert e_str not in exc_strs # avoid infinite loops
        exc_strs.add(e_str)

    def remove_all_rows():
        log_debug('Ignoring all rows')
        limit_ref[0] = 0 # just create an empty pkeys table

    def ignore_cond(cond, e):
        out_table_cols = sql_gen.ColDict(db, out_table)
        out_table_cols.update(util.dict_subset_right_join({},
            sql.table_cols(db, out_table)))

        in_cols = []
        cond = sql.map_expr(db, cond, mapping, in_cols)
        cond = sql.map_expr(db, cond, out_table_cols)

        track_data_error(db, errors_table_, sql_gen.cols_srcs(in_cols), None,
            e.cause.pgcode,
            strings.ensure_newl(e.cause.pgerror)+'condition: '+cond)

        not_cond = sql_gen.NotCond(sql_gen.CustomCode(cond))
        log_debug('Ignoring rows where '+strings.as_tt(not_cond.to_str(db)))
        sql.delete(db, insert_in_table, not_cond)

    not_null_cols = set()
    def ignore(in_col, value, e):
        in_col = sql_gen.with_table(in_col, insert_in_table)

        track_data_error(db, errors_table_, in_col.srcs, value,
            e.cause.pgcode, e.cause.pgerror)
        log_debug('Ignoring rows with '+strings.as_tt(repr(in_col))+' = '
            +strings.as_tt(repr(value)))

        sql.add_index(db, in_col, insert_in_table) # enable fast filtering
        if value != None and in_col not in not_null_cols:
            # Try just mapping the value to NULL
            sql.update(db, insert_in_table, [(in_col, None)],
                sql_gen.ColValueCond(in_col, value))
        else:
            sql.delete(db, insert_in_table, sql_gen.ColValueCond(in_col, value))
            if value == None: not_null_cols.add(in_col)

    if not is_literals:
        def insert_pkeys_table(which):
            return sql_gen.Table(sql_gen.concat(in_table.name,
                '_insert_'+which+'_pkeys'))
        insert_out_pkeys = insert_pkeys_table('out')
        insert_in_pkeys = insert_pkeys_table('in')

    # Do inserts and selects
    while True:
        has_joins = join_cols != {}

        if limit_ref[0] == 0: # special case
            assert not has_joins

            if is_literals: return None
            log_debug('Creating an empty output pkeys table')
            cur = sql.run_query_into(db, sql.mk_select(db, out_table,
                [out_pkey], order_by=None, limit=0), into=insert_out_pkeys)
            break # don't do main case

        log_debug('Trying to insert new rows')

        # Prepare to insert new rows
        if is_function:
            log_debug('Calling function on input rows')
            args = dict(((k.name, v) for k, v in mapping.iteritems()))
            func_call = sql_gen.NamedCol(out_pkey,
                sql_gen.FunctionCall(out_table, **args))

            if not is_literals:
                # Create empty pkeys table so its row type can be used
                insert_into_pkeys(input_joins, [in_pkey_col, func_call],
                    limit=0, recover=True)

                # Create error handling wrapper function
                select_cols = [in_pkey_col]+args.values()
                args = dict(((k, sql_gen.with_table(v, sql_gen.Table('row')))
                    for k, v in args.iteritems()))
                func_call = sql_gen.FunctionCall(out_table, **args)
                wrapper = db.TempFunction(sql_gen.concat(into.name, '_wrap'))
                sql.define_func(db, sql_gen.FunctionDef(wrapper,
                    sql_gen.SetOf(into),
                    sql_gen.RowExcIgnore(sql_gen.RowType(in_table),
                        sql.mk_select(db, input_joins, order_by=None),
                        sql_gen.ReturnQuery(sql.mk_select(db,
                            fields=[sql_gen.Col(in_pkey, 'row'), func_call],
                            explain=False)),
                        exc_handler=sql_gen.plpythonu_error_handler)
                    ))
                wrapper_table = sql_gen.FunctionCall(wrapper)
        else:
            insert_args = dict(recover=True, cacheable=False)
            if has_joins:
                insert_args.update(dict(ignore=True))
            else:
                insert_args.update(dict(returning=out_pkey))
                if not is_literals:
                    insert_args.update(dict(into=insert_out_pkeys))
            main_select = mk_main_select([insert_in_table], [sql_gen.with_table(
                c, insert_in_table) for c in mapping.values()])

        try:
            cur = None
            if is_function:
                if is_literals: cur = sql.select(db, fields=[func_call])
                else: insert_into_pkeys(wrapper_table, recover=True)
            else:
                cur = sql.insert_select(db, out_table, mapping.keys(),
                    main_select, **insert_args)
            break # insert successful
        except sql.MissingCastException, e:
            log_exc(e)

            out_col = e.col
            type_ = e.type

            log_debug('Casting '+strings.as_tt(out_col)+' input to '
                +strings.as_tt(type_))
            in_col = mapping[out_col]
            while True:
                try:
                    mapping[out_col] = cast_temp_col(db, type_, in_col,
                        errors_table_)
                    break # cast successful
                except sql.InvalidValueException, e:
                    log_exc(e)

                    ignore(in_col, e.value, e)
        except sql.DuplicateKeyException, e:
            log_exc(e)

            # Different rows violating different unique constraints not
            # supported
            assert not join_cols

            join_cols.update(util.dict_subset_right_join(mapping, e.cols))
            log_debug('Ignoring existing rows, comparing on these columns:\n'
                +strings.as_inline_table(join_cols, ustr=col_ustr))

            if is_literals:
                return sql.value(sql.select(db, out_table, [out_pkey_col],
                    mapping, order_by=None))

            # Uniquify input table to avoid internal duplicate keys
            insert_in_table = sql.distinct_table(db, insert_in_table,
                join_cols.values())
            insert_in_tables.append(insert_in_table)
        except sql.NullValueException, e:
            log_exc(e)

            out_col, = e.cols
            try: in_col = mapping[out_col]
            except KeyError:
                msg = 'Missing mapping for NOT NULL column '+out_col
                log_debug(msg)
                if default == None: on_error(SyntaxError(msg)) # required col
                remove_all_rows()
            else: ignore(in_col, None, e)
        except sql.CheckException, e:
            log_exc(e)

            ignore_cond(e.cond, e)
        except sql.InvalidValueException, e:
            log_exc(e)

            for in_col in mapping.values(): ignore(in_col, e.value, e)
        except psycopg2.extensions.TransactionRollbackError, e:
            log_exc(e)
            # retry
        except sql.DatabaseErrors, e:
            log_exc(e)

            log_debug('No handler for exception')
            on_error(e)
            remove_all_rows()
        # after exception handled, rerun loop with additional constraints

    if cur != None and row_ct_ref != None and cur.rowcount >= 0:
        row_ct_ref[0] += cur.rowcount

    if is_literals_or_function: pass # pkeys table already created
    elif has_joins:
        select_joins = input_joins+[sql_gen.Join(out_table, join_cols)]
        log_debug('Getting output table pkeys of existing/inserted rows')
        insert_into_pkeys(select_joins, pkeys_cols)
    else:
        sql.add_row_num(db, insert_out_pkeys) # for joining with input pkeys

        log_debug('Getting input table pkeys of inserted rows')
        # Note that mk_main_select() does not use ORDER BY. Instead, assume that
        # since the SELECT query is identical to the one used in INSERT SELECT,
        # its rows will be retrieved in the same order.
        sql.run_query_into(db, mk_main_select(input_joins, [in_pkey]),
            into=insert_in_pkeys)
        sql.add_row_num(db, insert_in_pkeys) # for joining with output pkeys

        assert sql.table_row_count(db, insert_out_pkeys) == sql.table_row_count(
            db, insert_in_pkeys)

        log_debug('Combining output and input pkeys in inserted order')
        pkey_joins = [insert_in_pkeys, sql_gen.Join(insert_out_pkeys,
            {sql.row_num_col: sql_gen.join_same_not_null})]
        insert_into_pkeys(pkey_joins, pkeys_names)

        sql.empty_temp(db, [insert_out_pkeys, insert_in_pkeys])

    if not is_literals_or_function:
        log_debug('Setting pkeys of missing rows to '
            +strings.as_tt(repr(default)))
        missing_rows_joins = [full_in_table, sql_gen.Join(into,
            {in_pkey: sql_gen.join_same_not_null}, sql_gen.filter_out)]
            # must use join_same_not_null or query will take forever
        insert_into_pkeys(missing_rows_joins,
            [sql_gen.Col(in_pkey, full_in_table),
            sql_gen.NamedCol(out_pkey, default)])
    # otherwise, there is already an entry for every row

    if is_literals: return sql.value(cur)
    else:
        assert (sql.table_row_count(db, into)
            == sql.table_row_count(db, full_in_table))

        sql.empty_temp(db, insert_in_tables+[full_in_table])

        srcs = []
        if is_func: srcs = sql_gen.cols_srcs(in_cols)
        return sql_gen.Col(out_pkey, into, srcs)
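
# Usage sketch (hypothetical tables/columns): import rows from a staging table
# into an output table, collecting the row count and a pkey-mapping column:
#     row_ct = [0]
#     pkeys_col = put_table(db, 'plot', [in_table],
#         {'plotname': sql_gen.Col('plot_name', in_table)}, row_ct_ref=row_ct)
# pkeys_col refers to a temp table pairing each input pkey with the matching
# output pkey (see @return above); call this at the beginning of a transaction.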