# Database import/export

import operator

import exc
import dicts
import sql
import sql_gen
import strings
import util

##### Data cleanup

def cleanup_table(db, table, cols):
    table = sql_gen.as_Table(table)
    cols = map(sql_gen.as_Col, cols)
    
    expr = ('nullif(nullif(trim(both from %s), '+db.esc_value('')+'), '
        +db.esc_value(r'\N')+')')
    changes = [(v, sql_gen.CustomCode(expr % v.to_str(db)))
        for v in cols]
    
    sql.update(db, table, changes, in_place=True)
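
# Example (hypothetical table/column names; assumes `db` is an open connection
# from the sql module). This trims whitespace in the given columns and changes
# empty strings and the \N placeholder to NULL, in place:
#     cleanup_table(db, 'plots', ['plotname', 'notes'])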

##### Error tracking

def track_data_error(db, errors_table, cols, value, error_code, error):
    '''
    @param errors_table If None, does nothing.
    '''
    if errors_table == None or cols == (): return
    
    for col in cols:
        try:
            sql.insert(db, errors_table, dict(column=col.name, value=value,
                error_code=error_code, error=error), recover=True,
                cacheable=True, log_level=4)
        except sql.DuplicateKeyException: pass
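
# Example (hypothetical values; `errors_table_` would come from errors_table()
# below). Records the offending value once per source column, silently
# ignoring duplicate entries:
#     track_data_error(db, errors_table_, [sql_gen.Col('lat', 'in')], '91.5',
#         '22003', 'value out of range')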

def data_exception_handler(db, return_, srcs=[], errors_table=None):
    '''Handles a data_exception by saving the error or converting it to a
    warning, and returning NULL.
    @param return_ Statement to return a default value in case of error
    @param srcs The column names for the errors table
    @param errors_table None|sql_gen.Table
    @pre The invalid value must be in a local variable "value" of type text.
    '''
    return_ = sql_gen.as_Code(return_)
    
    save_errors = errors_table != None and srcs
    handler = ''
    if save_errors:
        errors_table_cols = map(sql_gen.Col,
            ['column', 'value', 'error_code', 'error'])
        col_names_query = sql.mk_select(db, sql_gen.NamedValues('c', None,
            [[c.name] for c in srcs]), order_by=None)
        insert_query = sql.mk_insert_select(db, errors_table, errors_table_cols,
            sql_gen.Values(errors_table_cols).to_str(db))+';\n'
        handler += '''\
-- Save error in errors table.
DECLARE
    error_code text := SQLSTATE;
    error text := SQLERRM;
BEGIN
    -- Insert the value and error for *each* source column.
'''+strings.indent(sql_gen.RowExcIgnore('text', col_names_query, insert_query,
    row_var=errors_table_cols[0]).to_str(db))+'''
END;

'''
    else:
        handler += '''\
RAISE WARNING '%', SQLERRM;
'''
    handler += return_.to_str(db)
    return sql_gen.ExcHandler('data_exception', handler)
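
# Example (as used by cast() below): wrap a default-returning statement in an
# exception handler that logs bad values to `errors_table_` for the given
# source columns:
#     handler = data_exception_handler(db, 'RETURN NULL;\n', srcs,
#         errors_table_)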

def cast(db, type_, col, errors_table=None):
    '''Casts an (unrenamed) column or value.
    If errors_table set and col has srcs, saves errors in errors_table (using
    col's srcs attr as source columns). Otherwise, converts errors to warnings.
    @param col str|sql_gen.Col|sql_gen.Literal
    @param errors_table None|sql_gen.Table|str
    '''
    col = sql_gen.as_Col(col)
    
    # Don't convert exceptions to warnings for user-supplied constants
    if isinstance(col, sql_gen.Literal): return sql_gen.Cast(type_, col)
    
    assert not isinstance(col, sql_gen.NamedCol)
    
    function_name = strings.first_word(type_)
    srcs = col.srcs
    save_errors = (errors_table != None and isinstance(col, sql_gen.Col)
        and col.srcs != ())
    if save_errors:
        srcs = map(sql_gen.to_name_only_col, col.srcs)
        function_name = str(sql_gen.FunctionCall(function_name, *srcs))
    function = db.TempFunction(function_name)
    
    # Create function definition
    modifiers = 'STRICT'
    if not save_errors: modifiers = 'IMMUTABLE '+modifiers
    handler = data_exception_handler(db, 'RETURN NULL;\n', srcs, errors_table)
    body = sql_gen.CustomCode(handler.to_str(db, '''\
/* The explicit cast to the return type is needed to make the cast happen
inside the try block. (Implicit casts to the return type happen at the end
of the function, outside any block.) */
RETURN value::'''+type_+''';
'''))
    body.lang='plpgsql'
    sql.define_func(db, sql_gen.FunctionDef(function, type_, body,
        [sql_gen.FunctionParam('value', 'text')], modifiers))
    
    return sql_gen.FunctionCall(function, col)
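
# Example (hypothetical column name): build a call to a temp wrapper function
# that casts the column to the given type, turning invalid values into NULL:
#     lat_cast = cast(db, 'double precision', sql_gen.Col('lat_deg', 'in'),
#         errors_table_)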

def cast_temp_col(db, type_, col, errors_table=None):
    '''Like cast(), but creates a new column with the cast values if the input
    is a column.
    @return The new column or cast value
    '''
    def cast_(col): return cast(db, type_, col, errors_table)
    
    try: col = sql_gen.underlying_col(col)
    except sql_gen.NoUnderlyingTableException: return sql_gen.wrap(cast_, col)
    
    table = col.table
    new_col = sql_gen.suffixed_col(col, '::'+strings.first_word(type_))
    expr = cast_(col)
    
    # Add column
    new_typed_col = sql_gen.TypedCol(new_col.name, type_)
    sql.add_col(db, table, new_typed_col, comment=repr(col)+'::'+type_)
    new_col.name = new_typed_col.name # propagate any renaming
    
    sql.update(db, table, [(new_col, expr)], in_place=True, recover=True)
    
    return new_col
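
# Example (hypothetical column): like cast(), but materializes the cast values
# into a new column on the input column's table and returns that column:
#     lat_col = cast_temp_col(db, 'double precision',
#         sql_gen.Col('lat_deg', 'in'), errors_table_)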

def errors_table(db, table, if_exists=True):
    '''
    @param if_exists If set, returns None if the errors table doesn't exist
    @return None|sql_gen.Table
    '''
    table = sql_gen.as_Table(table)
    if table.srcs != (): table = table.srcs[0]
    
    errors_table = sql_gen.suffixed_table(table, '.errors')
    if if_exists and not sql.table_exists(db, errors_table): return None
    return errors_table
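
# Example (hypothetical table name): look up the companion '.errors'-suffixed
# table for an input table; returns None if it doesn't exist:
#     errors_table_ = errors_table(db, 'in')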

##### Import

def put(db, table, row, pkey_=None, row_ct_ref=None):
    '''Recovers from errors.
    Only works under PostgreSQL (uses INSERT RETURNING).
    '''
    row = sql_gen.ColDict(db, table, row)
    if pkey_ == None: pkey_ = sql.pkey(db, table, recover=True)
    
    try:
        cur = sql.insert(db, table, row, pkey_, recover=True, log_level=3.5)
        if row_ct_ref != None and cur.rowcount >= 0:
            row_ct_ref[0] += cur.rowcount
        return sql.value(cur)
    except sql.DuplicateKeyException, e:
        row = sql_gen.ColDict(db, table,
            util.dict_subset_right_join(row, e.cols))
        return sql.value(sql.select(db, table, [pkey_], row, recover=True,
            log_level=3.5))
    except sql.NullValueException: return None
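
# Example (hypothetical table/columns): insert one row and get its pkey back,
# falling back to a SELECT if the row already exists:
#     row_ct = [0]
#     party_id = put(db, 'party', {'organizationname': 'UNCC'},
#         row_ct_ref=row_ct)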

def get(db, table, row, pkey, row_ct_ref=None, create=False):
    '''Recovers from errors'''
    try:
        return sql.value(sql.select(db, table, [pkey], row, limit=1,
            recover=True))
    except StopIteration:
        if not create: raise
        return put(db, table, row, pkey, row_ct_ref) # insert new row
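
# Example (hypothetical table/columns): look up a row's pkey, optionally
# creating the row if it's missing:
#     party_id = get(db, 'party', {'organizationname': 'UNCC'}, 'party_id',
#         create=True)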

def is_func_result(col):
    return col.table.name.find('(') >= 0 and col.name == 'result'

def into_table_name(out_table, in_tables0, mapping, is_func):
    def in_col_str(in_col):
        in_col = sql_gen.remove_col_rename(in_col)
        if isinstance(in_col, sql_gen.Col):
            table = in_col.table
            if table == in_tables0:
                in_col = sql_gen.to_name_only_col(in_col)
            elif is_func_result(in_col): in_col = table # omit col name
        return str(in_col)
    
    str_ = str(out_table)
    if is_func:
        str_ += '('
        
        try: value_in_col = mapping['value']
        except KeyError:
            str_ += ', '.join((str(k)+'='+in_col_str(v)
                for k, v in mapping.iteritems()))
        else: str_ += in_col_str(value_in_col)
        
        str_ += ')'
    else:
        out_col = 'rank'
        try: in_col = mapping[out_col]
        except KeyError: str_ += '_pkeys'
        else: # has a rank column, so hierarchical
            str_ += '['+str(out_col)+'='+in_col_str(in_col)+']'
    return str_
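
# Naming behavior (hypothetical inputs):
# * a data table with no 'rank' mapping gets a '_pkeys' suffix
# * a data table with a 'rank' mapping gets a '[rank=<input col>]' suffix
# * a function gets its 'value' input (or the full mapping) in parentheses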

def put_table(db, out_table, in_tables, mapping, row_ct_ref=None, into=None,
    default=None, is_func=False, on_error=exc.raise_):
    '''Recovers from errors.
    Only works under PostgreSQL (uses INSERT RETURNING).
    IMPORTANT: Must be run at the *beginning* of a transaction.
    @param in_tables The main input table to select from, followed by a list of
        tables to join with it using the main input table's pkey
    @param mapping dict(out_table_col=in_table_col, ...)
        * out_table_col: str (*not* sql_gen.Col)
        * in_table_col: sql_gen.Col|literal-value
    @param into The table to contain the output and input pkeys.
        Defaults to `out_table.name+'_pkeys'`.
    @param default The *output* column to use as the pkey for missing rows.
        If this output column does not exist in the mapping, uses None.
    @param is_func Whether out_table is the name of a SQL function, not a table
    @return sql_gen.Col Where the output pkeys are made available
    '''
    import psycopg2.extensions
    
    out_table = sql_gen.as_Table(out_table)
    
    def log_debug(msg): db.log_debug(msg, level=1.5)
    def col_ustr(str_):
        return strings.repr_no_u(sql_gen.remove_col_rename(str_))
    
    log_debug('********** New iteration **********')
    log_debug('Inserting these input columns into '+strings.as_tt(
        out_table.to_str(db))+':\n'+strings.as_table(mapping, ustr=col_ustr))
    
    is_function = sql.function_exists(db, out_table)
    
    if is_function: out_pkey = 'result'
    else: out_pkey = sql.pkey(db, out_table, recover=True)
    out_pkey_col = sql_gen.as_Col(out_pkey, out_table)
    
    in_tables_ = in_tables[:] # don't modify input!
    try: in_tables0 = in_tables_.pop(0) # first table is separate
    except IndexError: in_tables0 = None
    else:
        in_pkey = sql.pkey(db, in_tables0, recover=True)
        in_pkey_col = sql_gen.as_Col(in_pkey, in_tables0)
    
    # Determine if can use optimization for only literal values
    is_literals = not reduce(operator.or_, map(sql_gen.is_table_col,
        mapping.values()), False)
    is_literals_or_function = is_literals or is_function
    
    if in_tables0 == None: errors_table_ = None
    else: errors_table_ = errors_table(db, in_tables0)
    
    # Create input joins from list of input tables
    input_joins = [in_tables0]+[sql_gen.Join(v,
        {in_pkey: sql_gen.join_same_not_null}) for v in in_tables_]
    
    if mapping == {} and not is_function: # need >= one column for INSERT SELECT
        mapping = {out_pkey: None} # ColDict will replace with default value
    
    if not is_literals:
        if into == None:
            into = into_table_name(out_table, in_tables0, mapping, is_func)
        into = sql_gen.as_Table(into)
        
        # Set column sources
        in_cols = filter(sql_gen.is_table_col, mapping.values())
        for col in in_cols:
            if col.table == in_tables0: col.set_srcs(sql_gen.src_self)
        
        log_debug('Joining together input tables into temp table')
        # Place in new table so don't modify input and for speed
        in_table = sql_gen.Table('in')
        mapping = dicts.join(mapping, sql.flatten(db, in_table, input_joins,
            in_cols, preserve=[in_pkey_col]))
        input_joins = [in_table]
        db.log_debug('Temp table: '+strings.as_tt(in_table.to_str(db)), level=2)
    
    mapping = sql_gen.ColDict(db, out_table, mapping)
        # after applying dicts.join() because that returns a plain dict
    
292
    # Resolve default value column
293
    if default != None:
294
        try: default = mapping[default]
295
        except KeyError:
296
            db.log_debug('Default value column '
297
                +strings.as_tt(strings.repr_no_u(default))
298
                +' does not exist in mapping, falling back to None', level=2.1)
299
            default = None
300
    
301
    # Save default values for all rows since in_table may have rows deleted
302
    if is_literals: pass
303
    elif is_function: full_in_table = in_table
304
    else:
305
        full_in_table = sql_gen.suffixed_table(in_table, '_full')
306
        full_in_table_cols = [in_pkey_col]
307
        if default != None:
308
            full_in_table_cols.append(default)
309
            default = sql_gen.with_table(default, full_in_table)
310
        sql.run_query_into(db, sql.mk_select(db, in_table, full_in_table_cols,
311
            order_by=None), into=full_in_table, add_pkey_=True)
312
    
313
    if not is_literals:
314
        pkeys_names = [in_pkey, out_pkey]
315
        pkeys_cols = [in_pkey_col, out_pkey_col]
316
    
317
    pkeys_table_exists_ref = [False]
318
    def insert_into_pkeys(joins, cols=None, limit=None, **kw_args):
319
        query = sql.mk_select(db, joins, cols, order_by=None, limit=limit)
320
        if pkeys_table_exists_ref[0]:
321
            sql.insert_select(db, into, pkeys_names, query, **kw_args)
322
        else:
323
            sql.run_query_into(db, query, into=into, add_pkey_=True, **kw_args)
324
            pkeys_table_exists_ref[0] = True
325
    
326
    limit_ref = [None]
327
    def mk_main_select(joins, cols):
328
        return sql.mk_select(db, joins, cols, limit=limit_ref[0], order_by=None)
329
    
330
    if is_literals: insert_in_table = None
331
    else:
332
        insert_in_table = in_table
333
        insert_in_tables = [insert_in_table]
334
    join_cols = sql_gen.ColDict(db, out_table)
335
    
336
    exc_strs = set()
337
    def log_exc(e):
338
        e_str = exc.str_(e, first_line_only=True)
339
        log_debug('Caught exception: '+e_str)
340
        assert e_str not in exc_strs # avoid infinite loops
341
        exc_strs.add(e_str)
342
    
343
    def remove_all_rows():
344
        log_debug('Ignoring all rows')
345
        limit_ref[0] = 0 # just create an empty pkeys table
346
    
347
    def ignore_cond(cond, e):
348
        out_table_cols = sql_gen.ColDict(db, out_table)
349
        out_table_cols.update(util.dict_subset_right_join({},
350
            sql.table_cols(db, out_table)))
351
        
352
        in_cols = []
353
        cond = sql.map_expr(db, cond, mapping, in_cols)
354
        cond = sql.map_expr(db, cond, out_table_cols)
355
        
356
        track_data_error(db, errors_table_, sql_gen.cols_srcs(in_cols), None,
357
            e.cause.pgcode,
358
            strings.ensure_newl(e.cause.pgerror)+'condition: '+cond)
359
        
360
        not_cond = sql_gen.NotCond(sql_gen.CustomCode(cond))
361
        log_debug('Ignoring rows where '+strings.as_tt(not_cond.to_str(db)))
362
        sql.delete(db, insert_in_table, not_cond)
363
    
364
    not_null_cols = set()
365
    def ignore(in_col, value, e):
366
        in_col = sql_gen.with_table(in_col, insert_in_table)
367
        
368
        track_data_error(db, errors_table_, in_col.srcs, value,
369
            e.cause.pgcode, e.cause.pgerror)
370
        log_debug('Ignoring rows with '+strings.as_tt(repr(in_col))+' = '
371
            +strings.as_tt(repr(value)))
372
        
373
        sql.add_index(db, in_col, insert_in_table) # enable fast filtering
374
        if value != None and in_col not in not_null_cols:
375
            # Try just mapping the value to NULL
376
            sql.update(db, insert_in_table, [(in_col, None)],
377
                sql_gen.ColValueCond(in_col, value))
378
        else:
379
            sql.delete(db, insert_in_table, sql_gen.ColValueCond(in_col, value))
380
            if value == None: not_null_cols.add(in_col)
381
    
382
    if not is_literals:
383
        def insert_pkeys_table(which):
384
            return sql_gen.Table(sql_gen.concat(in_table.name,
385
                '_insert_'+which+'_pkeys'))
386
        insert_out_pkeys = insert_pkeys_table('out')
387
        insert_in_pkeys = insert_pkeys_table('in')
388
    
    # Do inserts and selects
    while True:
        has_joins = join_cols != {}
        
        if limit_ref[0] == 0: # special case
            assert not has_joins
            
            if is_literals: return None
            log_debug('Creating an empty output pkeys table')
            cur = sql.run_query_into(db, sql.mk_select(db, out_table,
                [out_pkey], order_by=None, limit=0), into=insert_out_pkeys)
            break # don't do main case
        
        log_debug('Trying to insert new rows')
        
        # Prepare to insert new rows
        if is_function:
            log_debug('Calling function on input rows')
            args = dict(((k.name, v) for k, v in mapping.iteritems()))
            func_call = sql_gen.NamedCol(out_pkey,
                sql_gen.FunctionCall(out_table, **args))
            
            if not is_literals:
                # Create empty pkeys table so its row type can be used
                insert_into_pkeys(input_joins, [in_pkey_col, func_call],
                    limit=0, recover=True)
                
                # Create error handling wrapper function
                select_cols = [in_pkey_col]+args.values()
                args = dict(((k, sql_gen.with_table(v, sql_gen.Table('row')))
                    for k, v in args.iteritems()))
                func_call = sql_gen.FunctionCall(out_table, **args)
                wrapper = db.TempFunction(sql_gen.concat(into.name, '_wrap'))
                sql.define_func(db, sql_gen.FunctionDef(wrapper,
                    sql_gen.SetOf(into),
                    sql_gen.RowExcIgnore(sql_gen.RowType(in_table),
                        sql.mk_select(db, input_joins, order_by=None),
                        sql_gen.ReturnQuery(sql.mk_select(db,
                            fields=[sql_gen.Col(in_pkey, 'row'), func_call],
                            explain=False)),
                        exc_handler=sql_gen.plpythonu_error_handler)
                    ))
                wrapper_table = sql_gen.FunctionCall(wrapper)
        else:
            insert_args = dict(recover=True, cacheable=False)
            if has_joins:
                insert_args.update(dict(ignore=True))
            else:
                insert_args.update(dict(returning=out_pkey))
                if not is_literals:
                    insert_args.update(dict(into=insert_out_pkeys))
            main_select = mk_main_select([insert_in_table], [sql_gen.with_table(
                c, insert_in_table) for c in mapping.values()])
        
        try:
            cur = None
            if is_function:
                if is_literals: cur = sql.select(db, fields=[func_call])
                else: insert_into_pkeys(wrapper_table, recover=True)
            else:
                cur = sql.insert_select(db, out_table, mapping.keys(),
                    main_select, **insert_args)
            break # insert successful
        except sql.MissingCastException, e:
            log_exc(e)
            
            out_col = e.col
            type_ = e.type
            
            log_debug('Casting '+strings.as_tt(out_col)+' input to '
                +strings.as_tt(type_))
            in_col = mapping[out_col]
            while True:
                try:
                    mapping[out_col] = cast_temp_col(db, type_, in_col,
                        errors_table_)
                    break # cast successful
                except sql.InvalidValueException, e:
                    log_exc(e)
                    
                    ignore(in_col, e.value, e)
        except sql.DuplicateKeyException, e:
            log_exc(e)
            
            # Different rows violating different unique constraints not
            # supported
            assert not join_cols
            
            join_cols.update(util.dict_subset_right_join(mapping, e.cols))
            log_debug('Ignoring existing rows, comparing on these columns:\n'
                +strings.as_inline_table(join_cols, ustr=col_ustr))
            
            if is_literals:
                return sql.value(sql.select(db, out_table, [out_pkey_col],
                    mapping, order_by=None))
            
            # Uniquify input table to avoid internal duplicate keys
            insert_in_table = sql.distinct_table(db, insert_in_table,
                join_cols.values())
            insert_in_tables.append(insert_in_table)
        except sql.NullValueException, e:
            log_exc(e)
            
            out_col, = e.cols
            try: in_col = mapping[out_col]
            except KeyError:
                msg = 'Missing mapping for NOT NULL column '+out_col
                log_debug(msg)
                if default == None: on_error(SyntaxError(msg)) # required col
                remove_all_rows()
            else: ignore(in_col, None, e)
        except sql.CheckException, e:
            log_exc(e)
            
            ignore_cond(e.cond, e)
        except sql.InvalidValueException, e:
            log_exc(e)
            
            for in_col in mapping.values(): ignore(in_col, e.value, e)
        except psycopg2.extensions.TransactionRollbackError, e:
            log_exc(e)
            # retry
        except sql.DatabaseErrors, e:
            log_exc(e)
            
            log_debug('No handler for exception')
            on_error(e)
            remove_all_rows()
        # after exception handled, rerun loop with additional constraints
    
    if cur != None and row_ct_ref != None and cur.rowcount >= 0:
        row_ct_ref[0] += cur.rowcount
    
    if is_literals_or_function: pass # pkeys table already created
    elif has_joins:
        select_joins = input_joins+[sql_gen.Join(out_table, join_cols)]
        log_debug('Getting output table pkeys of existing/inserted rows')
        insert_into_pkeys(select_joins, pkeys_cols)
    else:
        sql.add_row_num(db, insert_out_pkeys) # for joining with input pkeys
        
        log_debug('Getting input table pkeys of inserted rows')
        # Note that mk_main_select() does not use ORDER BY. Instead, assume that
        # since the SELECT query is identical to the one used in INSERT SELECT,
        # its rows will be retrieved in the same order.
        sql.run_query_into(db, mk_main_select(input_joins, [in_pkey]),
            into=insert_in_pkeys)
        sql.add_row_num(db, insert_in_pkeys) # for joining with output pkeys
        
        assert sql.table_row_count(db, insert_out_pkeys) == sql.table_row_count(
            db, insert_in_pkeys)
        
        log_debug('Combining output and input pkeys in inserted order')
        pkey_joins = [insert_in_pkeys, sql_gen.Join(insert_out_pkeys,
            {sql.row_num_col: sql_gen.join_same_not_null})]
        insert_into_pkeys(pkey_joins, pkeys_names)
        
        sql.empty_temp(db, [insert_out_pkeys, insert_in_pkeys])
    
    if not is_literals_or_function:
        log_debug('Setting pkeys of missing rows to '
            +strings.as_tt(repr(default)))
        missing_rows_joins = [full_in_table, sql_gen.Join(into,
            {in_pkey: sql_gen.join_same_not_null}, sql_gen.filter_out)]
            # must use join_same_not_null or query will take forever
        insert_into_pkeys(missing_rows_joins,
            [sql_gen.Col(in_pkey, full_in_table),
            sql_gen.NamedCol(out_pkey, default)])
    # otherwise, there is already an entry for every row
    
    if is_literals: return sql.value(cur)
    else:
        assert (sql.table_row_count(db, into)
            == sql.table_row_count(db, full_in_table))
        
        sql.empty_temp(db, insert_in_tables+[full_in_table])
        
        srcs = []
        if is_func: srcs = sql_gen.cols_srcs(in_cols)
        return sql_gen.Col(out_pkey, into, srcs)
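
# Example (hypothetical tables/columns; assumes `db` is an open connection and
# `in_table` is an already-loaded sql_gen.Table). Maps input columns onto the
# output table's columns, inserts the new rows, and returns the column of the
# "<out_table>_pkeys" temp table where each input pkey's output pkey is made
# available:
#     row_ct = [0]
#     party_pkeys = put_table(db, 'party', [in_table],
#         {'organizationname': sql_gen.Col('datasource', in_table)},
#         row_ct_ref=row_ct)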