# Database import/export

import exc
import dicts
import sql
import sql_gen
import strings
import util

##### Data cleanup

def cleanup_table(db, table, cols):
    table = sql_gen.as_Table(table)
    cols = map(sql_gen.as_Col, cols)
    
    expr = ('nullif(nullif(trim(both from %s), '+db.esc_value('')+'), '
        +db.esc_value(r'\N')+')')
    changes = [(v, sql_gen.CustomCode(expr % v.to_str(db)))
        for v in cols]
    
    sql.update(db, table, changes, in_place=True)
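
# A minimal usage sketch (hypothetical table/column names; `db` is assumed to
# be an open database connection wrapper providing the esc_value() method used
# above):
#     cleanup_table(db, 'specimens', ['collector', 'locality'])
#     # trims whitespace in both columns and maps '' and '\N' to NULL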

##### Error tracking

def track_data_error(db, errors_table, cols, value, error_code, error):
    '''
    @param errors_table If None, does nothing.
    '''
    if errors_table == None or cols == (): return
    
    for col in cols:
        try:
            sql.insert(db, errors_table, dict(column=col.name, value=value,
                error_code=error_code, error=error), recover=True,
                cacheable=True, log_level=4)
        except sql.DuplicateKeyException: pass
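
# Usage sketch (hypothetical names; cols must be sql_gen.Col objects so that
# col.name is available, and the errors table is assumed to already exist):
#     track_data_error(db, 'specimens.errors', cols, '2012-02-30', '22008',
#         'date/time field value out of range')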

def cast(db, type_, col, errors_table=None):
    '''Casts an (unrenamed) column or value.
    If errors_table set and col has srcs, saves errors in errors_table (using
    col's srcs attr as source columns). Otherwise, converts errors to warnings.
    @param col str|sql_gen.Col|sql_gen.Literal
    @param errors_table None|sql_gen.Table|str
    '''
    col = sql_gen.as_Col(col)
    
    # Don't convert exceptions to warnings for user-supplied constants
    if isinstance(col, sql_gen.Literal): return sql_gen.Cast(type_, col)
    
    assert not isinstance(col, sql_gen.NamedCol)
    
    save_errors = (errors_table != None and isinstance(col, sql_gen.Col)
        and col.srcs != ())
    function_name = type_
    if save_errors:
        errors_table = sql_gen.as_Table(errors_table)
        
        srcs = map(sql_gen.to_name_only_col, col.srcs)
        function_name = str(sql_gen.FunctionCall(function_name, *srcs))
    function = db.TempFunction(function_name)
    
    while True:
        # Create function definition
        query = '''\
CREATE FUNCTION '''+function.to_str(db)+'''(value text)
RETURNS '''+type_+'''
LANGUAGE plpgsql
'''
        if not save_errors: query += 'IMMUTABLE '
        query += '''\
STRICT
AS $$
BEGIN
    /* The explicit cast to the return type is needed to make the cast happen
    inside the try block. (Implicit casts to the return type happen at the end
    of the function, outside any block.) */
    RETURN value::'''+type_+''';
EXCEPTION
    WHEN data_exception THEN
'''
        if save_errors:
            errors_table_cols = map(sql_gen.Col,
                ['column', 'value', 'error_code', 'error'])
            query += '''\
        -- Save error in errors table.
        DECLARE
            error_code text := SQLSTATE;
            error text := SQLERRM;
            "column" text;
        BEGIN
            -- Insert the value and error for *each* source column.
            FOR "column" IN
'''+sql.mk_select(db, sql_gen.NamedValues('c', None, [[c.name] for c in srcs]),
    order_by=None)+'''
            LOOP
                BEGIN
'''+sql.mk_insert_select(db, errors_table, errors_table_cols,
    sql_gen.Values(errors_table_cols).to_str(db))+''';
                EXCEPTION
                    WHEN unique_violation THEN NULL; -- continue to next row
                END;
            END LOOP;
        END;
'''
        else:
            query += '''\
        RAISE WARNING '%', SQLERRM;
'''
        query += '''\
        RETURN NULL;
END;
$$;
'''
        
        # Create function
        try:
            sql.run_query(db, query, recover=True, cacheable=True,
                log_ignore_excs=(sql.DuplicateException,))
            break # successful
        except sql.DuplicateException:
            function.name = sql.next_version(function.name)
            # try again with next version of name
    
    return sql_gen.FunctionCall(function, col)
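
# Usage sketch (hypothetical column and type names): cast an input column to
# date, recording failing values in the import's errors table instead of
# aborting the whole query:
#     expr = cast(db, 'date', sql_gen.Col('datecollected', 'specimens'),
#         errors_table=errors_table(db, 'specimens'))
#     # expr is a sql_gen.FunctionCall that applies the generated temp function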

def cast_temp_col(db, type_, col, errors_table=None):
    '''Like cast(), but creates a new column with the cast values if the input
    is a column.
    @return The new column or cast value
    '''
    def cast_(col): return cast(db, type_, col, errors_table)
    
    try: col = sql_gen.underlying_col(col)
    except sql_gen.NoUnderlyingTableException: return sql_gen.wrap(cast_, col)
    
    table = col.table
    new_col = sql_gen.suffixed_col(col, '::'+strings.first_word(type_))
    expr = cast_(col)
    
    # Add column
    new_typed_col = sql_gen.TypedCol(new_col.name, type_)
    sql.add_col(db, table, new_typed_col, comment=repr(col)+'::'+type_)
    new_col.name = new_typed_col.name # propagate any renaming
    
    sql.update(db, table, [(new_col, expr)], in_place=True, recover=True)
    
    return new_col
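
# Usage sketch (hypothetical names): materialize a casted copy of a column on
# its underlying table, e.g. before retrying an INSERT SELECT that needs a
# date value:
#     new_col = cast_temp_col(db, 'date', sql_gen.Col('datecollected', 'in'),
#         errors_table_)
#     # new_col refers to a new column named along the lines of
#     # 'datecollected::date' on the same table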

def errors_table(db, table, if_exists=True):
    '''
    @param if_exists If set, returns None if the errors table doesn't exist
    @return None|sql_gen.Table
    '''
    table = sql_gen.as_Table(table)
    if table.srcs != (): table = table.srcs[0]
    
    errors_table = sql_gen.suffixed_table(table, '.errors')
    if if_exists and not sql.table_exists(db, errors_table): return None
    return errors_table
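
# Usage sketch: look up the errors table that accompanies an input table
# (returns None when the table hasn't been created and if_exists is left True):
#     errors_table_ = errors_table(db, 'specimens')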

##### Import

def put(db, table, row, pkey_=None, row_ct_ref=None):
    '''Recovers from errors.
    Only works under PostgreSQL (uses INSERT RETURNING).
    '''
    row = sql_gen.ColDict(db, table, row)
    if pkey_ == None: pkey_ = sql.pkey(db, table, recover=True)
    
    try:
        cur = sql.insert(db, table, row, pkey_, recover=True, log_level=3.5)
        if row_ct_ref != None and cur.rowcount >= 0:
            row_ct_ref[0] += cur.rowcount
        return sql.value(cur)
    except sql.DuplicateKeyException, e:
        row = sql_gen.ColDict(db, table,
            util.dict_subset_right_join(row, e.cols))
        return sql.value(sql.select(db, table, [pkey_], row, recover=True,
            log_level=3.5))
    except sql.NullValueException: return None
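
# Usage sketch (hypothetical table/columns): insert a row and get its pkey
# back, falling back to a SELECT of the existing row on a duplicate key:
#     row_ct = [0]
#     party_id = put(db, 'party', {'organizationname': 'NYBG'},
#         row_ct_ref=row_ct)
#     # row_ct[0] is incremented by the number of rows actually inserted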

def get(db, table, row, pkey, row_ct_ref=None, create=False):
    '''Recovers from errors'''
    try:
        return sql.value(sql.select(db, table, [pkey], row, limit=1,
            recover=True))
    except StopIteration:
        if not create: raise
        return put(db, table, row, pkey, row_ct_ref) # insert new row
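
# Usage sketch: fetch a row's pkey by its column values, creating the row if it
# doesn't exist yet (create=True delegates to put()):
#     party_id = get(db, 'party', {'organizationname': 'NYBG'}, 'party_id',
#         create=True)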

def is_func_result(col):
    return col.table.name.find('(') >= 0 and col.name == 'result'

def into_table_name(out_table, in_tables0, mapping, is_func):
    def in_col_str(in_col):
        in_col = sql_gen.remove_col_rename(in_col)
        if isinstance(in_col, sql_gen.Col):
            table = in_col.table
            if table == in_tables0:
                in_col = sql_gen.to_name_only_col(in_col)
            elif is_func_result(in_col): in_col = table # omit col name
        return str(in_col)
    
    str_ = str(out_table)
    if is_func:
        str_ += '('
        
        try: value_in_col = mapping['value']
        except KeyError:
            str_ += ', '.join((str(k)+'='+in_col_str(v)
                for k, v in mapping.iteritems()))
        else: str_ += in_col_str(value_in_col)
        
        str_ += ')'
    else:
        out_col = 'rank'
        try: in_col = mapping[out_col]
        except KeyError: str_ += '_pkeys'
        else: # has a rank column, so hierarchical
            str_ += '['+str(out_col)+'='+in_col_str(in_col)+']'
    return str_
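
# Naming sketch (hypothetical names): for a plain output table the generated
# name looks like 'taxonname_pkeys'; with a 'rank' column in the mapping it
# becomes something like 'taxonname[rank=rank_col]'; for a function-style
# out_table the mapping is rendered as a call, e.g. 'func(value=val_col)'.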

def put_table(db, out_table, in_tables, mapping, row_ct_ref=None, into=None,
    default=None, is_func=False, on_error=exc.raise_):
    '''Recovers from errors.
    Only works under PostgreSQL (uses INSERT RETURNING).
    IMPORTANT: Must be run at the *beginning* of a transaction.
    @param in_tables The main input table to select from, followed by a list of
        tables to join with it using the main input table's pkey
    @param mapping dict(out_table_col=in_table_col, ...)
        * out_table_col: str (*not* sql_gen.Col)
        * in_table_col: sql_gen.Col|literal-value
    @param into The table to contain the output and input pkeys.
        Defaults to `out_table.name+'_pkeys'`.
    @param default The *output* column to use as the pkey for missing rows.
        If this output column does not exist in the mapping, uses None.
    @param is_func Whether out_table is the name of a SQL function, not a table
    @return sql_gen.Col Where the output pkeys are made available
    '''
    out_table = sql_gen.as_Table(out_table)
    
    def log_debug(msg): db.log_debug(msg, level=1.5)
    def col_ustr(str_):
        return strings.repr_no_u(sql_gen.remove_col_rename(str_))
    
    log_debug('********** New iteration **********')
    log_debug('Inserting these input columns into '+strings.as_tt(
        out_table.to_str(db))+':\n'+strings.as_table(mapping, ustr=col_ustr))
    
    is_function = sql.function_exists(db, out_table)
    
    if is_function: out_pkey = 'result'
    else: out_pkey = sql.pkey(db, out_table, recover=True)
    out_pkey_col = sql_gen.as_Col(out_pkey, out_table)
    
    if mapping == {}: # need at least one column for INSERT SELECT
        mapping = {out_pkey: None} # ColDict will replace with default value
    
    # Create input joins from list of input tables
    in_tables_ = in_tables[:] # don't modify input!
    in_tables0 = in_tables_.pop(0) # first table is separate
    errors_table_ = errors_table(db, in_tables0)
    in_pkey = sql.pkey(db, in_tables0, recover=True)
    in_pkey_col = sql_gen.as_Col(in_pkey, in_tables0)
    input_joins = [in_tables0]+[sql_gen.Join(v,
        {in_pkey: sql_gen.join_same_not_null}) for v in in_tables_]
    
    if into == None:
        into = into_table_name(out_table, in_tables0, mapping, is_func)
    into = sql_gen.as_Table(into)
    
    # Set column sources
    in_cols = filter(sql_gen.is_table_col, mapping.values())
    for col in in_cols:
        if col.table == in_tables0: col.set_srcs(sql_gen.src_self)
    
    log_debug('Joining together input tables into temp table')
    # Copy to a new table for speed, and so the input isn't modified if values
    # are edited
    in_table = sql_gen.Table('in')
    mapping = dicts.join(mapping, sql.flatten(db, in_table, input_joins,
        in_cols, preserve=[in_pkey_col]))
    input_joins = [in_table]
    db.log_debug('Temp table: '+strings.as_tt(in_table.to_str(db)), level=2)
    
    mapping = sql_gen.ColDict(db, out_table, mapping)
        # after applying dicts.join() because that returns a plain dict
    
    # Resolve default value column
    if default != None:
        try: default = mapping[default]
        except KeyError:
            db.log_debug('Default value column '
                +strings.as_tt(strings.repr_no_u(default))
                +' does not exist in mapping, falling back to None', level=2.1)
            default = None
    
    # Save default values for all rows since in_table may have rows deleted
    if is_function: full_in_table = in_table
    else:
        full_in_table = sql_gen.suffixed_table(in_table, '_full')
        full_in_table_cols = [in_pkey_col]
        if default != None:
            full_in_table_cols.append(default)
            default = sql_gen.with_table(default, full_in_table)
        sql.run_query_into(db, sql.mk_select(db, in_table, full_in_table_cols,
            order_by=None), into=full_in_table, add_pkey_=True)
    
    pkeys_names = [in_pkey, out_pkey]
    pkeys_cols = [in_pkey_col, out_pkey_col]
    
    pkeys_table_exists_ref = [False]
    def insert_into_pkeys(joins, cols, **kw_args):
        query = sql.mk_select(db, joins, cols, order_by=None)
        if pkeys_table_exists_ref[0]:
            sql.insert_select(db, into, pkeys_names, query, **kw_args)
        else:
            sql.run_query_into(db, query, into=into, add_pkey_=True, **kw_args)
            pkeys_table_exists_ref[0] = True
    
    limit_ref = [None]
    insert_in_table = in_table
    insert_in_tables = [insert_in_table]
    conds = set()
    join_cols = sql_gen.ColDict(db, out_table)
    def mk_main_select(joins, cols):
        conds_ = [(sql_gen.with_table(k, insert_in_table), v) for k, v in conds]
        return sql.mk_select(db, joins, cols, conds_, limit=limit_ref[0],
            order_by=None)
    
    exc_strs = set()
    def log_exc(e):
        e_str = exc.str_(e, first_line_only=True)
        log_debug('Caught exception: '+e_str)
        assert e_str not in exc_strs # avoid infinite loops
        exc_strs.add(e_str)
    
    def remove_all_rows():
        log_debug('Ignoring all rows')
        limit_ref[0] = 0 # just create an empty pkeys table
    
    def ignore_cond(cond, e):
        out_table_cols = sql_gen.ColDict(db, out_table)
        out_table_cols.update(util.dict_subset_right_join({},
            sql.table_cols(db, out_table)))
        
        in_cols = []
        cond = sql.map_expr(db, cond, mapping, in_cols)
        cond = sql.map_expr(db, cond, out_table_cols)
        
        track_data_error(db, errors_table_, sql_gen.cols_srcs(in_cols), None,
            e.cause.pgcode,
            strings.ensure_newl(e.cause.pgerror)+'condition: '+cond)
        
        not_cond = sql_gen.NotCond(sql_gen.CustomCode(cond))
        log_debug('Ignoring rows where '+strings.as_tt(not_cond.to_str(db)))
        sql.delete(db, insert_in_table, not_cond)
    
    not_null_cols = set()
    def ignore(in_col, value, e):
        in_col = sql_gen.with_table(in_col, insert_in_table)
        
        track_data_error(db, errors_table_, in_col.srcs, value,
            e.cause.pgcode, e.cause.pgerror)
        log_debug('Ignoring rows with '+strings.as_tt(repr(in_col))+' = '
            +strings.as_tt(repr(value)))
        
        sql.add_index(db, in_col, insert_in_table) # enable fast filtering
        if value != None and in_col not in not_null_cols:
            # Try just mapping the value to NULL
            sql.update(db, insert_in_table, [(in_col, None)],
                sql_gen.ColValueCond(in_col, value))
        else:
            sql.delete(db, insert_in_table, sql_gen.ColValueCond(in_col, value))
            if value == None: not_null_cols.add(in_col)
    
    def insert_pkeys_table(which):
        return sql_gen.Table(sql_gen.concat(in_table.name,
            '_insert_'+which+'_pkeys'))
    insert_out_pkeys = insert_pkeys_table('out')
    insert_in_pkeys = insert_pkeys_table('in')
    
    # Do inserts and selects
    while True:
        if limit_ref[0] == 0: # special case
            log_debug('Creating an empty pkeys table')
            cur = sql.run_query_into(db, sql.mk_select(db, out_table,
                [out_pkey], order_by=None, limit=0), into=insert_out_pkeys)
            break # don't do main case
        
        has_joins = join_cols != {}
        
        log_debug('Trying to insert new rows')
        
        # Prepare to insert new rows
        if is_function:
            log_debug('Calling function on input rows')
            args = dict(((k.name, v) for k, v in mapping.iteritems()))
            func_call = sql_gen.NamedCol(out_pkey,
                sql_gen.FunctionCall(out_table, **args))
        else:
            insert_args = dict(recover=True, cacheable=False)
            if has_joins:
                insert_args.update(dict(ignore=True))
            else:
                insert_args.update(dict(returning=out_pkey,
                    into=insert_out_pkeys))
            main_select = mk_main_select([insert_in_table], [sql_gen.with_table(
                c, insert_in_table) for c in mapping.values()])
        
        try:
            cur = None
            if is_function:
                insert_into_pkeys(input_joins, [in_pkey_col, func_call],
                    recover=True)
            else:
                cur = sql.insert_select(db, out_table, mapping.keys(),
                    main_select, **insert_args)
            break # insert successful
        except sql.MissingCastException, e:
            log_exc(e)
            
            out_col = e.col
            type_ = e.type
            
            log_debug('Casting '+strings.as_tt(out_col)+' input to '
                +strings.as_tt(type_))
            in_col = mapping[out_col]
            while True:
                try:
                    mapping[out_col] = cast_temp_col(db, type_, in_col,
                        errors_table_)
                    break # cast successful
                except sql.InvalidValueException, e:
                    log_exc(e)
                    
                    ignore(in_col, e.value, e)
        except sql.DuplicateKeyException, e:
            log_exc(e)
            
            # Different rows violating different unique constraints are not
            # supported
            assert not join_cols
            
            join_cols.update(util.dict_subset_right_join(mapping, e.cols))
            log_debug('Ignoring existing rows, comparing on these columns:\n'
                +strings.as_inline_table(join_cols, ustr=col_ustr))
            
            # Uniquify input table to avoid internal duplicate keys
            insert_in_table = sql.distinct_table(db, insert_in_table,
                join_cols.values())
            insert_in_tables.append(insert_in_table)
        except sql.NullValueException, e:
            log_exc(e)
            
            out_col, = e.cols
            try: in_col = mapping[out_col]
            except KeyError:
                msg = 'Missing mapping for NOT NULL column '+out_col
                log_debug(msg)
                if default == None: on_error(SyntaxError(msg)) # required col
                remove_all_rows()
            else: ignore(in_col, None, e)
        except sql.CheckException, e:
            log_exc(e)
            
            ignore_cond(e.cond, e)
        except sql.InvalidValueException, e:
            log_exc(e)
            
            for in_col in mapping.values(): ignore(in_col, e.value, e)
        except sql.DatabaseErrors, e:
            log_exc(e)
            
            log_debug('No handler for exception')
            on_error(e)
            remove_all_rows()
        # after the exception is handled, rerun the loop with the additional
        # constraints
    
    if cur != None and row_ct_ref != None and cur.rowcount >= 0:
        row_ct_ref[0] += cur.rowcount
    
    if is_function: pass # pkeys table already created
    elif has_joins:
        select_joins = input_joins+[sql_gen.Join(out_table, join_cols)]
        log_debug('Getting output table pkeys of existing/inserted rows')
        insert_into_pkeys(select_joins, pkeys_cols)
    else:
        sql.add_row_num(db, insert_out_pkeys) # for joining with input pkeys
        
        log_debug('Getting input table pkeys of inserted rows')
        # Note that mk_main_select() does not use ORDER BY. Instead, assume that
        # since the SELECT query is identical to the one used in INSERT SELECT,
        # its rows will be retrieved in the same order.
        sql.run_query_into(db, mk_main_select(input_joins, [in_pkey]),
            into=insert_in_pkeys)
        sql.add_row_num(db, insert_in_pkeys) # for joining with output pkeys
        
        assert sql.table_row_count(db, insert_out_pkeys) == sql.table_row_count(
            db, insert_in_pkeys)
        
        log_debug('Combining output and input pkeys in inserted order')
        pkey_joins = [insert_in_pkeys, sql_gen.Join(insert_out_pkeys,
            {sql.row_num_col: sql_gen.join_same_not_null})]
        insert_into_pkeys(pkey_joins, pkeys_names)
        
        sql.empty_temp(db, [insert_out_pkeys, insert_in_pkeys])
    
    if not is_function:
        log_debug('Setting pkeys of missing rows to '
            +strings.as_tt(repr(default)))
        missing_rows_joins = [full_in_table, sql_gen.Join(into,
            {in_pkey: sql_gen.join_same_not_null}, sql_gen.filter_out)]
            # must use join_same_not_null or query will take forever
        insert_into_pkeys(missing_rows_joins,
            [sql_gen.Col(in_pkey, full_in_table),
            sql_gen.NamedCol(out_pkey, default)])
    # otherwise, there is already an entry for every row
    
    assert (sql.table_row_count(db, into)
        == sql.table_row_count(db, full_in_table))
    
    sql.empty_temp(db, insert_in_tables+[full_in_table])
    
    srcs = []
    if is_func: srcs = sql_gen.cols_srcs(in_cols)
    return sql_gen.Col(out_pkey, into, srcs)
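
# Usage sketch (hypothetical tables/columns): insert rows from a staging table
# into an output table and get back the column where the output pkeys are made
# available, keyed by the input pkeys:
#     row_ct = [0]
#     pkeys_col = put_table(db, 'taxonname', ['specimens_staging'],
#         {'taxonname': sql_gen.Col('scientificname', 'specimens_staging'),
#          'rank': 'species'}, row_ct_ref=row_ct)
#     # pkeys_col points into a temp table mapping each input pkey to the
#     # matching output pkey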