# Database import/export

import exc
import dicts
import sql
import sql_gen
import strings
import util

##### Data cleanup

def cleanup_table(db, table, cols):
    table = sql_gen.as_Table(table)
    cols = map(sql_gen.as_Col, cols)
    
    expr = ('nullif(nullif(trim(both from %s), '+db.esc_value('')+'), '
        +db.esc_value(r'\N')+')')
    changes = [(v, sql_gen.CustomCode(expr % v.to_str(db)))
        for v in cols]
    
    sql.update(db, table, changes, in_place=True)
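
# Minimal usage sketch (hypothetical table/column names): assuming `db` is an
# open connection object from sql.py, this nulls out empty strings and \N
# placeholders in place:
#     cleanup_table(db, 'plants', ['genus', 'species'])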

##### Error tracking

def track_data_error(db, errors_table, cols, value, error_code, error):
    '''
    @param errors_table If None, does nothing.
    '''
    if errors_table == None or cols == (): return
    
    for col in cols:
        try:
            sql.insert(db, errors_table, dict(column=col.name, value=value,
                error_code=error_code, error=error), recover=True,
                cacheable=True, log_level=4)
        except sql.DuplicateKeyException: pass
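
# Usage sketch (hypothetical values): each col must have a .name attribute
# (e.g. a sql_gen.Col); duplicate errors for the same key are silently skipped:
#     track_data_error(db, errors_table_, [sql_gen.Col('height_m')], '12a',
#         '22P02', 'invalid input syntax for type double precision')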

def cast(db, type_, col, errors_table=None):
    '''Casts an (unrenamed) column or value.
    If errors_table set and col has srcs, saves errors in errors_table (using
    col's srcs attr as source columns). Otherwise, converts errors to warnings.
    @param col str|sql_gen.Col|sql_gen.Literal
    @param errors_table None|sql_gen.Table|str
    '''
    col = sql_gen.as_Col(col)
    
    # Don't convert exceptions to warnings for user-supplied constants
    if isinstance(col, sql_gen.Literal): return sql_gen.Cast(type_, col)
    
    assert not isinstance(col, sql_gen.NamedCol)
    
    save_errors = (errors_table != None and isinstance(col, sql_gen.Col)
        and col.srcs != ())
    function_name = type_
    if save_errors:
        errors_table = sql_gen.as_Table(errors_table)
        
        srcs = map(sql_gen.to_name_only_col, col.srcs)
        function_name = str(sql_gen.FunctionCall(function_name, *srcs))
    function = db.TempFunction(function_name)
    
    while True:
        # Create function definition
        query = '''\
CREATE FUNCTION '''+function.to_str(db)+'''(value text)
RETURNS '''+type_+'''
LANGUAGE plpgsql
'''
        if not save_errors: query += 'IMMUTABLE '
        query += '''\
STRICT
AS $$
BEGIN
    /* The explicit cast to the return type is needed to make the cast happen
    inside the try block. (Implicit casts to the return type happen at the end
    of the function, outside any block.) */
    RETURN value::'''+type_+''';
EXCEPTION
    WHEN data_exception THEN
'''
        if save_errors:
            errors_table_cols = map(sql_gen.Col,
                ['column', 'value', 'error_code', 'error'])
            query += '''\
        -- Save error in errors table.
        DECLARE
            error_code text := SQLSTATE;
            error text := SQLERRM;
            "column" text;
        BEGIN
            -- Insert the value and error for *each* source column.
            FOR "column" IN
'''+sql.mk_select(db, sql_gen.NamedValues('c', None, [[c.name] for c in srcs]),
    order_by=None)+'''
            LOOP
                BEGIN
'''+sql.mk_insert_select(db, errors_table, errors_table_cols,
    sql_gen.Values(errors_table_cols).to_str(db))+''';
                EXCEPTION
                    WHEN unique_violation THEN NULL; -- continue to next row
                END;
            END LOOP;
        END;
'''
        else:
            query += '''\
        RAISE WARNING '%', SQLERRM;
'''
        query += '''\
        RETURN NULL;
END;
$$;
'''
        
        # Create function
        try:
            sql.run_query(db, query, recover=True, cacheable=True,
                log_ignore_excs=(sql.DuplicateException,))
            break # successful
        except sql.DuplicateException:
            function.name = sql.next_version(function.name)
            # try again with next version of name
    
    return sql_gen.FunctionCall(function, col)
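
# Usage sketch (hypothetical column): wraps col in a per-type temp SQL function
# so that invalid values either become warnings or rows in errors_table:
#     height = sql_gen.Col('height', 'specimens') # hypothetical input column
#     cast_expr = cast(db, 'double precision', height, errors_table=None)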

def cast_temp_col(db, type_, col, errors_table=None):
    '''Like cast(), but creates a new column with the cast values if the input
    is a column.
    @return The new column or cast value
    '''
    def cast_(col): return cast(db, type_, col, errors_table)
    
    try: col = sql_gen.underlying_col(col)
    except sql_gen.NoUnderlyingTableException: return sql_gen.wrap(cast_, col)
    
    table = col.table
    new_col = sql_gen.suffixed_col(col, '::'+strings.first_word(type_))
    expr = cast_(col)
    
    # Add column
    new_typed_col = sql_gen.TypedCol(new_col.name, type_)
    sql.add_col(db, table, new_typed_col, comment=repr(col)+'::'+type_)
    new_col.name = new_typed_col.name # propagate any renaming
    
    sql.update(db, table, [(new_col, expr)], in_place=True, recover=True)
    
    return new_col
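
# Usage sketch (hypothetical names): like cast(), but when col belongs to an
# actual table this adds a new column (named like 'height::double') holding the
# cast values and returns it:
#     height_cast = cast_temp_col(db, 'double precision', height, errors_table_)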

def errors_table(db, table, if_exists=True):
    '''
    @param if_exists If set, returns None if the errors table doesn't exist
    @return None|sql_gen.Table
    '''
    table = sql_gen.as_Table(table)
    if table.srcs != (): table = table.srcs[0]
    
    errors_table = sql_gen.suffixed_table(table, '.errors')
    if if_exists and not sql.table_exists(db, errors_table): return None
    return errors_table
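
# Usage sketch (hypothetical input table): the errors table for 'specimens'
# would be named 'specimens.errors'; by default, returns None if that table
# doesn't exist:
#     errors_table_ = errors_table(db, 'specimens')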

##### Import

def put(db, table, row, pkey_=None, row_ct_ref=None):
    '''Recovers from errors.
    Only works under PostgreSQL (uses INSERT RETURNING).
    '''
    row = sql_gen.ColDict(db, table, row)
    if pkey_ == None: pkey_ = sql.pkey(db, table, recover=True)
    
    try:
        cur = sql.insert(db, table, row, pkey_, recover=True, log_level=3.5)
        if row_ct_ref != None and cur.rowcount >= 0:
            row_ct_ref[0] += cur.rowcount
        return sql.value(cur)
    except sql.DuplicateKeyException, e:
        row = sql_gen.ColDict(db, table,
            util.dict_subset_right_join(row, e.cols))
        return sql.value(sql.select(db, table, [pkey_], row, recover=True,
            log_level=3.5))
    except sql.NullValueException: return None
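
# Usage sketch (hypothetical table/row): returns the new row's pkey value, or
# the existing row's pkey value on a duplicate key; row counts are accumulated
# in the optional row_ct_ref list:
#     row_ct = [0]
#     party_id = put(db, 'party', {'organizationname': 'UNH'}, row_ct_ref=row_ct)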

def get(db, table, row, pkey, row_ct_ref=None, create=False):
    '''Recovers from errors'''
    try:
        return sql.value(sql.select(db, table, [pkey], row, limit=1,
            recover=True))
    except StopIteration:
        if not create: raise
        return put(db, table, row, pkey, row_ct_ref) # insert new row
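
# Usage sketch (hypothetical names): looks up the pkey of a matching row,
# inserting it first if create=True and no row matches:
#     party_id = get(db, 'party', {'organizationname': 'UNH'}, 'party_id',
#         create=True)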

def is_func_result(col):
    return col.table.name.find('(') >= 0 and col.name == 'result'
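
# e.g. a column named 'result' whose table is a function call such as
# "plantname(...)" is treated as that function's return value.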

def into_table_name(out_table, in_tables0, mapping, is_func):
    def in_col_str(in_col):
        in_col = sql_gen.remove_col_rename(in_col)
        if isinstance(in_col, sql_gen.Col):
            table = in_col.table
            if table == in_tables0:
                in_col = sql_gen.to_name_only_col(in_col)
            elif is_func_result(in_col): in_col = table # omit col name
        return str(in_col)
    
    str_ = str(out_table)
    if is_func:
        str_ += '('
        
        try: value_in_col = mapping['value']
        except KeyError:
            str_ += ', '.join((str(k)+'='+in_col_str(v)
                for k, v in mapping.iteritems()))
        else: str_ += in_col_str(value_in_col)
        
        str_ += ')'
    else:
        out_col = 'rank'
        try: in_col = mapping[out_col]
        except KeyError: str_ += '_pkeys'
        else: # has a rank column, so hierarchical
            str_ += '['+str(out_col)+'='+in_col_str(in_col)+']'
    return str_
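
# Naming sketch (hypothetical inputs): for a function, the generated name is
# out_table(...) built from the mapping (or just the value column if one is
# mapped); for a table, it is the output table name plus '_pkeys', or
# out_table+'[rank=...]' when the mapping has a rank column (hierarchical case).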

def put_table(db, out_table, in_tables, mapping, row_ct_ref=None, into=None,
    default=None, is_func=False, on_error=exc.raise_):
    '''Recovers from errors.
    Only works under PostgreSQL (uses INSERT RETURNING).
    IMPORTANT: Must be run at the *beginning* of a transaction.
    @param in_tables The main input table to select from, followed by a list of
        tables to join with it using the main input table's pkey
    @param mapping dict(out_table_col=in_table_col, ...)
        * out_table_col: str (*not* sql_gen.Col)
        * in_table_col: sql_gen.Col|literal-value
    @param into The table to contain the output and input pkeys.
        Defaults to `out_table.name+'_pkeys'`.
    @param default The *output* column to use as the pkey for missing rows.
        If this output column does not exist in the mapping, uses None.
    @param is_func Whether out_table is the name of a SQL function, not a table
    @return sql_gen.Col Where the output pkeys are made available
    '''
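    # Example call (hypothetical table/column names; a sketch, not from the
    # codebase): insert rows of input table 'specimens' into output table
    # 'plantname', then read the returned pkeys column:
    #     pkeys_col = put_table(db, 'plantname', ['specimens'],
    #         {'plantname': sql_gen.Col('scientificname', 'specimens')})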
    out_table = sql_gen.as_Table(out_table)
    
    def log_debug(msg): db.log_debug(msg, level=1.5)
    def col_ustr(str_):
        return strings.repr_no_u(sql_gen.remove_col_rename(str_))
    
    log_debug('********** New iteration **********')
    log_debug('Inserting these input columns into '+strings.as_tt(
        out_table.to_str(db))+':\n'+strings.as_table(mapping, ustr=col_ustr))
    
    is_function = sql.function_exists(db, out_table)
    
    if is_function: out_pkey = 'result'
    else: out_pkey = sql.pkey(db, out_table, recover=True)
    out_pkey_col = sql_gen.as_Col(out_pkey, out_table)
    
    if mapping == {}: # need at least one column for INSERT SELECT
        mapping = {out_pkey: None} # ColDict will replace with default value
    
    # Create input joins from list of input tables
    in_tables_ = in_tables[:] # don't modify input!
    in_tables0 = in_tables_.pop(0) # first table is separate
    errors_table_ = errors_table(db, in_tables0)
    in_pkey = sql.pkey(db, in_tables0, recover=True)
    in_pkey_col = sql_gen.as_Col(in_pkey, in_tables0)
    input_joins = [in_tables0]+[sql_gen.Join(v,
        {in_pkey: sql_gen.join_same_not_null}) for v in in_tables_]
    
    if into == None:
        into = into_table_name(out_table, in_tables0, mapping, is_func)
    into = sql_gen.as_Table(into)
    
    # Set column sources
    in_cols = filter(sql_gen.is_table_col, mapping.values())
    for col in in_cols:
        if col.table == in_tables0: col.set_srcs(sql_gen.src_self)
    
    log_debug('Joining together input tables into temp table')
    # Place in a new table for speed, and so that the input isn't modified
    # if values are edited
    in_table = sql_gen.Table('in')
    mapping = dicts.join(mapping, sql.flatten(db, in_table, input_joins,
        in_cols, preserve=[in_pkey_col]))
    input_joins = [in_table]
    db.log_debug('Temp table: '+strings.as_tt(in_table.to_str(db)), level=2)
    
    mapping = sql_gen.ColDict(db, out_table, mapping)
        # after applying dicts.join() because that returns a plain dict
    
    # Resolve default value column
    if default != None:
        try: default = mapping[default]
        except KeyError:
            db.log_debug('Default value column '
                +strings.as_tt(strings.repr_no_u(default))
                +' does not exist in mapping, falling back to None', level=2.1)
            default = None
    
    # Save default values for all rows since in_table may have rows deleted
    if is_function: full_in_table = in_table
    else:
        full_in_table = sql_gen.suffixed_table(in_table, '_full')
        full_in_table_cols = [in_pkey_col]
        if default != None:
            full_in_table_cols.append(default)
            default = sql_gen.with_table(default, full_in_table)
        sql.run_query_into(db, sql.mk_select(db, in_table, full_in_table_cols,
            order_by=None), into=full_in_table, add_pkey_=True)
    
    pkeys_names = [in_pkey, out_pkey]
    pkeys_cols = [in_pkey_col, out_pkey_col]
    
    pkeys_table_exists_ref = [False]
    def insert_into_pkeys(joins, cols, **kw_args):
        query = sql.mk_select(db, joins, cols, order_by=None)
        if pkeys_table_exists_ref[0]:
            sql.insert_select(db, into, pkeys_names, query, **kw_args)
        else:
            sql.run_query_into(db, query, into=into, add_pkey_=True, **kw_args)
            pkeys_table_exists_ref[0] = True
    
    limit_ref = [None]
    def mk_main_select(joins, cols):
        return sql.mk_select(db, joins, cols, limit=limit_ref[0], order_by=None)
    
    insert_in_table = in_table
    insert_in_tables = [insert_in_table]
    join_cols = sql_gen.ColDict(db, out_table)
    
    exc_strs = set()
    def log_exc(e):
        e_str = exc.str_(e, first_line_only=True)
        log_debug('Caught exception: '+e_str)
        assert e_str not in exc_strs # avoid infinite loops
        exc_strs.add(e_str)
    
    def remove_all_rows():
        log_debug('Ignoring all rows')
        limit_ref[0] = 0 # just create an empty pkeys table
    
    def ignore_cond(cond, e):
        out_table_cols = sql_gen.ColDict(db, out_table)
        out_table_cols.update(util.dict_subset_right_join({},
            sql.table_cols(db, out_table)))
        
        in_cols = []
        cond = sql.map_expr(db, cond, mapping, in_cols)
        cond = sql.map_expr(db, cond, out_table_cols)
        
        track_data_error(db, errors_table_, sql_gen.cols_srcs(in_cols), None,
            e.cause.pgcode,
            strings.ensure_newl(e.cause.pgerror)+'condition: '+cond)
        
        not_cond = sql_gen.NotCond(sql_gen.CustomCode(cond))
        log_debug('Ignoring rows where '+strings.as_tt(not_cond.to_str(db)))
        sql.delete(db, insert_in_table, not_cond)
    
    not_null_cols = set()
    def ignore(in_col, value, e):
        in_col = sql_gen.with_table(in_col, insert_in_table)
        
        track_data_error(db, errors_table_, in_col.srcs, value,
            e.cause.pgcode, e.cause.pgerror)
        log_debug('Ignoring rows with '+strings.as_tt(repr(in_col))+' = '
            +strings.as_tt(repr(value)))
        
        sql.add_index(db, in_col, insert_in_table) # enable fast filtering
        if value != None and in_col not in not_null_cols:
            # Try just mapping the value to NULL
            sql.update(db, insert_in_table, [(in_col, None)],
                sql_gen.ColValueCond(in_col, value))
        else:
            sql.delete(db, insert_in_table, sql_gen.ColValueCond(in_col, value))
            if value == None: not_null_cols.add(in_col)
    
    def insert_pkeys_table(which):
        return sql_gen.Table(sql_gen.concat(in_table.name,
            '_insert_'+which+'_pkeys'))
    insert_out_pkeys = insert_pkeys_table('out')
    insert_in_pkeys = insert_pkeys_table('in')
    
    # Do inserts and selects
    while True:
        if limit_ref[0] == 0: # special case
            log_debug('Creating an empty pkeys table')
            cur = sql.run_query_into(db, sql.mk_select(db, out_table,
                [out_pkey], order_by=None, limit=0), into=insert_out_pkeys)
            break # don't do main case
        
        has_joins = join_cols != {}
        
        log_debug('Trying to insert new rows')
        
        # Prepare to insert new rows
        if is_function:
            log_debug('Calling function on input rows')
            args = dict(((k.name, v) for k, v in mapping.iteritems()))
            func_call = sql_gen.NamedCol(out_pkey,
                sql_gen.FunctionCall(out_table, **args))
        else:
            insert_args = dict(recover=True, cacheable=False)
            if has_joins:
                insert_args.update(dict(ignore=True))
            else:
                insert_args.update(dict(returning=out_pkey,
                    into=insert_out_pkeys))
            main_select = mk_main_select([insert_in_table], [sql_gen.with_table(
                c, insert_in_table) for c in mapping.values()])
        
        try:
            cur = None
            if is_function:
                insert_into_pkeys(input_joins, [in_pkey_col, func_call],
                    recover=True)
            else:
                cur = sql.insert_select(db, out_table, mapping.keys(),
                    main_select, **insert_args)
            break # insert successful
        except sql.MissingCastException, e:
            log_exc(e)
            
            out_col = e.col
            type_ = e.type
            
            log_debug('Casting '+strings.as_tt(out_col)+' input to '
                +strings.as_tt(type_))
            in_col = mapping[out_col]
            while True:
                try:
                    mapping[out_col] = cast_temp_col(db, type_, in_col,
                        errors_table_)
                    break # cast successful
                except sql.InvalidValueException, e:
                    log_exc(e)
                    
                    ignore(in_col, e.value, e)
        except sql.DuplicateKeyException, e:
            log_exc(e)
            
            # Different rows violating different unique constraints not
            # supported
            assert not join_cols
            
            join_cols.update(util.dict_subset_right_join(mapping, e.cols))
            log_debug('Ignoring existing rows, comparing on these columns:\n'
                +strings.as_inline_table(join_cols, ustr=col_ustr))
            
            # Uniquify input table to avoid internal duplicate keys
            insert_in_table = sql.distinct_table(db, insert_in_table,
                join_cols.values())
            insert_in_tables.append(insert_in_table)
        except sql.NullValueException, e:
            log_exc(e)
            
            out_col, = e.cols
            try: in_col = mapping[out_col]
            except KeyError:
                msg = 'Missing mapping for NOT NULL column '+out_col
                log_debug(msg)
                if default == None: on_error(SyntaxError(msg)) # required col
                remove_all_rows()
            else: ignore(in_col, None, e)
        except sql.CheckException, e:
            log_exc(e)
            
            ignore_cond(e.cond, e)
        except sql.InvalidValueException, e:
            log_exc(e)
            
            for in_col in mapping.values(): ignore(in_col, e.value, e)
        except sql.DatabaseErrors, e:
            log_exc(e)
            
            log_debug('No handler for exception')
            on_error(e)
            remove_all_rows()
        # after exception handled, rerun loop with additional constraints
    
    if cur != None and row_ct_ref != None and cur.rowcount >= 0:
        row_ct_ref[0] += cur.rowcount
    
    if is_function: pass # pkeys table already created
    elif has_joins:
        select_joins = input_joins+[sql_gen.Join(out_table, join_cols)]
        log_debug('Getting output table pkeys of existing/inserted rows')
        insert_into_pkeys(select_joins, pkeys_cols)
    else:
        sql.add_row_num(db, insert_out_pkeys) # for joining with input pkeys
        
        log_debug('Getting input table pkeys of inserted rows')
        # Note that mk_main_select() does not use ORDER BY. Instead, assume that
        # since the SELECT query is identical to the one used in INSERT SELECT,
        # its rows will be retrieved in the same order.
        sql.run_query_into(db, mk_main_select(input_joins, [in_pkey]),
            into=insert_in_pkeys)
        sql.add_row_num(db, insert_in_pkeys) # for joining with output pkeys
        
        assert sql.table_row_count(db, insert_out_pkeys) == sql.table_row_count(
            db, insert_in_pkeys)
        
        log_debug('Combining output and input pkeys in inserted order')
        pkey_joins = [insert_in_pkeys, sql_gen.Join(insert_out_pkeys,
            {sql.row_num_col: sql_gen.join_same_not_null})]
        insert_into_pkeys(pkey_joins, pkeys_names)
        
        sql.empty_temp(db, [insert_out_pkeys, insert_in_pkeys])
    
    if not is_function:
        log_debug('Setting pkeys of missing rows to '
            +strings.as_tt(repr(default)))
        missing_rows_joins = [full_in_table, sql_gen.Join(into,
            {in_pkey: sql_gen.join_same_not_null}, sql_gen.filter_out)]
            # must use join_same_not_null or query will take forever
        insert_into_pkeys(missing_rows_joins,
            [sql_gen.Col(in_pkey, full_in_table),
            sql_gen.NamedCol(out_pkey, default)])
    # otherwise, there is already an entry for every row
    
    assert (sql.table_row_count(db, into)
        == sql.table_row_count(db, full_in_table))
    
    sql.empty_temp(db, insert_in_tables+[full_in_table])
    
    srcs = []
    if is_func: srcs = sql_gen.cols_srcs(in_cols)
    return sql_gen.Col(out_pkey, into, srcs)