# Database import/export

import exc
import dicts
import sql
import sql_gen
import strings
import util

##### Data cleanup

def cleanup_table(db, table, cols):
    table = sql_gen.as_Table(table)
    cols = map(sql_gen.as_Col, cols)
    
    expr = ('nullif(nullif(trim(both from %s), '+db.esc_value('')+'), '
        +db.esc_value(r'\N')+')')
    changes = [(v, sql_gen.CustomCode(expr % v.to_str(db)))
        for v in cols]
    
    sql.update(db, table, changes, in_place=True)
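# Usage sketch (illustrative only; `db` is a live connection object from this
# codebase's sql module, and the table/column names are hypothetical):
#     cleanup_table(db, 'plots', ['plotname', 'notes'])
# This trims each listed column and rewrites values that are empty (after
# trimming) or the literal '\N' placeholder to NULL, updating the table in
# place.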

##### Error tracking

def track_data_error(db, errors_table, cols, value, error_code, error):
    '''
    @param errors_table If None, does nothing.
    '''
    if errors_table == None or cols == (): return
    
    for col in cols:
        try:
            sql.insert(db, errors_table, dict(column=col.name, value=value,
                error_code=error_code, error=error), recover=True,
                cacheable=True, log_level=4)
        except sql.DuplicateKeyException: pass
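# Illustrative call (a sketch; the column object, value, and PostgreSQL error
# code/message shown are hypothetical examples):
#     track_data_error(db, errors_table_, [sql_gen.Col('elevation_m')], 'n/a',
#         '22P02', 'invalid input syntax for type double precision')
# Rows that would duplicate an existing errors-table entry are silently skipped.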

def cast(db, type_, col, errors_table=None):
    '''Casts an (unrenamed) column or value.
    If errors_table set and col has srcs, saves errors in errors_table (using
    col's srcs attr as source columns). Otherwise, converts errors to warnings.
    @param col str|sql_gen.Col|sql_gen.Literal
    @param errors_table None|sql_gen.Table|str
    '''
    col = sql_gen.as_Col(col)
    
    # Don't convert exceptions to warnings for user-supplied constants
    if isinstance(col, sql_gen.Literal): return sql_gen.Cast(type_, col)
    
    assert not isinstance(col, sql_gen.NamedCol)
    
    save_errors = (errors_table != None and isinstance(col, sql_gen.Col)
        and col.srcs != ())
    function_name = type_
    if save_errors:
        errors_table = sql_gen.as_Table(errors_table)
        
        srcs = map(sql_gen.to_name_only_col, col.srcs)
        function_name = str(sql_gen.FunctionCall(function_name, *srcs))
    function = db.TempFunction(function_name)
    
    while True:
        # Create function definition
        query = '''\
CREATE FUNCTION '''+function.to_str(db)+'''(value text)
RETURNS '''+type_+'''
LANGUAGE plpgsql
'''
        if not save_errors: query += 'IMMUTABLE '
        query += '''\
STRICT
AS $$
BEGIN
    /* The explicit cast to the return type is needed to make the cast happen
    inside the try block. (Implicit casts to the return type happen at the end
    of the function, outside any block.) */
    RETURN value::'''+type_+''';
EXCEPTION
    WHEN data_exception THEN
'''
        if save_errors:
            errors_table_cols = map(sql_gen.Col,
                ['column', 'value', 'error_code', 'error'])
            query += '''\
        -- Save error in errors table.
        DECLARE
            error_code text := SQLSTATE;
            error text := SQLERRM;
            "column" text;
        BEGIN
            -- Insert the value and error for *each* source column.
            FOR "column" IN
'''+sql.mk_select(db, sql_gen.NamedValues('c', None, [[c.name] for c in srcs]),
    order_by=None)+'''
            LOOP
                BEGIN
'''+sql.mk_insert_select(db, errors_table, errors_table_cols,
    sql_gen.Values(errors_table_cols).to_str(db))+''';
                EXCEPTION
                    WHEN unique_violation THEN NULL; -- continue to next row
                END;
            END LOOP;
        END;
'''
        else:
            query += '''\
        RAISE WARNING '%', SQLERRM;
'''
        query += '''\
        RETURN NULL;
END;
$$;
'''
        
        # Create function
        try:
            sql.run_query(db, query, recover=True, cacheable=True,
                log_ignore_excs=(sql.DuplicateException,))
            break # successful
        except sql.DuplicateException:
            function.name = sql.next_version(function.name)
            # try again with next version of name
    
    return sql_gen.FunctionCall(function, col)
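# Illustrative use (a sketch; the type and column names are hypothetical):
#     expr = cast(db, 'double precision', sql_gen.Col('elevation_m', in_table),
#         errors_table_)
# The returned sql_gen.FunctionCall wraps the column in a temp plpgsql function
# that returns NULL on invalid input (logging or warning about the error)
# instead of aborting the surrounding insert.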

def cast_temp_col(db, type_, col, errors_table=None):
    '''Like cast(), but creates a new column with the cast values if the input
    is a column.
    @return The new column or cast value
    '''
    def cast_(col): return cast(db, type_, col, errors_table)
    
    try: col = sql_gen.underlying_col(col)
    except sql_gen.NoUnderlyingTableException: return sql_gen.wrap(cast_, col)
    
    table = col.table
    new_col = sql_gen.suffixed_col(col, '::'+strings.first_word(type_))
    expr = cast_(col)
    
    # Add column
    new_typed_col = sql_gen.TypedCol(new_col.name, type_)
    sql.add_col(db, table, new_typed_col, comment=repr(col)+'::'+type_)
    new_col.name = new_typed_col.name # propagate any renaming
    
    sql.update(db, table, [(new_col, expr)], in_place=True, recover=True)
    
    return new_col
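# Sketch of the effect (hypothetical names): given a text column c in a temp
# table, cast_temp_col(db, 'integer', c, errors_table_) adds a sibling column
# named like c.name+'::integer', fills it with the cast values (NULL where the
# cast fails), and returns the new column for use in later queries.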

def errors_table(db, table, if_exists=True):
    '''
    @param if_exists If set, returns None if the errors table doesn't exist
    @return None|sql_gen.Table
    '''
    table = sql_gen.as_Table(table)
    if table.srcs != (): table = table.srcs[0]
    
    errors_table = sql_gen.suffixed_table(table, '.errors')
    if if_exists and not sql.table_exists(db, errors_table): return None
    return errors_table
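# Example (illustrative; the table name is hypothetical): errors_table(db,
# 'plots') returns the sql_gen.Table for a table named like 'plots.errors' if
# it exists, else None.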

##### Import

def put(db, table, row, pkey_=None, row_ct_ref=None):
    '''Recovers from errors.
    Only works under PostgreSQL (uses INSERT RETURNING).
    '''
    row = sql_gen.ColDict(db, table, row)
    if pkey_ == None: pkey_ = sql.pkey(db, table, recover=True)
    
    try:
        cur = sql.insert(db, table, row, pkey_, recover=True, log_level=3.5)
        if row_ct_ref != None and cur.rowcount >= 0:
            row_ct_ref[0] += cur.rowcount
        return sql.value(cur)
    except sql.DuplicateKeyException, e:
        row = sql_gen.ColDict(db, table,
            util.dict_subset_right_join(row, e.cols))
        return sql.value(sql.select(db, table, [pkey_], row, recover=True,
            log_level=3.5))
    except sql.NullValueException: return None
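# Usage sketch (hypothetical table/columns; row_ct_ref is a 1-element list used
# as a mutable insert counter):
#     row_ct = [0]
#     pkey = put(db, 'party', {'organizationname': 'ACME'}, row_ct_ref=row_ct)
# On a duplicate key, the existing row's pkey is returned instead of inserting.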

def get(db, table, row, pkey, row_ct_ref=None, create=False):
    '''Recovers from errors'''
    try:
        return sql.value(sql.select(db, table, [pkey], row, limit=1,
            recover=True))
    except StopIteration:
        if not create: raise
        return put(db, table, row, pkey, row_ct_ref) # insert new row
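# Sketch (hypothetical names): get(db, 'party', {'organizationname': 'ACME'},
# 'party_id', create=True) looks up the matching row's pkey and falls back to
# put() to insert the row when no match exists.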

def is_func_result(col):
    return col.table.name.find('(') >= 0 and col.name == 'result'

def into_table_name(out_table, in_tables0, mapping, is_func):
    def in_col_str(in_col):
        in_col = sql_gen.remove_col_rename(in_col)
        if isinstance(in_col, sql_gen.Col):
            table = in_col.table
            if table == in_tables0:
                in_col = sql_gen.to_name_only_col(in_col)
            elif is_func_result(in_col): in_col = table # omit col name
        return str(in_col)
    
    str_ = str(out_table)
    if is_func:
        str_ += '('
        
        try: value_in_col = mapping['value']
        except KeyError:
            str_ += ', '.join((str(k)+'='+in_col_str(v)
                for k, v in mapping.iteritems()))
        else: str_ += in_col_str(value_in_col)
        
        str_ += ')'
    else:
        out_col = 'rank'
        try: in_col = mapping[out_col]
        except KeyError: str_ += '_pkeys'
        else: # has a rank column, so hierarchical
            str_ += '['+str(out_col)+'='+in_col_str(in_col)+']'
    return str_
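# Name shapes produced above, for illustration (table/column names are
# hypothetical):
# * table output, no 'rank' mapping:    "taxonconcept_pkeys"
# * table output, 'rank' mapped:        "taxonconcept[rank=<in col>]"
# * function output, 'value' mapped:    "somefunc(<in col>)"
# * function output, no 'value' mapped: "somefunc(arg=<in col>, ...)"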

def put_table(db, out_table, in_tables, mapping, row_ct_ref=None, into=None,
    default=None, is_func=False, on_error=exc.raise_):
    '''Recovers from errors.
    Only works under PostgreSQL (uses INSERT RETURNING).
    IMPORTANT: Must be run at the *beginning* of a transaction.
    @param in_tables The main input table to select from, followed by a list of
        tables to join with it using the main input table's pkey
    @param mapping dict(out_table_col=in_table_col, ...)
        * out_table_col: str (*not* sql_gen.Col)
        * in_table_col: sql_gen.Col|literal-value
    @param into The table to contain the output and input pkeys.
        Defaults to `out_table.name+'_pkeys'`.
    @param default The *output* column to use as the pkey for missing rows.
        If this output column does not exist in the mapping, uses None.
    @param is_func Whether out_table is the name of a SQL function, not a table
    @return sql_gen.Col Where the output pkeys are made available
    '''
    out_table = sql_gen.as_Table(out_table)
    
    def log_debug(msg): db.log_debug(msg, level=1.5)
    def col_ustr(str_):
        return strings.repr_no_u(sql_gen.remove_col_rename(str_))
    
    log_debug('********** New iteration **********')
    log_debug('Inserting these input columns into '+strings.as_tt(
        out_table.to_str(db))+':\n'+strings.as_table(mapping, ustr=col_ustr))
    
    is_function = sql.function_exists(db, out_table)
    
    if is_function: out_pkey = 'result'
    else: out_pkey = sql.pkey(db, out_table, recover=True)
    out_pkey_col = sql_gen.as_Col(out_pkey, out_table)
    
    if mapping == {}: # need at least one column for INSERT SELECT
        mapping = {out_pkey: None} # ColDict will replace with default value
    
    # Create input joins from list of input tables
    in_tables_ = in_tables[:] # don't modify input!
    in_tables0 = in_tables_.pop(0) # first table is separate
    errors_table_ = errors_table(db, in_tables0)
    in_pkey = sql.pkey(db, in_tables0, recover=True)
    in_pkey_col = sql_gen.as_Col(in_pkey, in_tables0)
    input_joins = [in_tables0]+[sql_gen.Join(v,
        {in_pkey: sql_gen.join_same_not_null}) for v in in_tables_]
    
    if into == None:
        into = into_table_name(out_table, in_tables0, mapping, is_func)
    into = sql_gen.as_Table(into)
    
    # Set column sources
    in_cols = filter(sql_gen.is_table_col, mapping.values())
    for col in in_cols:
        if col.table == in_tables0: col.set_srcs(sql_gen.src_self)
    
    log_debug('Joining together input tables into temp table')
    # Place in new table for speed and so don't modify input if values edited
    in_table = sql_gen.Table('in')
    mapping = dicts.join(mapping, sql.flatten(db, in_table, input_joins,
        in_cols, preserve=[in_pkey_col]))
    input_joins = [in_table]
    db.log_debug('Temp table: '+strings.as_tt(in_table.to_str(db)), level=2)
    
    mapping = sql_gen.ColDict(db, out_table, mapping)
        # after applying dicts.join() because that returns a plain dict
    
    # Resolve default value column
    if default != None:
        try: default = mapping[default]
        except KeyError:
            db.log_debug('Default value column '
                +strings.as_tt(strings.repr_no_u(default))
                +' does not exist in mapping, falling back to None', level=2.1)
            default = None
    
    # Save default values for all rows since in_table may have rows deleted
    full_in_table = sql_gen.suffixed_table(in_table, '_full')
    full_in_table_cols = [in_pkey_col]
    if default != None:
        full_in_table_cols.append(default)
        default = sql_gen.with_table(default, full_in_table)
    sql.run_query_into(db, sql.mk_select(db, in_table, full_in_table_cols,
        order_by=None), into=full_in_table, add_pkey_=True)

    pkeys_names = [in_pkey, out_pkey]
    pkeys_cols = [in_pkey_col, out_pkey_col]
    
    pkeys_table_exists_ref = [False]
    def insert_into_pkeys(joins, cols, **kw_args):
        query = sql.mk_select(db, joins, cols, order_by=None)
        if pkeys_table_exists_ref[0]:
            sql.insert_select(db, into, pkeys_names, query, **kw_args)
        else:
            sql.run_query_into(db, query, into=into, add_pkey_=True, **kw_args)
            pkeys_table_exists_ref[0] = True
    
    limit_ref = [None]
    insert_in_table = in_table
    insert_in_tables = [insert_in_table]
    conds = set()
    join_cols = sql_gen.ColDict(db, out_table)
    distinct_on = sql_gen.ColDict(db, out_table)
    def mk_main_select(joins, cols):
        conds_ = [(sql_gen.with_table(k, insert_in_table), v) for k, v in conds]
        return sql.mk_select(db, joins, cols, conds_, limit=limit_ref[0],
            order_by=None)
    
    exc_strs = set()
    def log_exc(e):
        e_str = exc.str_(e, first_line_only=True)
        log_debug('Caught exception: '+e_str)
        assert e_str not in exc_strs # avoid infinite loops
        exc_strs.add(e_str)
    
    def remove_all_rows():
        log_debug('Ignoring all rows')
        limit_ref[0] = 0 # just create an empty pkeys table
    
    def ignore_cond(cond, e):
        out_table_cols = sql_gen.ColDict(db, out_table)
        out_table_cols.update(util.dict_subset_right_join({},
            sql.table_cols(db, out_table)))
        
        in_cols = []
        cond = sql.map_expr(db, cond, mapping, in_cols)
        cond = sql.map_expr(db, cond, out_table_cols)
        
        track_data_error(db, errors_table_, sql_gen.cols_srcs(in_cols), None,
            e.cause.pgcode,
            strings.ensure_newl(e.cause.pgerror)+'condition: '+cond)
        
        not_cond = sql_gen.NotCond(sql_gen.CustomCode(cond))
        log_debug('Ignoring rows where '+strings.as_tt(not_cond.to_str(db)))
        sql.delete(db, insert_in_table, not_cond)
    
    not_null_cols = set()
    def ignore(in_col, value, e):
        in_col = sql_gen.with_table(in_col, insert_in_table)
        
        track_data_error(db, errors_table_, in_col.srcs, value,
            e.cause.pgcode, e.cause.pgerror)
        log_debug('Ignoring rows with '+strings.as_tt(repr(in_col))+' = '
            +strings.as_tt(repr(value)))
        
        sql.add_index(db, in_col, insert_in_table) # enable fast filtering
        if value != None and in_col not in not_null_cols:
            # Try just mapping the value to NULL
            sql.update(db, insert_in_table, [(in_col, None)],
                sql_gen.ColValueCond(in_col, value))
        else:
            sql.delete(db, insert_in_table, sql_gen.ColValueCond(in_col, value))
            if value == None: not_null_cols.add(in_col)
    
    def insert_pkeys_table(which):
        return sql_gen.Table(sql_gen.concat(in_table.name,
            '_insert_'+which+'_pkeys'))
    insert_out_pkeys = insert_pkeys_table('out')
    insert_in_pkeys = insert_pkeys_table('in')
    
    # Do inserts and selects
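    # Note: each exception handler below narrows the problem (casting a column,
    # ignoring offending rows, or switching to join-based duplicate handling),
    # then the loop reruns the insert until it succeeds, possibly after all
    # rows have been removed.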
    while True:
        if limit_ref[0] == 0: # special case
            log_debug('Creating an empty pkeys table')
            cur = sql.run_query_into(db, sql.mk_select(db, out_table,
                [out_pkey], order_by=None, limit=0), into=insert_out_pkeys)
            break # don't do main case
        
        has_joins = join_cols != {}
        
        log_debug('Trying to insert new rows')
        
        # Prepare to insert new rows
        if is_function:
            log_debug('Calling function on input rows')
            args = dict(((k.name, v) for k, v in mapping.iteritems()))
            func_call = sql_gen.NamedCol(out_pkey,
                sql_gen.FunctionCall(out_table, **args))
        else:
            insert_args = dict(recover=True, cacheable=False)
            if has_joins:
                insert_args.update(dict(ignore=True))
            else:
                insert_args.update(dict(returning=out_pkey,
                    into=insert_out_pkeys))
            main_select = mk_main_select([insert_in_table], [sql_gen.with_table(
                c, insert_in_table) for c in mapping.values()])
        
        try:
            cur = None
            if is_function:
                insert_into_pkeys(input_joins, [in_pkey_col, func_call],
                    recover=True)
            else:
                cur = sql.insert_select(db, out_table, mapping.keys(),
                    main_select, **insert_args)
            break # insert successful
        except sql.MissingCastException, e:
            log_exc(e)
            
            out_col = e.col
            type_ = e.type
            
            log_debug('Casting '+strings.as_tt(out_col)+' input to '
                +strings.as_tt(type_))
            in_col = mapping[out_col]
            while True:
                try:
                    mapping[out_col] = cast_temp_col(db, type_, in_col,
                        errors_table_)
                    break # cast successful
                except sql.InvalidValueException, e:
                    log_exc(e)
                    
                    ignore(in_col, e.value, e)
        except sql.DuplicateKeyException, e:
            log_exc(e)
            
            # Different rows violating different unique constraints not
            # supported
            assert not join_cols
            assert not distinct_on
            
            distinct_on.update(util.dict_subset(mapping, e.cols))
            join_cols.update(util.dict_subset_right_join(mapping, e.cols))
            log_debug('Ignoring existing rows, comparing on these columns:\n'
                +strings.as_inline_table(join_cols, ustr=col_ustr))
            
            # Uniquify input table to avoid internal duplicate keys
            insert_in_table = sql.distinct_table(db, insert_in_table,
                join_cols.values())
            insert_in_tables.append(insert_in_table)
        except sql.NullValueException, e:
            log_exc(e)
            
            out_col, = e.cols
            try: in_col = mapping[out_col]
            except KeyError:
                msg = 'Missing mapping for NOT NULL column '+out_col
                log_debug(msg)
                if default == None: on_error(SyntaxError(msg)) # required col
                remove_all_rows()
            else: ignore(in_col, None, e)
        except sql.CheckException, e:
            log_exc(e)
            
            ignore_cond(e.cond, e)
        except sql.DatabaseErrors, e:
            log_exc(e)
            
            log_debug('No handler for exception')
            on_error(e)
            remove_all_rows()
        # after exception handled, rerun loop with additional constraints
    
    if cur != None and row_ct_ref != None and cur.rowcount >= 0:
        row_ct_ref[0] += cur.rowcount
    
    if is_function: pass # pkeys table already created
    elif has_joins:
        select_joins = input_joins+[sql_gen.Join(out_table, join_cols)]
        log_debug('Getting output table pkeys of existing/inserted rows')
        insert_into_pkeys(select_joins, pkeys_cols)
    else:
        sql.add_row_num(db, insert_out_pkeys) # for joining with input pkeys
        
        log_debug('Getting input table pkeys of inserted rows')
        # Note that mk_main_select() does not use ORDER BY. Instead, assume that
        # since the SELECT query is identical to the one used in INSERT SELECT,
        # its rows will be retrieved in the same order.
        sql.run_query_into(db, mk_main_select(input_joins, [in_pkey]),
            into=insert_in_pkeys)
        sql.add_row_num(db, insert_in_pkeys) # for joining with output pkeys
        
        assert sql.table_row_count(db, insert_out_pkeys) == sql.table_row_count(
            db, insert_in_pkeys)
        
        log_debug('Combining output and input pkeys in inserted order')
        pkey_joins = [insert_in_pkeys, sql_gen.Join(insert_out_pkeys,
            {sql.row_num_col: sql_gen.join_same_not_null})]
        insert_into_pkeys(pkey_joins, pkeys_names)
        
        sql.empty_temp(db, [insert_out_pkeys, insert_in_pkeys])
    
    if not is_function:
        log_debug('Setting pkeys of missing rows to '
            +strings.as_tt(repr(default)))
        missing_rows_joins = [full_in_table, sql_gen.Join(into,
            {in_pkey: sql_gen.join_same_not_null}, sql_gen.filter_out)]
            # must use join_same_not_null or query will take forever
        insert_into_pkeys(missing_rows_joins,
            [sql_gen.Col(in_pkey, full_in_table),
            sql_gen.NamedCol(out_pkey, default)])
    # otherwise, there is already an entry for every row
    
    assert (sql.table_row_count(db, into)
        == sql.table_row_count(db, full_in_table))
    
    sql.empty_temp(db, insert_in_tables+[full_in_table])
    
    srcs = []
    if is_func: srcs = sql_gen.cols_srcs(in_cols)
    return sql_gen.Col(out_pkey, into, srcs)
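# Usage sketch (hedged; the table, columns, and mapping here are hypothetical):
#     pkeys_col = put_table(db, 'taxonconcept', [in_table],
#         {'scientificname': sql_gen.Col('taxon_name', in_table)})
# The returned sql_gen.Col points into a pkeys table that maps each input row's
# pkey to the pkey of the matching (inserted or existing) output row.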