# Database import/export

import exc
import dicts
import sql
import sql_gen
import strings
import util

##### Data cleanup

def cleanup_table(db, table, cols):
    table = sql_gen.as_Table(table)
    cols = map(sql_gen.as_Col, cols)
    
    expr = ('nullif(nullif(trim(both from %s), '+db.esc_value('')+'), '
        +db.esc_value(r'\N')+')')
    changes = [(v, sql_gen.CustomCode(expr % v.to_str(db)))
        for v in cols]
    
    sql.update(db, table, changes, in_place=True)
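
# Usage sketch (hypothetical table/column names; assumes `db` is an open
# database connection as used throughout this module). Replaces empty strings
# and the '\N' NULL placeholder with NULL, in place:
#     cleanup_table(db, 'staging', ['plot_name', 'site_code'])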

##### Error tracking

def track_data_error(db, errors_table, cols, value, error_code, error):
    '''
    @param errors_table If None, does nothing.
    '''
    if errors_table == None or cols == (): return
    
    for col in cols:
        try:
            sql.insert(db, errors_table, dict(column=col.name, value=value,
                error_code=error_code, error=error), recover=True,
                cacheable=True, log_level=4)
        except sql.DuplicateKeyException: pass
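
# Usage sketch (hypothetical values; assumes the errors table has the
# column/value/error_code/error columns inserted above). Records one error row
# per source column and silently skips duplicates:
#     track_data_error(db, errors_table(db, 'staging'),
#         [sql_gen.Col('diameter')], '1,5', '22P02',
#         'invalid input syntax for type double precision')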

def cast(db, type_, col, errors_table=None):
    '''Casts an (unrenamed) column or value.
    If errors_table set and col has srcs, saves errors in errors_table (using
    col's srcs attr as the source columns) and converts errors to warnings.
    @param col str|sql_gen.Col|sql_gen.Literal
    @param errors_table None|sql_gen.Table|str
    '''
    col = sql_gen.as_Col(col)
    
    # Don't convert exceptions to warnings for user-supplied constants
    if isinstance(col, sql_gen.Literal): return sql_gen.Cast(type_, col)
    
    assert not isinstance(col, sql_gen.NamedCol)
    
    save_errors = (errors_table != None and isinstance(col, sql_gen.Col)
        and col.srcs != ())
    function_name = type_
    if save_errors:
        errors_table = sql_gen.as_Table(errors_table)
        
        srcs = map(sql_gen.to_name_only_col, col.srcs)
        function_name = str(sql_gen.FunctionCall(function_name, *srcs))
    function = db.TempFunction(function_name)
    
    while True:
        # Create function definition
        query = '''\
CREATE FUNCTION '''+function.to_str(db)+'''(value text)
RETURNS '''+type_+'''
LANGUAGE plpgsql
'''
        if not save_errors: query += 'IMMUTABLE '
        query += '''\
STRICT
AS $$
BEGIN
    /* The explicit cast to the return type is needed to make the cast happen
    inside the try block. (Implicit casts to the return type happen at the end
    of the function, outside any block.) */
    RETURN value::'''+type_+''';
EXCEPTION
    WHEN data_exception THEN
'''
        if save_errors:
            errors_table_cols = map(sql_gen.Col,
                ['column', 'value', 'error_code', 'error'])
            query += '''\
        -- Save error in errors table.
        DECLARE
            error_code text := SQLSTATE;
            error text := SQLERRM;
            "column" text;
        BEGIN
            -- Insert the value and error for *each* source column.
            FOR "column" IN
'''+sql.mk_select(db, sql_gen.NamedValues('c', None, [[c.name] for c in srcs]),
    order_by=None)+'''
            LOOP
                BEGIN
'''+sql.mk_insert_select(db, errors_table, errors_table_cols,
    sql_gen.Values(errors_table_cols).to_str(db))+''';
                EXCEPTION
                    WHEN unique_violation THEN NULL; -- continue to next row
                END;
            END LOOP;
        END;
        
'''
        query += '''\
        RAISE WARNING '%', SQLERRM;
        RETURN NULL;
END;
$$;
'''
        
        # Create function
        try:
            sql.run_query(db, query, recover=True, cacheable=True,
                log_ignore_excs=(sql.DuplicateException,))
            break # successful
        except sql.DuplicateException:
            function.name = sql.next_version(function.name)
            # try again with next version of name
    
    return sql_gen.FunctionCall(function, col)
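
# Usage sketch (hypothetical names; assumes in_col and errors_table_ are set up
# as elsewhere in this module). The returned FunctionCall wraps in_col in a
# temp function that performs the cast; when errors are being saved, cast
# failures are logged to the errors table and returned as NULL with a warning
# instead of aborting the query:
#     casted = cast(db, 'double precision', in_col, errors_table_)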

def cast_temp_col(db, type_, col, errors_table=None):
    '''Like cast(), but creates a new column with the cast values if the input
    is a column.
    @return The new column or cast value
    '''
    def cast_(col): return cast(db, type_, col, errors_table)
    
    try: col = sql_gen.underlying_col(col)
    except sql_gen.NoUnderlyingTableException: return sql_gen.wrap(cast_, col)
    
    table = col.table
    new_col = sql_gen.suffixed_col(col, '::'+strings.first_word(type_))
    expr = cast_(col)
    
    # Add column
    new_typed_col = sql_gen.TypedCol(new_col.name, type_)
    sql.add_col(db, table, new_typed_col, comment=repr(col)+'::'+type_)
    new_col.name = new_typed_col.name # propagate any renaming
    
    sql.update(db, table, [(new_col, expr)], in_place=True, recover=True)
    
    return new_col
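
# Usage sketch (hypothetical names). Adds a sibling column (named like
# 'diameter::double precision') to the input column's underlying table and
# fills it with the cast values; non-table inputs just get wrapped by cast():
#     num_col = cast_temp_col(db, 'double precision',
#         sql_gen.Col('diameter', 'staging'), errors_table_)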

def errors_table(db, table, if_exists=True):
    '''
    @param if_exists If set, returns None if the errors table doesn't exist
    @return None|sql_gen.Table
    '''
    table = sql_gen.as_Table(table)
    if table.srcs != (): table = table.srcs[0]
    
    errors_table = sql_gen.suffixed_table(table, '.errors')
    if if_exists and not sql.table_exists(db, errors_table): return None
    return errors_table
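
# Usage sketch (hypothetical name). For an input table 'staging', this looks up
# the suffixed table (named like 'staging.errors') and returns None if it
# hasn't been created:
#     errors_table_ = errors_table(db, 'staging')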

##### Import

def put(db, table, row, pkey_=None, row_ct_ref=None):
    '''Recovers from errors.
    Only works under PostgreSQL (uses INSERT RETURNING).
    '''
    row = sql_gen.ColDict(db, table, row)
    if pkey_ == None: pkey_ = sql.pkey(db, table, recover=True)
    
    try:
        cur = sql.insert(db, table, row, pkey_, recover=True, log_level=3.5)
        if row_ct_ref != None and cur.rowcount >= 0:
            row_ct_ref[0] += cur.rowcount
        return sql.value(cur)
    except sql.DuplicateKeyException, e:
        row = sql_gen.ColDict(db, table,
            util.dict_subset_right_join(row, e.cols))
        return sql.value(sql.select(db, table, [pkey_], row, recover=True,
            log_level=3.5))
    except sql.NullValueException: return None
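
# Usage sketch (hypothetical table/columns). Returns the new row's pkey, or the
# existing row's pkey on a duplicate key, and accumulates the insert count in
# row_ct:
#     row_ct = [0]
#     party_id = put(db, 'party', {'organizationname': 'UNCC'},
#         row_ct_ref=row_ct)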

def get(db, table, row, pkey, row_ct_ref=None, create=False):
    '''Recovers from errors'''
    try:
        return sql.value(sql.select(db, table, [pkey], row, limit=1,
            recover=True))
    except StopIteration:
        if not create: raise
        return put(db, table, row, pkey, row_ct_ref) # insert new row
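
# Usage sketch (hypothetical table/columns). Fetches the pkey of the matching
# row, inserting it first via put() when create is set:
#     party_id = get(db, 'party', {'organizationname': 'UNCC'}, 'party_id',
#         create=True)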

def is_func_result(col):
    return col.table.name.find('(') >= 0 and col.name == 'result'

def into_table_name(out_table, in_tables0, mapping, is_func):
    def in_col_str(in_col):
        in_col = sql_gen.remove_col_rename(in_col)
        if isinstance(in_col, sql_gen.Col):
            table = in_col.table
            if table == in_tables0:
                in_col = sql_gen.to_name_only_col(in_col)
            elif is_func_result(in_col): in_col = table # omit col name
        return str(in_col)
    
    str_ = str(out_table)
    if is_func:
        str_ += '('
        
        try: value_in_col = mapping['value']
        except KeyError:
            str_ += ', '.join((str(k)+'='+in_col_str(v)
                for k, v in mapping.iteritems()))
        else: str_ += in_col_str(value_in_col)
        
        str_ += ')'
    else:
        out_col = 'rank'
        try: in_col = mapping[out_col]
        except KeyError: str_ += '_pkeys'
        else: # has a rank column, so hierarchical
            str_ += '['+str(out_col)+'='+in_col_str(in_col)+']'
    return str_
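
# Illustrative name shapes (hypothetical inputs):
#     into_table_name('party', in_tables0, mapping, is_func=False)
#         -> 'party_pkeys' (no rank column in mapping)
#     into_table_name('namedplace', in_tables0, {'rank': rank_col}, False)
#         -> 'namedplace[rank=<rank_col>]' (hierarchical)
#     into_table_name('_merge', in_tables0, {'value': value_col}, is_func=True)
#         -> '_merge(<value_col>)'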

def put_table(db, out_table, in_tables, mapping, row_ct_ref=None, into=None,
    default=None, is_func=False, on_error=exc.raise_):
    '''Recovers from errors.
    Only works under PostgreSQL (uses INSERT RETURNING).
    IMPORTANT: Must be run at the *beginning* of a transaction.
    @param in_tables The main input table to select from, followed by a list of
        tables to join with it using the main input table's pkey
    @param mapping dict(out_table_col=in_table_col, ...)
        * out_table_col: str (*not* sql_gen.Col)
        * in_table_col: sql_gen.Col|literal-value
    @param into The table to contain the output and input pkeys.
        Defaults to `out_table.name+'_pkeys'`.
    @param default The *output* column to use as the pkey for missing rows.
        If this output column does not exist in the mapping, uses None.
    @param is_func Whether out_table is the name of a SQL function, not a table
    @return sql_gen.Col Where the output pkeys are made available
    '''
    out_table = sql_gen.as_Table(out_table)
    
    def log_debug(msg): db.log_debug(msg, level=1.5)
    def col_ustr(str_):
        return strings.repr_no_u(sql_gen.remove_col_rename(str_))
    
    log_debug('********** New iteration **********')
    log_debug('Inserting these input columns into '+strings.as_tt(
        out_table.to_str(db))+':\n'+strings.as_table(mapping, ustr=col_ustr))
    
    is_function = sql.function_exists(db, out_table)
    
    if is_function: out_pkey = 'result'
    else: out_pkey = sql.pkey(db, out_table, recover=True)
    out_pkey_col = sql_gen.as_Col(out_pkey, out_table)
    
    if mapping == {}: # need at least one column for INSERT SELECT
        mapping = {out_pkey: None} # ColDict will replace with default value
    
    # Create input joins from list of input tables
    in_tables_ = in_tables[:] # don't modify input!
    in_tables0 = in_tables_.pop(0) # first table is separate
    errors_table_ = errors_table(db, in_tables0)
    in_pkey = sql.pkey(db, in_tables0, recover=True)
    in_pkey_col = sql_gen.as_Col(in_pkey, in_tables0)
    input_joins = [in_tables0]+[sql_gen.Join(v,
        {in_pkey: sql_gen.join_same_not_null}) for v in in_tables_]
    
    if into == None:
        into = into_table_name(out_table, in_tables0, mapping, is_func)
    into = sql_gen.as_Table(into)
    
    # Set column sources
    in_cols = filter(sql_gen.is_table_col, mapping.values())
    for col in in_cols:
        if col.table == in_tables0: col.set_srcs(sql_gen.src_self)
    
    log_debug('Joining together input tables into temp table')
    # Place in new table for speed and so don't modify input if values edited
    in_table = sql_gen.Table('in')
    mapping = dicts.join(mapping, sql.flatten(db, in_table, input_joins,
        in_cols, preserve=[in_pkey_col]))
    input_joins = [in_table]
    db.log_debug('Temp table: '+strings.as_tt(in_table.to_str(db)), level=2)
    
    mapping = sql_gen.ColDict(db, out_table, mapping)
        # after applying dicts.join() because that returns a plain dict
    
    # Resolve default value column
    if default != None:
        try: default = mapping[default]
        except KeyError:
            db.log_debug('Default value column '
                +strings.as_tt(strings.repr_no_u(default))
                +' does not exist in mapping, falling back to None', level=2.1)
            default = None
    
    # Save default values for all rows since in_table may have rows deleted
    full_in_table = sql_gen.suffixed_table(in_table, '_full')
    full_in_table_cols = [in_pkey_col]
    if default != None:
        full_in_table_cols.append(default)
        default = sql_gen.with_table(default, full_in_table)
    sql.run_query_into(db, sql.mk_select(db, in_table, full_in_table_cols,
        order_by=None), into=full_in_table, add_pkey_=True)
    
    pkeys_names = [in_pkey, out_pkey]
    pkeys_cols = [in_pkey_col, out_pkey_col]
    
    pkeys_table_exists_ref = [False]
    def insert_into_pkeys(joins, cols, **kw_args):
        query = sql.mk_select(db, joins, cols, order_by=None)
        if pkeys_table_exists_ref[0]:
            sql.insert_select(db, into, pkeys_names, query, **kw_args)
        else:
            sql.run_query_into(db, query, into=into, add_pkey_=True, **kw_args)
            pkeys_table_exists_ref[0] = True
    
    limit_ref = [None]
    insert_in_table = in_table
    insert_in_tables = [insert_in_table]
    conds = set()
    join_cols = sql_gen.ColDict(db, out_table)
    distinct_on = sql_gen.ColDict(db, out_table)
    def mk_main_select(joins, cols):
        conds_ = [(sql_gen.with_table(k, insert_in_table), v) for k, v in conds]
        return sql.mk_select(db, joins, cols, conds_, limit=limit_ref[0],
            order_by=None)
    
    exc_strs = set()
    def log_exc(e):
        e_str = exc.str_(e, first_line_only=True)
        log_debug('Caught exception: '+e_str)
        assert e_str not in exc_strs # avoid infinite loops
        exc_strs.add(e_str)
    
    def remove_all_rows():
        log_debug('Ignoring all rows')
        limit_ref[0] = 0 # just create an empty pkeys table
    
    def ignore_cond(cond, e):
        out_table_cols = sql_gen.ColDict(db, out_table)
        out_table_cols.update(util.dict_subset_right_join({},
            sql.table_cols(db, out_table)))
        
        in_cols = []
        cond = sql.map_expr(db, cond, mapping, in_cols)
        cond = sql.map_expr(db, cond, out_table_cols)
        
        track_data_error(db, errors_table_, sql_gen.cols_srcs(in_cols), None,
            e.cause.pgcode,
            strings.ensure_newl(e.cause.pgerror)+'condition: '+cond)
        
        not_cond = sql_gen.NotCond(sql_gen.CustomCode(cond))
        log_debug('Ignoring rows where '+strings.as_tt(not_cond.to_str(db)))
        sql.delete(db, insert_in_table, not_cond)
    
    not_null_cols = set()
    def ignore(in_col, value, e):
        in_col = sql_gen.with_table(in_col, insert_in_table)
        
        track_data_error(db, errors_table_, in_col.srcs, value,
            e.cause.pgcode, e.cause.pgerror)
        log_debug('Ignoring rows with '+strings.as_tt(repr(in_col))+' = '
            +strings.as_tt(repr(value)))
        
        sql.add_index(db, in_col, insert_in_table) # enable fast filtering
        if value != None and in_col not in not_null_cols:
            # Try just mapping the value to NULL
            sql.update(db, insert_in_table, [(in_col, None)],
                sql_gen.ColValueCond(in_col, value))
        else:
            sql.delete(db, insert_in_table, sql_gen.ColValueCond(in_col, value))
            if value == None: not_null_cols.add(in_col)
    
    def insert_pkeys_table(which):
        return sql_gen.Table(sql_gen.concat(in_table.name,
            '_insert_'+which+'_pkeys'))
    insert_out_pkeys = insert_pkeys_table('out')
    insert_in_pkeys = insert_pkeys_table('in')
    
    # Do inserts and selects
    while True:
        if limit_ref[0] == 0: # special case
            log_debug('Creating an empty pkeys table')
            cur = sql.run_query_into(db, sql.mk_select(db, out_table,
                [out_pkey], order_by=None, limit=0), into=insert_out_pkeys)
            break # don't do main case
        
        has_joins = join_cols != {}
        
        log_debug('Trying to insert new rows')
        
        # Prepare to insert new rows
        if is_function:
            log_debug('Calling function on input rows')
            args = dict(((k.name, v) for k, v in mapping.iteritems()))
            func_call = sql_gen.NamedCol(out_pkey,
                sql_gen.FunctionCall(out_table, **args))
        else:
            insert_args = dict(recover=True, cacheable=False)
            if has_joins:
                insert_args.update(dict(ignore=True))
            else:
                insert_args.update(dict(returning=out_pkey,
                    into=insert_out_pkeys))
            main_select = mk_main_select([insert_in_table], [sql_gen.with_table(
                c, insert_in_table) for c in mapping.values()])
        
        try:
            cur = None
            if is_function:
                insert_into_pkeys(input_joins, [in_pkey_col, func_call],
                    recover=True)
            else:
                cur = sql.insert_select(db, out_table, mapping.keys(),
                    main_select, **insert_args)
            break # insert successful
        except sql.MissingCastException, e:
            log_exc(e)
            
            out_col = e.col
            type_ = e.type
            
            log_debug('Casting '+strings.as_tt(out_col)+' input to '
                +strings.as_tt(type_))
            in_col = mapping[out_col]
            while True:
                try:
                    mapping[out_col] = cast_temp_col(db, type_, in_col,
                        errors_table_)
                    break # cast successful
                except sql.InvalidValueException, e:
                    log_exc(e)
                    
                    ignore(in_col, e.value, e)
        except sql.DuplicateKeyException, e:
            log_exc(e)
            
            # Different rows violating different unique constraints not
            # supported
            assert not join_cols
            assert not distinct_on
            
            distinct_on.update(util.dict_subset(mapping, e.cols))
            join_cols.update(util.dict_subset_right_join(mapping, e.cols))
            log_debug('Ignoring existing rows, comparing on these columns:\n'
                +strings.as_inline_table(join_cols, ustr=col_ustr))
            
            # Uniquify input table to avoid internal duplicate keys
            insert_in_table = sql.distinct_table(db, insert_in_table,
                filter(sql_gen.is_table_col, distinct_on.values()))
            insert_in_tables.append(insert_in_table)
        except sql.NullValueException, e:
            log_exc(e)
            
            out_col, = e.cols
            try: in_col = mapping[out_col]
            except KeyError:
                msg = 'Missing mapping for NOT NULL column '+out_col
                log_debug(msg)
                if default == None: on_error(SyntaxError(msg)) # required col
                remove_all_rows()
            else: ignore(in_col, None, e)
        except sql.CheckException, e:
            log_exc(e)
            
            ignore_cond(e.cond, e)
        except sql.DatabaseErrors, e:
            log_exc(e)
            
            log_debug('No handler for exception')
            on_error(e)
            remove_all_rows()
        # after exception handled, rerun loop with additional constraints
    
    if cur != None and row_ct_ref != None and cur.rowcount >= 0:
        row_ct_ref[0] += cur.rowcount
    
    if is_function: pass # pkeys table already created
    elif has_joins:
        select_joins = input_joins+[sql_gen.Join(out_table, join_cols)]
        log_debug('Getting output table pkeys of existing/inserted rows')
        insert_into_pkeys(select_joins, pkeys_cols)
    else:
        sql.add_row_num(db, insert_out_pkeys) # for joining with input pkeys
        
        log_debug('Getting input table pkeys of inserted rows')
        # Note that mk_main_select() does not use ORDER BY. Instead, assume that
        # since the SELECT query is identical to the one used in INSERT SELECT,
        # its rows will be retrieved in the same order.
        sql.run_query_into(db, mk_main_select(input_joins, [in_pkey]),
            into=insert_in_pkeys)
        sql.add_row_num(db, insert_in_pkeys) # for joining with output pkeys
        
        assert sql.table_row_count(db, insert_out_pkeys) == sql.table_row_count(
            db, insert_in_pkeys)
        
        log_debug('Combining output and input pkeys in inserted order')
        pkey_joins = [insert_in_pkeys, sql_gen.Join(insert_out_pkeys,
            {sql.row_num_col: sql_gen.join_same_not_null})]
        insert_into_pkeys(pkey_joins, pkeys_names)
        
        sql.empty_temp(db, [insert_out_pkeys, insert_in_pkeys])
    
    if not is_function:
        log_debug('Setting pkeys of missing rows to '
            +strings.as_tt(repr(default)))
        missing_rows_joins = [full_in_table, sql_gen.Join(into,
            {in_pkey: sql_gen.join_same_not_null}, sql_gen.filter_out)]
            # must use join_same_not_null or query will take forever
        insert_into_pkeys(missing_rows_joins,
            [sql_gen.Col(in_pkey, full_in_table),
            sql_gen.NamedCol(out_pkey, default)])
    # otherwise, there is already an entry for every row
    
    assert (sql.table_row_count(db, into)
        == sql.table_row_count(db, full_in_table))
    
    sql.empty_temp(db, insert_in_tables+[full_in_table])
    
    srcs = []
    if is_func: srcs = sql_gen.cols_srcs(in_cols)
    return sql_gen.Col(out_pkey, into, srcs)
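
# Usage sketch (hypothetical tables/columns; run at the beginning of a
# transaction, per the docstring). Maps staging columns to the output table and
# returns a column in the pkeys table pairing each input pkey with its output
# pkey:
#     row_ct = [0]
#     pkeys_col = put_table(db, 'taxonoccurrence', ['staging'],
#         {'authortaxoncode': sql_gen.Col('taxon_code', 'staging')},
#         row_ct_ref=row_ct)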