Project

General

Profile

1
# Database import/export
2

    
3
import copy
4
import csv
5
import operator
6
import warnings
7
import sys
8

    
9
import csvs
10
import exc
11
import dicts
12
import sql
13
import sql_gen
14
import streams
15
import strings
16
import util
17

    
18
##### Exceptions
19

    
20
# Can't use built-in SyntaxError because it stringifies to only the first line
class SyntaxError(Exception):
    '''Exception whose str() preserves the full (possibly multiline) message.'''
    pass
22

    
23
##### Data cleanup
24

    
25
def table_nulls_mapped__set(db, table):
    '''Flags table as having had its NULL-equivalent strings mapped
    (see cleanup_table(), which checks this flag to stay idempotent).
    @param table sql_gen.Table
    '''
    assert isinstance(table, sql_gen.Table)
    query = ('SELECT util.table_nulls_mapped__set('
        +sql_gen.table2regclass_text(db, table)+')')
    sql.run_query(db, query)
29

    
30
def table_nulls_mapped__get(db, table):
    '''Returns whether table was flagged by table_nulls_mapped__set().
    @param table sql_gen.Table
    '''
    assert isinstance(table, sql_gen.Table)
    query = ('SELECT util.table_nulls_mapped__get('
        +sql_gen.table2regclass_text(db, table)+')')
    return sql.value(sql.run_query(db, query))
34

    
35
# Input strings treated as equivalent to NULL by cleanup_table()
null_strs = ['', '-', r'\N', 'NULL', 'UNKNOWN', 'nulo']
36

    
37
def cleanup_table(db, table):
    '''Maps null_strs to NULL and trims whitespace in all text columns.
    idempotent: uses table_nulls_mapped__get()/__set() to run only once
    per table.
    '''
    table = sql_gen.as_Table(table)
    assert sql.table_exists(db, table)
    
    if table_nulls_mapped__get(db, table): return # already cleaned up
    
    # Only text columns need cleanup; skip the pkey column if there is one
    cols = [c for c in sql.table_cols(db, table)
        if sql_gen.is_text_col(db, c)]
    try: pkey_col = sql.table_pkey_col(db, table)
    except sql.DoesNotExistException: pass
    else:
        try: cols.remove(pkey_col)
        except ValueError: pass # pkey is not a text col
    if not cols: return
    
    db.log_debug('Cleaning up table', level=1.5)
    
    # Build nested nullif(...nullif(trim(...), null_str)...) around each col
    expr = 'trim(both from %s)'
    for null in null_strs:
        expr = 'nullif('+expr+', '+db.esc_value(null)+')'
    changes = [(v, sql_gen.CustomCode(expr % v.to_str(db))) for v in cols]
    
    # NOT NULL constraints would reject the mapped NULLs, so drop them as
    # the errors surface and retry
    while True:
        try:
            sql.update(db, table, changes, in_place=True, recover=True)
            break # successful
        except sql.NullValueException as e:
            db.log_debug('Caught exception: '+exc.str_(e))
            col, = e.cols
            sql.drop_not_null(db, col)
    
    db.log_debug('Vacuuming and reanalyzing table', level=1.5)
    sql.vacuum(db, table)
    
    table_nulls_mapped__set(db, table)
72

    
73
##### Error tracking
74

    
75
def track_data_error(db, errors_table, cols, value, error_code, error):
    '''Records a data error in the errors table, one row per source column.
    Duplicate errors are silently skipped.
    @param errors_table If None, does nothing.
    '''
    if errors_table == None: return
    
    col_names = [c.name for c in cols]
    if not col_names: col_names = [None] # need at least one entry
    for col_name in col_names:
        row = dict(column=col_name, value=value, error_code=error_code,
            error=error)
        try:
            sql.insert(db, errors_table, row, recover=True, cacheable=True,
                log_level=4)
        except sql.DuplicateKeyException: pass # error already tracked
89

    
90
class ExcToErrorsTable(sql_gen.ExcToWarning):
    '''Handles an exception by saving it or converting it to a warning.'''
    def __init__(self, return_, srcs, errors_table, value=None):
        '''
        @param return_ See sql_gen.ExcToWarning
        @param srcs The column names for the errors table
        @param errors_table None|sql_gen.Table
        @param value The value (or an expression for it) that caused the error
        @pre The invalid value must be in a local variable "value" of type text.
        '''
        sql_gen.ExcToWarning.__init__(self, return_)
        
        self.srcs = srcs
        self.errors_table = errors_table
        self.value = sql_gen.as_Code(value)
    
    def to_str(self, db):
        # Without source columns or an errors table, fall back to a warning
        if not self.srcs or self.errors_table == None:
            return sql_gen.ExcToWarning.to_str(self, db)
        
        errors_table_cols = [sql_gen.Col(v)
            for v in ['column', 'value', 'error_code', 'error']]
        col_names_query = sql.mk_select(db, sql_gen.NamedValues('c', None,
            [[c.name] for c in self.srcs]), order_by=None)
        insert_query = sql.mk_insert_select(db, self.errors_table,
            errors_table_cols,
            sql_gen.Values(errors_table_cols).to_str(db))+';\n'
        return '''\
-- Save error in errors table.
DECLARE
    error_code text := SQLSTATE;
    error text := SQLERRM;
    value text := '''+self.value.to_str(db)+''';
    "column" text;
BEGIN
    -- Insert the value and error for *each* source column.
'''+strings.indent(sql_gen.RowExcIgnore(None, col_names_query, insert_query,
    row_var=errors_table_cols[0]).to_str(db))+'''
END;

'''+self.return_.to_str(db)
133

    
134
def data_exception_handler(*args, **kw_args):
    '''Handles a data_exception by saving it or converting it to a warning.
    For params, see ExcToErrorsTable().
    '''
    handler = ExcToErrorsTable(*args, **kw_args)
    return sql_gen.data_exception_handler(handler)
139

    
140
def cast(db, type_, col, errors_table=None):
    '''Casts an (unrenamed) column or value.
    If errors_table set and col has srcs, saves errors in errors_table (using
    col's srcs attr as source columns). Otherwise, converts errors to warnings.
    @param col str|sql_gen.Col|sql_gen.Literal
    @param errors_table None|sql_gen.Table|str
    '''
    col = sql_gen.as_Col(col)
    
    # Don't convert exceptions to warnings for user-supplied constants
    if isinstance(col, sql_gen.Literal): return sql_gen.Cast(type_, col)
    
    assert not isinstance(col, sql_gen.NamedCol)
    
    srcs = col.srcs
    save_errors = errors_table != None and srcs
    function_name = strings.first_word(type_)
    if save_errors: # function will be unique for the given srcs
        function_name = strings.ustr(sql_gen.FunctionCall(function_name,
            *map(sql_gen.to_name_only_col, srcs)))
    function = db.TempFunction(function_name)
    
    # Create function definition
    modifiers = 'STRICT'
    if not save_errors: modifiers = 'IMMUTABLE '+modifiers
    value_param = sql_gen.FunctionParam('value', 'anyelement')
    handler = data_exception_handler('RETURN NULL;\n', srcs, errors_table,
        value_param.name)
    body = sql_gen.CustomCode(handler.to_str(db, '''\
/* The explicit cast to the return type is needed to make the cast happen
inside the try block. (Implicit casts to the return type happen at the end
of the function, outside any block.) */
RETURN '''+sql_gen.Cast(type_, sql_gen.CustomCode('value')).to_str(db)+''';
'''))
    body.lang='plpgsql'
    func_def = sql_gen.FunctionDef(function, type_, body, [value_param],
        modifiers)
    sql.define_func(db, func_def)
    
    return sql_gen.FunctionCall(function, col)
179

    
180
def func_wrapper_exception_handler(db, return_, args, errors_table):
    '''Handles a function call's data_exceptions.
    Supports PL/Python functions.
    @param return_ See data_exception_handler()
    @param args [arg...] Function call's args
    @param errors_table See data_exception_handler()
    '''
    # Only args with source columns can be tracked in the errors table
    args = [v for v in args if sql_gen.has_srcs(v)]
    
    srcs = sql_gen.cross_join_srcs(args)
    value = sql_gen.merge_not_null(db, ',', args)
    inner_handler = data_exception_handler(return_, srcs, errors_table, value)
    return sql_gen.NestedExcHandler(inner_handler,
        sql_gen.plpythonu_error_handler)
195

    
196
def cast_temp_col(db, type_, col, errors_table=None):
    '''Like cast(), but creates a new column with the cast values if the input
    is a column.
    @return The new column or cast value
    '''
    def cast_(col): return cast(db, type_, col, errors_table)
    
    # Values (not table columns) are just wrapped with the cast
    try: col = sql_gen.underlying_col(col)
    except sql_gen.NoUnderlyingTableException: return sql_gen.wrap(cast_, col)
    
    table = col.table
    new_col = sql_gen.suffixed_col(col, '::'+strings.first_word(type_))
    expr = cast_(col)
    
    # Add column
    new_typed_col = sql_gen.TypedCol(new_col.name, type_)
    sql.add_col(db, table, new_typed_col, comment=strings.urepr(col)+'::'+type_)
    new_col.name = new_typed_col.name # propagate any renaming
    
    # Fill it with the cast values
    sql.update(db, table, [(new_col, expr)], in_place=True, recover=True)
    
    return new_col
218

    
219
def errors_table(db, table, if_exists=True):
    '''
    @param if_exists If set, returns None if the errors table doesn't exist
    @return None|sql_gen.Table
    '''
    table = sql_gen.as_Table(table)
    # Use the table's first source table if it has one
    if table.srcs != (): table = table.srcs[0]
    
    errors_table_ = sql_gen.suffixed_table(table, '.errors')
    if if_exists and not sql.table_exists(db, errors_table_): return None
    return errors_table_
230

    
231
def mk_errors_table(db, table):
    '''Creates the errors table for table if it doesn't already exist.'''
    errors_table_ = errors_table(db, table, if_exists=False)
    if sql.table_exists(db, errors_table_, cacheable=False): return
    
    typed_cols = [
        sql_gen.TypedCol('column', 'text'),
        sql_gen.TypedCol('value', 'text'),
        sql_gen.TypedCol('error_code', 'character varying(5)', nullable=False),
        sql_gen.TypedCol('error', 'text', nullable=False),
        ]
    sql.create_table(db, errors_table_, typed_cols, has_pkey=False)
    # md5() keeps the index entries small for long values/errors
    index_cols = ['column', sql_gen.CustomCode('md5(value)'), 'error_code',
        sql_gen.CustomCode('md5(error)')]
    sql.add_index(db, index_cols, errors_table_, unique=True)
245

    
246
##### Import
247

    
248
# Definition of the explicit row-number column added by import_csv()
row_num_col_def = copy.copy(sql.row_num_col_def) # don't modify the original
row_num_col_def.type = 'integer'
row_num_col_def.name = 'row_num'
251

    
252
def append_csv(db, table, reader, header):
    '''Appends a CSV's rows to an existing table using COPY FROM STDIN.
    @param reader A CSV reader yielding the data rows
    @param header [col_name...] The CSV's column names
    '''
    def esc_name_(name): return sql.esc_name(db, name)
    
    def log(msg, level=1): db.log_debug(msg, level)
    
    # Wrap in standardizing stream
    cols_ct = len(header)
    stream = csvs.InputRewriter(csvs.ProgressInputFilter(
        csvs.ColCtFilter(reader, cols_ct), sys.stderr, n=1000))
    #streams.copy(stream, sys.stderr) # to troubleshoot copy_expert() errors
    dialect = stream.dialect # use default dialect
    
    # Create COPY FROM statement
    if header == sql.table_col_names(db, table): cols_str = ''
    else: cols_str =' ('+(', '.join(map(esc_name_, header)))+')'
    copy_from = ('COPY '+table.to_str(db)+cols_str+' FROM STDIN DELIMITER '
        +db.esc_value(dialect.delimiter)+' NULL '+db.esc_value(''))
    assert not csvs.is_tsv(dialect)
    copy_from += ' CSV'
    if dialect.quoting != csv.QUOTE_NONE:
        quote_str = db.esc_value(dialect.quotechar)
        copy_from += ' QUOTE '+quote_str
        # doubled quote chars are the CSV escape convention
        if dialect.doublequote: copy_from += ' ESCAPE '+quote_str
    copy_from += ';\n'
    
    log(copy_from, level=2)
    try: db.db.cursor().copy_expert(copy_from, stream)
    except Exception as e: sql.parse_exception(db, e, recover=True)
280

    
281
def import_csv(db, table, reader, header):
    '''Creates a table for a CSV and loads the CSV's rows into it.
    Adds an explicit row_num column (row_num_col_def) as the first column, so
    the table preserves the CSV's row order. Note: mutates header in place.
    @param reader A CSV reader yielding the data rows
    @param header [col_name...] The CSV's column names
    '''
    def log(msg, level=1): db.log_debug(msg, level)
    
    # Get format info; replace empty column names with generated ones
    col_names = [strings.to_unicode(v) for v in header]
    for i, col in enumerate(col_names):
        if col == '': col_names[i] = 'column_'+str(i)
    
    typed_cols = [sql_gen.TypedCol(v, 'text') for v in col_names]
    typed_cols.insert(0, row_num_col_def)
    header.insert(0, row_num_col_def.name)
    reader = csvs.RowNumFilter(reader)
    
    log('Creating table')
    # Note that this is not rolled back if the import fails. Instead, it is
    # cached, and will not be re-run if the import is retried.
    sql.create_table(db, table, typed_cols, has_pkey=False, col_indexes=False)
    
    # Free memory used by deleted (rolled back) rows from any failed import.
    # This MUST be run so that the rows will be stored in inserted order, and
    # the row_num added after import will match up with the CSV's row order.
    sql.truncate(db, table)
    
    # Load the data
    def load(): append_csv(db, table, reader, header)
    sql.with_savepoint(db, load)
    
    cleanup_table(db, table)
312

    
313
def put(db, table, row, pkey_=None, row_ct_ref=None, on_error=exc.reraise):
    '''Recovers from errors.
    Only works under PostgreSQL (uses INSERT RETURNING).
    Note: pkey_ is accepted for interface compatibility but not used.
    '''
    return put_table(db, table, [], row, row_ct_ref, on_error=on_error)
318

    
319
def get(db, table, row, pkey, row_ct_ref=None, create=False):
    '''Looks up row's pkey, optionally inserting it if missing.
    Recovers from errors.
    '''
    try:
        matches = sql.select(db, table, [pkey], row, limit=1, recover=True)
        return sql.value(matches)
    except StopIteration: # no matching row
        if not create: raise
        return put(db, table, row, pkey, row_ct_ref) # insert new row
327

    
328
def is_func_result(col):
    '''Whether col is the "result" column of a function-call table
    (a table whose name contains "(").'''
    return '(' in col.table.name and col.name == 'result'
330

    
331
def into_table_name(out_table, in_tables0, mapping, is_func):
    '''Builds a descriptive name for the table of output pkeys.
    @param is_func Whether out_table is a function (affects the name format)
    '''
    def in_col_str(in_col):
        in_col = sql_gen.remove_col_rename(in_col)
        if isinstance(in_col, sql_gen.Col):
            table = in_col.table
            if table == in_tables0:
                in_col = sql_gen.to_name_only_col(in_col)
            elif is_func_result(in_col): in_col = table # omit col name
        return strings.ustr(in_col)
    
    name = strings.ustr(out_table)
    if is_func:
        name += '('
        # Prefer just the "value" arg; otherwise list all kwargs
        try: value_in_col = mapping['value']
        except KeyError:
            name += ', '.join((strings.ustr(k)+'='+in_col_str(v)
                for k, v in mapping.iteritems()))
        else: name += in_col_str(value_in_col)
        name += ')'
    else:
        out_col = 'rank'
        try: in_col = mapping[out_col]
        except KeyError: name += '_pkeys'
        else: # has a rank column, so hierarchical
            name += '['+strings.ustr(out_col)+'='+in_col_str(in_col)+']'
    return name
359

    
360
def put_table(db, out_table, in_tables, mapping, row_ct_ref=None, default=None,
361
    col_defaults={}, on_error=exc.reraise):
362
    '''Recovers from errors.
363
    Only works under PostgreSQL (uses INSERT RETURNING).
364
    
365
    Warning: This function's normalizing algorithm does not support database
366
    triggers that populate fields covered by the unique constraint used to do
367
    the DISTINCT ON. Such fields must be populated by the mappings instead.
368
    (Other unique constraints and other non-unique fields are not affected by
369
    this restriction on triggers. Note that the primary key will normally not be
370
    the DISTINCT ON constraint, so trigger-populated natural keys are supported
371
    *unless* the input table contains duplicate rows for some generated keys.)
372
    
373
    Note that much of the complexity of the normalizing algorithm is due to
374
    PostgreSQL (and other DB systems) not having a native command for
375
    INSERT ON DUPLICATE SELECT (wiki.vegpath.org/INSERT_ON_DUPLICATE_SELECT).
376
    For PostgreSQL 9.1+, this can now be emulated using INSTEAD OF triggers.
377
    For earlier versions, you instead have to use this function.
378
    
379
    @param in_tables The main input table to select from, followed by a list of
380
        tables to join with it using the main input table's pkey
381
    @param mapping dict(out_table_col=in_table_col, ...)
382
        * out_table_col: str (*not* sql_gen.Col)
383
        * in_table_col: sql_gen.Col|literal-value
384
    @param default The *output* column to use as the pkey for missing rows.
385
        If this output column does not exist in the mapping, uses None.
386
        Note that this will be used for *all* missing rows, regardless of which
387
        error caused them not to be inserted.
388
    @param col_defaults Default values for required columns.
389
    @return sql_gen.Col Where the output pkeys are made available
390
    '''
391
    import psycopg2.extensions
392
    
393
    # Special handling for functions with hstore params
394
    if out_table == '_map':
395
        import psycopg2.extras
396
        psycopg2.extras.register_hstore(db.db)
397
        
398
        # Parse args
399
        try: value = mapping.pop('value')
400
        except KeyError: return None # value required
401
        
402
        mapping = dict([(k, sql_gen.get_value(v))
403
            for k, v in mapping.iteritems()]) # unwrap literal value
404
        mapping = dict(map=mapping, value=value) # non-value params -> hstore
405
    
406
    out_table = sql_gen.as_Table(out_table)
407
    
408
    def log_debug(msg): db.log_debug(msg, level=1.5)
409
    def col_ustr(str_):
410
        return strings.repr_no_u(sql_gen.remove_col_rename(str_))
411
    
412
    log_debug('********** New iteration **********')
413
    log_debug('Inserting these input columns into '+strings.as_tt(
414
        out_table.to_str(db))+':\n'+strings.as_table(mapping, ustr=col_ustr))
415
    
416
    is_function = sql.function_exists(db, out_table)
417
    
418
    if is_function: row_ct_ref = None # only track inserted rows
419
    
420
    # Warn if inserting empty table rows
421
    if not mapping and not is_function: # functions with no args OK
422
        warnings.warn(UserWarning('Inserting empty table row(s)'))
423
    
424
    if is_function: out_pkey = 'result'
425
    else: out_pkey = sql.pkey_name(db, out_table, recover=True)
426
    out_pkey_col = sql_gen.as_Col(out_pkey, out_table)
427
    
428
    in_tables_ = copy.copy(in_tables) # don't modify input!
429
    try: in_tables0 = in_tables_.pop(0) # first table is separate
430
    except IndexError: in_tables0 = None
431
    else:
432
        in_pkey = sql.pkey_name(db, in_tables0, recover=True)
433
        in_pkey_col = sql_gen.as_Col(in_pkey, in_tables0)
434
    
435
    # Determine if can use optimization for only literal values
436
    is_literals = not reduce(operator.or_, map(sql_gen.is_table_col,
437
        mapping.values()), False)
438
    is_literals_or_function = is_literals or is_function
439
    
440
    if in_tables0 == None: errors_table_ = None
441
    else: errors_table_ = errors_table(db, in_tables0)
442
    
443
    # Create input joins from list of input tables
444
    input_joins = [in_tables0]+[sql_gen.Join(v,
445
        {in_pkey: sql_gen.join_same_not_null}) for v in in_tables_]
446
    
447
    orig_mapping = mapping.copy()
448
    if mapping == {} and not is_function: # need >= one column for INSERT SELECT
449
        mapping = {out_pkey: None} # ColDict will replace with default value
450
    
451
    if not is_literals:
452
        into = sql_gen.as_Table(into_table_name(out_table, in_tables0, mapping,
453
            is_function))
454
        # Ensure into's out_pkey is different from in_pkey by prepending "out."
455
        if is_function: into_out_pkey = out_pkey
456
        else: into_out_pkey = 'out.'+out_pkey
457
        
458
        # Set column sources
459
        in_cols = filter(sql_gen.is_table_col, mapping.values())
460
        for col in in_cols:
461
            if col.table == in_tables0: col.set_srcs(sql_gen.src_self)
462
        
463
        log_debug('Joining together input tables into temp table')
464
        # Place in new table so don't modify input and for speed
465
        in_table = sql_gen.Table('in')
466
        mapping = dicts.join(mapping, sql.flatten(db, in_table, input_joins,
467
            in_cols, preserve=[in_pkey_col]))
468
        input_joins = [in_table]
469
        db.log_debug('Temp table: '+strings.as_tt(in_table.to_str(db)), level=2)
470
    
471
    # Wrap mapping in a sql_gen.ColDict.
472
    # sql_gen.ColDict sanitizes both keys and values passed into it.
473
    # Do after applying dicts.join() because that returns a plain dict.
474
    mapping = sql_gen.ColDict(db, out_table, mapping)
475
    
476
    # Save all rows since in_table may have rows deleted
477
    if is_literals: pass
478
    elif is_function: full_in_table = in_table
479
    else:
480
        full_in_table = sql_gen.suffixed_table(in_table, '_full')
481
        sql.copy_table(db, in_table, full_in_table)
482
    
483
    pkeys_table_exists_ref = [False]
484
    def insert_into_pkeys(query, **kw_args):
485
        if pkeys_table_exists_ref[0]:
486
            sql.insert_select(db, into, [in_pkey, into_out_pkey], query,
487
                **kw_args)
488
        else:
489
            kw_args.setdefault('add_pkey_', True)
490
            # don't warn if can't create pkey, because this just indicates that,
491
            # at some point in the import tree, it used a set-returning function
492
            kw_args.setdefault('add_pkey_warn', False)
493
            
494
            sql.run_query_into(db, query, into=into, **kw_args)
495
            pkeys_table_exists_ref[0] = True
496
    
497
    def mk_main_select(joins, cols): return sql.mk_select(db, joins, cols)
498
    
499
    if is_literals: insert_in_table = None
500
    else:
501
        insert_in_table = in_table
502
        insert_in_tables = [insert_in_table]
503
    join_cols = sql_gen.ColDict(db, out_table)
504
    join_custom_cond = None
505
    
506
    exc_strs = set()
507
    def log_exc(e):
508
        e_str = exc.str_(e, first_line_only=True)
509
        log_debug('Caught exception: '+e_str)
510
        if e_str in exc_strs: # avoid infinite loops
511
            log_debug('Exception already seen, handler broken')
512
            on_error(e)
513
            remove_all_rows()
514
            return False
515
        else: exc_strs.add(e_str)
516
        return True
517
    
518
    ignore_all_ref = [False]
519
    def remove_all_rows():
520
        log_debug('Ignoring all rows')
521
        ignore_all_ref[0] = True # just return the default value column
522
    
523
    def handle_unknown_exc(e):
524
        log_debug('No handler for exception')
525
        on_error(e)
526
        remove_all_rows()
527
    
528
    def ensure_cond(cond, e, passed=False, failed=False):
529
        '''
530
        @param passed at least one row passed the constraint
531
        @param failed at least one row failed the constraint
532
        '''
533
        if is_literals: # we know the constraint was applied exactly once
534
            if passed: pass
535
            elif failed: remove_all_rows()
536
            else: raise NotImplementedError()
537
        else:
538
            if not is_function:
539
                out_table_cols = sql_gen.ColDict(db, out_table)
540
                out_table_cols.update(util.dict_subset_right_join({},
541
                    sql.table_col_names(db, out_table)))
542
            
543
            in_cols = []
544
            cond = strings.ustr(cond)
545
            orig_cond = cond
546
            cond = sql_gen.map_expr(db, cond, mapping, in_cols)
547
            if not is_function:
548
                cond = sql_gen.map_expr(db, cond, out_table_cols)
549
            
550
            log_debug('Ignoring rows that do not satisfy '+strings.as_tt(cond))
551
            cur = None
552
            if cond == sql_gen.false_expr:
553
                assert failed
554
                remove_all_rows()
555
            elif cond == sql_gen.true_expr: assert passed
556
            else:
557
                while True:
558
                    not_cond = sql_gen.NotCond(sql_gen.CustomCode(cond))
559
                    try:
560
                        cur = sql.delete(db, insert_in_table, not_cond)
561
                        break
562
                    except sql.DoesNotExistException, e:
563
                        if e.type != 'column': raise
564
                        
565
                        last_cond = cond
566
                        cond = sql_gen.map_expr(db, cond, {e.name: None})
567
                        if cond == last_cond: raise # not fixable
568
            
569
            # If any rows failed cond
570
            if failed or cur != None and cur.rowcount > 0:
571
                track_data_error(db, errors_table_,
572
                    sql_gen.cross_join_srcs(in_cols), None, e.cause.pgcode,
573
                    strings.ensure_newl(strings.ustr(e.cause.pgerror))
574
                    +'condition: '+orig_cond+'\ntranslated condition: '+cond)
575
    
576
    not_null_cols = set()
577
    def ignore(in_col, value, e):
578
        if sql_gen.is_table_col(in_col):
579
            in_col = sql_gen.with_table(in_col, insert_in_table)
580
            
581
            track_data_error(db, errors_table_, in_col.srcs, value,
582
                e.cause.pgcode, e.cause.pgerror)
583
            
584
            sql.add_index(db, in_col, insert_in_table) # enable fast filtering
585
            if value != None and in_col not in not_null_cols:
586
                log_debug('Replacing invalid value '
587
                    +strings.as_tt(strings.urepr(value))+' with NULL in column '
588
                    +strings.as_tt(in_col.to_str(db)))
589
                sql.update(db, insert_in_table, [(in_col, None)],
590
                    sql_gen.ColValueCond(in_col, value))
591
            else:
592
                log_debug('Ignoring rows with '+strings.as_tt(in_col.to_str(db))
593
                    +' = '+strings.as_tt(strings.urepr(value)))
594
                sql.delete(db, insert_in_table,
595
                    sql_gen.ColValueCond(in_col, value))
596
                if value == None: not_null_cols.add(in_col)
597
        else:
598
            assert isinstance(in_col, sql_gen.NamedCol)
599
            in_value = sql_gen.remove_col_rename(in_col)
600
            assert sql_gen.is_literal(in_value)
601
            if value == in_value.value:
602
                if value != None:
603
                    log_debug('Replacing invalid literal '
604
                        +strings.as_tt(in_col.to_str(db))+' with NULL')
605
                    mapping[in_col.name] = None
606
                else:
607
                    remove_all_rows()
608
            # otherwise, all columns were being ignore()d because the specific
609
            # column couldn't be identified, and this was not the invalid column
610
    
611
    if not is_literals:
612
        def insert_pkeys_table(which):
613
            return sql_gen.Table(sql_gen.concat(in_table.name,
614
                '_insert_'+which+'_pkeys'))
615
        insert_out_pkeys = insert_pkeys_table('out')
616
        insert_in_pkeys = insert_pkeys_table('in')
617
    
618
    def mk_func_call():
619
        args = dict(((k.name, v) for k, v in mapping.iteritems()))
620
        return sql_gen.FunctionCall(out_table, **args), args
621
    
622
    def handle_MissingCastException(e):
623
        if not log_exc(e): return False
624
        
625
        type_ = e.type
626
        if e.col == None: out_cols = mapping.keys()
627
        else: out_cols = [e.col]
628
        
629
        for out_col in out_cols:
630
            log_debug('Casting '+strings.as_tt(strings.repr_no_u(out_col))
631
                +' input to '+strings.as_tt(type_))
632
            in_col = mapping[out_col]
633
            while True:
634
                try:
635
                    cast_col = cast_temp_col(db, type_, in_col, errors_table_)
636
                    mapping[out_col] = cast_col
637
                    if out_col in join_cols: join_cols[out_col] = cast_col
638
                    break # cast successful
639
                except sql.InvalidValueException, e:
640
                    if not log_exc(e): return False
641
                    
642
                    ignore(in_col, e.value, e)
643
        
644
        return True
645
    
646
    missing_msg = None
647
    
648
    # Do inserts and selects
649
    while True:
650
        has_joins = join_cols != {}
651
        
652
        if ignore_all_ref[0]: break # unrecoverable error, so don't do main case
653
        
654
        # Prepare to insert new rows
655
        if is_function:
656
            if is_literals:
657
                log_debug('Calling function')
658
                func_call, args = mk_func_call()
659
        else:
660
            log_debug('Trying to insert new rows')
661
            insert_args = dict(recover=True, cacheable=False)
662
            if has_joins:
663
                insert_args.update(dict(ignore=True))
664
            else:
665
                insert_args.update(dict(returning=out_pkey))
666
                if not is_literals:
667
                    insert_args.update(dict(into=insert_out_pkeys))
668
            main_select = mk_main_select([insert_in_table], [sql_gen.with_table(
669
                c, insert_in_table) for c in mapping.values()])
670
        
671
        try:
672
            cur = None
673
            if is_function:
674
                if is_literals:
675
                    cur = sql.select(db, fields=[func_call], recover=True,
676
                        cacheable=True)
677
                else:
678
                    log_debug('Defining wrapper function')
679
                    
680
                    func_call, args = mk_func_call()
681
                    func_call = sql_gen.NamedCol(into_out_pkey, func_call)
682
                    
683
                    # Create empty pkeys table so its row type can be used
684
                    insert_into_pkeys(sql.mk_select(db, input_joins,
685
                        [in_pkey_col, func_call], limit=0), add_pkey_=False,
686
                        recover=True)
687
                    result_type = db.col_info(sql_gen.Col(into_out_pkey,
688
                        into)).type
689
                    
690
                    ## Create error handling wrapper function
691
                    
692
                    wrapper = db.TempFunction(sql_gen.concat(into.name,
693
                        '_wrap'))
694
                    
695
                    select_cols = [in_pkey_col]+args.values()
696
                    row_var = copy.copy(sql_gen.row_var)
697
                    row_var.set_srcs([in_table])
698
                    in_pkey_var = sql_gen.Col(in_pkey, row_var)
699
                    
700
                    args = dict(((k, sql_gen.with_table(v, row_var))
701
                        for k, v in args.iteritems()))
702
                    func_call = sql_gen.FunctionCall(out_table, **args)
703
                    
704
                    def mk_return(result):
705
                        return sql_gen.ReturnQuery(sql.mk_select(db,
706
                            fields=[in_pkey_var, result], explain=False))
707
                    exc_handler = func_wrapper_exception_handler(db,
708
                        mk_return(sql_gen.Cast(result_type, None)),
709
                        args.values(), errors_table_)
710
                    
711
                    sql.define_func(db, sql_gen.FunctionDef(wrapper,
712
                        sql_gen.SetOf(into),
713
                        sql_gen.RowExcIgnore(sql_gen.RowType(in_table),
714
                            sql.mk_select(db, input_joins),
715
                            mk_return(func_call), exc_handler=exc_handler)
716
                        ))
717
                    wrapper_table = sql_gen.FunctionCall(wrapper)
718
                    
719
                    log_debug('Calling function')
720
                    insert_into_pkeys(sql.mk_select(db, wrapper_table,
721
                        order_by=None), recover=True, cacheable=False)
722
                    sql.add_pkey_or_index(db, into)
723
            else:
724
                cur = sql.insert_select(db, out_table, mapping.keys(),
725
                    main_select, **insert_args)
726
            break # insert successful
727
        except sql.MissingCastException, e:
728
            if not handle_MissingCastException(e): break
729
        except sql.DuplicateKeyException, e:
730
            if not log_exc(e): break
731
            
732
            # Different rows violating different unique constraints not
733
            # supported
734
            assert not join_cols
735
            
736
            join_custom_cond = e.cond
737
            if e.cond != None: ensure_cond(e.cond, e, passed=True)
738
            
739
            join_cols.update(util.dict_subset_right_join(mapping, e.cols))
740
            log_debug('Ignoring existing rows, comparing on these columns:\n'
741
                +strings.as_inline_table(join_cols, ustr=col_ustr))
742
            
743
            if is_literals:
744
                return sql.value(sql.select(db, out_table, [out_pkey_col],
745
                    join_cols, order_by=None))
746
            
747
            # Uniquify and filter input table to avoid (most) duplicate keys
748
            # (Additional duplicates may be added concurrently and will be
749
            # filtered out separately upon insert.)
750
            while True:
751
                try:
752
                    insert_in_table = sql.distinct_table(db, insert_in_table,
753
                        join_cols.values(), [insert_in_table,
754
                        sql_gen.Join(out_table, join_cols, sql_gen.filter_out,
755
                        e.cond)])
756
                    insert_in_tables.append(insert_in_table)
757
                    break # insert successful
758
                except sql.MissingCastException, e1: # don't modify outer e
759
                    if not handle_MissingCastException(e1): break
760
        except sql.NullValueException, e:
761
            if not log_exc(e): break
762
            
763
            out_col, = e.cols
764
            try: in_col = mapping[out_col]
765
            except KeyError, e:
766
                try: in_col = mapping[out_col] = col_defaults[out_col]
767
                except KeyError:
768
                    missing_msg = 'Missing mapping for NOT NULL column '+out_col
769
                    log_debug(missing_msg)
770
                    remove_all_rows()
771
            else: ignore(in_col, None, e)
772
        except sql.CheckException, e:
773
            if not log_exc(e): break
774
            
775
            ensure_cond(e.cond, e, failed=True)
776
        except sql.InvalidValueException, e:
777
            if not log_exc(e): break
778
            
779
            for in_col in mapping.values(): ignore(in_col, e.value, e)
780
        except psycopg2.extensions.TransactionRollbackError, e:
781
            if not log_exc(e): break
782
            # retry
783
        except sql.DatabaseErrors, e:
784
            if not log_exc(e): break
785
            
786
            handle_unknown_exc(e)
787
        # after exception handled, rerun loop with additional constraints
788
    
789
    # Resolve default value column
790
    if default != None:
791
        if ignore_all_ref[0]: mapping.update(orig_mapping) # use input cols
792
        try: default = mapping[default]
793
        except KeyError:
794
            db.log_debug('Default value column '
795
                +strings.as_tt(strings.repr_no_u(default))
796
                +' does not exist in mapping, falling back to None', level=2.1)
797
            default = None
798
        else: default = sql_gen.remove_col_rename(default)
799
    
800
    if missing_msg != None and default == None:
801
        warnings.warn(UserWarning(missing_msg))
802
        # not an error because sometimes the mappings include
803
        # extra tables which aren't used by the dataset
804
    
805
    # Handle unrecoverable errors
806
    if ignore_all_ref[0]:
807
        log_debug('Returning default: '+strings.as_tt(strings.urepr(default)))
808
        return default
809
    
810
    if cur != None and row_ct_ref != None and cur.rowcount >= 0:
811
        row_ct_ref[0] += cur.rowcount
812
    
813
    if is_literals: return sql.value_or_none(cur) # support multi-row functions
814
    
815
    if is_function: pass # pkeys table already created
816
    elif has_joins:
817
        select_joins = input_joins+[sql_gen.Join(out_table, join_cols,
818
            custom_cond=join_custom_cond)]
819
        log_debug('Getting output table pkeys of existing/inserted rows')
820
        insert_into_pkeys(sql.mk_select(db, select_joins, [in_pkey_col,
821
            sql_gen.NamedCol(into_out_pkey, out_pkey_col)], order_by=None))
822
    else:
823
        sql.add_row_num(db, insert_out_pkeys) # for joining with input pkeys
824
        
825
        log_debug('Getting input table pkeys of inserted rows')
826
        # Note that mk_main_select() does not use ORDER BY. Instead, assume that
827
        # since the SELECT query is identical to the one used in INSERT SELECT,
828
        # its rows will be retrieved in the same order.
829
        sql.run_query_into(db, mk_main_select(input_joins, [in_pkey]),
830
            into=insert_in_pkeys)
831
        sql.add_row_num(db, insert_in_pkeys) # for joining with output pkeys
832
        
833
        assert sql.table_row_count(db, insert_out_pkeys) == sql.table_row_count(
834
            db, insert_in_pkeys)
835
        
836
        log_debug('Combining output and input pkeys in inserted order')
837
        pkey_joins = [insert_in_pkeys, sql_gen.Join(insert_out_pkeys,
838
            {sql.row_num_col: sql_gen.join_same_not_null})]
839
        in_col = sql_gen.Col(in_pkey, insert_in_pkeys)
840
        out_col = sql_gen.NamedCol(into_out_pkey,
841
            sql_gen.Col(out_pkey, insert_out_pkeys))
842
        insert_into_pkeys(sql.mk_select(db, pkey_joins, [in_col, out_col],
843
            order_by=None))
844
        
845
        sql.empty_temp(db, [insert_out_pkeys, insert_in_pkeys])
846
    
847
    if not is_function: # is_function doesn't leave holes
848
        log_debug('Setting pkeys of missing rows to '
849
            +strings.as_tt(strings.urepr(default)))
850
        
851
        full_in_pkey_col = sql_gen.Col(in_pkey, full_in_table)
852
        if sql_gen.is_table_col(default):
853
            default = sql_gen.with_table(default, full_in_table)
854
        missing_rows_joins = [full_in_table, sql_gen.Join(into,
855
            {in_pkey: sql_gen.join_same_not_null}, sql_gen.filter_out)]
856
            # must use join_same_not_null or query will take forever
857
        
858
        insert_args = dict(order_by=None)
859
        if not sql.table_has_pkey(db, full_in_table): # in_table has duplicates
860
            insert_args.update(dict(distinct_on=[full_in_pkey_col]))
861
        
862
        insert_into_pkeys(sql.mk_select(db, missing_rows_joins,
863
            [full_in_pkey_col, sql_gen.NamedCol(into_out_pkey, default)],
864
            **insert_args))
865
    # otherwise, there is already an entry for every row
866
    
867
    sql.empty_temp(db, insert_in_tables+[full_in_table])
868
    
869
    srcs = []
870
    if is_function: srcs = sql_gen.cols_srcs(in_cols)
871
    return sql_gen.Col(into_out_pkey, into, srcs)
(37-37/49)