Project

General

Profile

1
# Database import/export
2

    
3
import copy
4
import csv
5
import operator
6
import warnings
7
import sys
8

    
9
import csvs
10
import exc
11
import dicts
12
import sql
13
import sql_gen
14
import streams
15
import strings
16
import util
17

    
18
##### Exceptions
19

    
20
# Can't use built-in SyntaxError because it stringifies to only the first line
class SyntaxError(Exception):
    '''Exception whose str() preserves the full, possibly multiline, message.'''
    pass
22

    
23
##### Data cleanup
24

    
25
def table_nulls_mapped__set(db, table):
    '''Records (in the util schema) that table's null strings have been mapped
    to NULL, so cleanup_table() can skip it on later runs.
    @param table sql_gen.Table
    '''
    assert isinstance(table, sql_gen.Table)
    query = ('SELECT util.table_nulls_mapped__set('
        +sql_gen.table2regclass_text(db, table)+')')
    sql.run_query(db, query)
29

    
30
def table_nulls_mapped__get(db, table):
    '''Returns whether table's null strings have already been mapped to NULL
    (the flag set by table_nulls_mapped__set()).
    @param table sql_gen.Table
    '''
    assert isinstance(table, sql_gen.Table)
    query = ('SELECT util.table_nulls_mapped__get('
        +sql_gen.table2regclass_text(db, table)+')')
    return sql.value(sql.run_query(db, query))
34

    
35
# Strings that denote NULL in the input data; cleanup_table() maps these
# (after trimming surrounding whitespace) to SQL NULL in all text columns
null_strs = ['', '-', r'\N', 'NULL', 'UNKNOWN', 'nulo']
36

    
37
def cleanup_table(db, table):
    '''Maps null strings (null_strs) in table's text columns to SQL NULL,
    trimming whitespace first.
    Idempotent: a flag (table_nulls_mapped__*) records that the table has
    already been cleaned, so re-running is a no-op.
    '''
    table = sql_gen.as_Table(table)
    assert sql.table_exists(db, table)
    
    if table_nulls_mapped__get(db, table): return # already cleaned up
    
    # Only text columns can contain null strings; skip the pkey column, whose
    # values must not be modified
    cols = filter(lambda c: sql_gen.is_text_col(db, c),
        sql.table_cols(db, table))
    try: pkey_col = sql.table_pkey_col(db, table)
    except sql.DoesNotExistException: pass
    else:
        try: cols.remove(pkey_col)
        except ValueError: pass
    if not cols: return
    
    db.log_debug('Cleaning up table', level=1.5)
    
    # Build nested nullif() calls around trim(), one level per null string,
    # then apply the resulting expression to each text column
    expr = 'trim(both from %s)'
    for null in null_strs: expr = 'nullif('+expr+', '+db.esc_value(null)+')'
    changes = [(v, sql_gen.CustomCode(expr % v.to_str(db))) for v in cols]
    
    while True:
        try:
            sql.update(db, table, changes, in_place=True, recover=True)
            break # successful
        except sql.NullValueException, e:
            # A NOT NULL column received a mapped NULL; drop the constraint
            # and retry the update
            db.log_debug('Caught exception: '+exc.str_(e))
            col, = e.cols
            sql.drop_not_null(db, col)
    
    db.log_debug('Vacuuming and reanalyzing table', level=1.5)
    sql.vacuum(db, table)
    
    table_nulls_mapped__set(db, table)
72

    
73
##### Error tracking
74

    
75
def track_data_error(db, errors_table, cols, value, error_code, error):
    '''Saves a data error in the errors table, once per source column.
    Rows that already exist (sql.DuplicateKeyException) are silently skipped.
    @param errors_table If None, does nothing.
    '''
    if errors_table == None: return
    
    col_names = [c.name for c in cols] or [None] # need at least one entry
    for col_name in col_names:
        entry = dict(column=col_name, value=value, error_code=error_code,
            error=error)
        try:
            sql.insert(db, errors_table, entry, recover=True, cacheable=True,
                log_level=4)
        except sql.DuplicateKeyException: pass
89

    
90
class ExcToErrorsTable(sql_gen.ExcToWarning):
    '''Handles an exception by saving it or converting it to a warning.'''
    def __init__(self, return_, srcs, errors_table, value=None):
        '''
        @param return_ See sql_gen.ExcToWarning
        @param srcs The column names for the errors table
        @param errors_table None|sql_gen.Table
        @param value The value (or an expression for it) that caused the error
        @pre The invalid value must be in a local variable "value" of type text.
        '''
        sql_gen.ExcToWarning.__init__(self, return_)
        
        value = sql_gen.as_Code(value)
        
        self.srcs = srcs
        self.errors_table = errors_table
        self.value = value
    
    def to_str(self, db):
        '''Returns the generated PL/pgSQL exception-handler block as a string.'''
        # Without source columns or an errors table there is nothing to save;
        # fall back to converting the error to a warning
        if not self.srcs or self.errors_table == None:
            return sql_gen.ExcToWarning.to_str(self, db)
        
        errors_table_cols = map(sql_gen.Col,
            ['column', 'value', 'error_code', 'error'])
        # One row of column names per source column, to insert an errors-table
        # entry for each
        col_names_query = sql.mk_select(db, sql_gen.NamedValues('c', None,
            [[c.name] for c in self.srcs]), order_by=None)
        insert_query = sql.mk_insert_select(db, self.errors_table,
            errors_table_cols,
            sql_gen.Values(errors_table_cols).to_str(db))+';\n'
        return '''\
-- Save error in errors table.
DECLARE
    error_code text := SQLSTATE;
    error text := SQLERRM;
    value text := '''+self.value.to_str(db)+''';
    "column" text;
BEGIN
    -- Insert the value and error for *each* source column.
'''+strings.indent(sql_gen.RowExcIgnore(None, col_names_query, insert_query,
    row_var=errors_table_cols[0]).to_str(db))+'''
END;

'''+self.return_.to_str(db)
133

    
134
def data_exception_handler(*args, **kw_args):
    '''Handles a data_exception by saving it or converting it to a warning.
    For params, see ExcToErrorsTable().
    '''
    handler = ExcToErrorsTable(*args, **kw_args)
    return sql_gen.data_exception_handler(handler)
139

    
140
def cast(db, type_, col, errors_table=None):
    '''Casts an (unrenamed) column or value.
    If errors_table set and col has srcs, saves errors in errors_table (using
    col's srcs attr as source columns). Otherwise, converts errors to warnings.
    @param col str|sql_gen.Col|sql_gen.Literal
    @param errors_table None|sql_gen.Table|str
    @return sql_gen.Cast (for literal input)|sql_gen.FunctionCall
    '''
    col = sql_gen.as_Col(col)
    
    # Don't convert exceptions to warnings for user-supplied constants
    if isinstance(col, sql_gen.Literal): return sql_gen.Cast(type_, col)
    
    assert not isinstance(col, sql_gen.NamedCol)
    
    function_name = strings.first_word(type_)
    srcs = col.srcs
    save_errors = errors_table != None and srcs
    if save_errors: # function will be unique for the given srcs
        function_name = strings.ustr(sql_gen.FunctionCall(function_name,
            *map(sql_gen.to_name_only_col, srcs)))
    function = db.TempFunction(function_name)
    
    # Create function definition
    # STRICT: the function returns NULL on NULL input without being called
    modifiers = 'STRICT'
    # With no errors-table side effects, the function can be marked IMMUTABLE
    if not save_errors: modifiers = 'IMMUTABLE '+modifiers
    value_param = sql_gen.FunctionParam('value', 'anyelement')
    handler = data_exception_handler('RETURN NULL;\n', srcs, errors_table,
        value_param.name)
    body = sql_gen.CustomCode(handler.to_str(db, '''\
/* The explicit cast to the return type is needed to make the cast happen
inside the try block. (Implicit casts to the return type happen at the end
of the function, outside any block.) */
RETURN '''+sql_gen.Cast(type_, sql_gen.CustomCode('value')).to_str(db)+''';
'''))
    body.lang='plpgsql'
    sql.define_func(db, sql_gen.FunctionDef(function, type_, body,
        [value_param], modifiers))
    
    return sql_gen.FunctionCall(function, col)
179

    
180
def func_wrapper_exception_handler(db, return_, args, errors_table):
    '''Handles a function call's data_exceptions.
    Supports PL/Python functions.
    @param return_ See data_exception_handler()
    @param args [arg...] Function call's args
    @param errors_table See data_exception_handler()
    '''
    # Only args that carry source-column info can be reported on
    args = filter(sql_gen.has_srcs, args)
    
    data_handler = data_exception_handler(return_,
        sql_gen.cross_join_srcs(args), errors_table,
        sql_gen.merge_not_null(db, ',', args))
    return sql_gen.NestedExcHandler(data_handler,
        sql_gen.plpythonu_error_handler)
195

    
196
def cast_temp_col(db, type_, col, errors_table=None):
    '''Like cast(), but creates a new column with the cast values if the input
    is a column.
    @return The new column or cast value
    '''
    def cast_(col): return cast(db, type_, col, errors_table)
    
    # Values (not table columns) are simply wrapped in the cast
    try: col = sql_gen.underlying_col(col)
    except sql_gen.NoUnderlyingTableException: return sql_gen.wrap(cast_, col)
    
    table = col.table
    new_col = sql_gen.suffixed_col(col, '::'+strings.first_word(type_))
    expr = cast_(col)
    
    # Add column
    new_typed_col = sql_gen.TypedCol(new_col.name, type_)
    sql.add_col(db, table, new_typed_col, comment=strings.urepr(col)+'::'+type_)
    new_col.name = new_typed_col.name # propagate any renaming
    
    # Fill the new column with the cast values of the original column
    sql.update(db, table, [(new_col, expr)], in_place=True, recover=True)
    
    return new_col
218

    
219
def errors_table(db, table, if_exists=True):
    '''Returns the errors table associated with table.
    @param if_exists If set, returns None if the errors table doesn't exist
    @return None|sql_gen.Table
    '''
    table = sql_gen.as_Table(table)
    # Use the table's original source table, if it was derived from one
    if table.srcs != (): table = table.srcs[0]
    
    errors_table_ = sql_gen.suffixed_table(table, '.errors')
    if not if_exists: return errors_table_
    if sql.table_exists(db, errors_table_): return errors_table_
    return None
230

    
231
def mk_errors_table(db, table):
    '''Creates the errors table for table, unless it already exists.'''
    errors_table_ = errors_table(db, table, if_exists=False)
    if sql.table_exists(db, errors_table_, cacheable=False): return
    
    # error_code/error are required; column/value may be NULL
    typed_cols = [sql_gen.TypedCol('column', 'text'),
        sql_gen.TypedCol('value', 'text'),
        sql_gen.TypedCol('error_code', 'character varying(5)', nullable=False),
        sql_gen.TypedCol('error', 'text', nullable=False)]
    sql.create_table(db, errors_table_, typed_cols, has_pkey=False)
    
    # Unique index prevents duplicate error entries; the long text columns are
    # indexed via md5()
    sql.add_index(db, ['column', sql_gen.CustomCode('md5(value)'),
        'error_code', sql_gen.CustomCode('md5(error)')], errors_table_,
        unique=True)
245

    
246
##### Import
247

    
248
row_num_col_def = copy.copy(sql.row_num_col_def)
249
row_num_col_def.name = 'row_num'
250
row_num_col_def.type = 'integer'
251

    
252
def append_csv(db, table, reader, header):
    '''Appends the rows of a CSV stream to an existing table via COPY FROM.
    @param reader CSV reader for the data rows
    @param header The CSV's column names; if it differs from the table's
        column list, an explicit column list is added to the COPY statement
    '''
    def esc_name_(name): return sql.esc_name(db, name)
    
    def log(msg, level=1): db.log_debug(msg, level)
    
    # Wrap in standardizing stream
    cols_ct = len(header)
    stream = csvs.InputRewriter(streams.ProgressInputStream(csvs.StreamFilter(
        csvs.ColCtFilter(reader, cols_ct)), sys.stderr, msg='Read %d row(s)',
        n=1000))
    #streams.copy(stream, sys.stderr) # to troubleshoot copy_expert() errors
    dialect = stream.dialect # use default dialect
    
    # Create COPY FROM statement
    if header == sql.table_col_names(db, table): cols_str = ''
    else: cols_str =' ('+(', '.join(map(esc_name_, header)))+')'
    # NULL is the empty string, matching the standardized stream's output
    copy_from = ('COPY '+table.to_str(db)+cols_str+' FROM STDIN DELIMITER '
        +db.esc_value(dialect.delimiter)+' NULL '+db.esc_value(''))
    assert not csvs.is_tsv(dialect)
    copy_from += ' CSV'
    if dialect.quoting != csv.QUOTE_NONE:
        quote_str = db.esc_value(dialect.quotechar)
        copy_from += ' QUOTE '+quote_str
        if dialect.doublequote: copy_from += ' ESCAPE '+quote_str
    copy_from += ';\n'
    
    log(copy_from, level=2)
    # Feed the standardized stream directly into COPY FROM STDIN
    try: db.db.cursor().copy_expert(copy_from, stream)
    except Exception, e: sql.parse_exception(db, e, recover=True)
281

    
282
def import_csv(db, table, reader, header):
    '''Imports a CSV stream into a new all-text-columns table, prepending a
    row_num column and then mapping null strings to NULL (cleanup_table()).
    @param table sql_gen.Table The table to create and fill
    @param reader CSV reader for the data rows
    @param header list The CSV's column names; modified in place (the row_num
        column name is prepended)
    '''
    def log(msg, level=1): db.log_debug(msg, level)
    
    # Get format info
    col_names = map(strings.to_unicode, header)
    for i, col in enumerate(col_names): # replace empty column names
        if col == '': col_names[i] = 'column_'+str(i)
    
    # Select schema and escape names
    def esc_name(name): return db.esc_name(name)
    
    typed_cols = [sql_gen.TypedCol(v, 'text') for v in col_names]
    typed_cols.insert(0, row_num_col_def)
    header.insert(0, row_num_col_def.name)
    reader = csvs.RowNumFilter(reader)
    
    log('Creating table')
    # Note that this is not rolled back if the import fails. Instead, it is
    # cached, and will not be re-run if the import is retried.
    sql.create_table(db, table, typed_cols, has_pkey=False, col_indexes=False)
    
    # Free memory used by deleted (rolled back) rows from any failed import.
    # This MUST be run so that the rows will be stored in inserted order, and
    # the row_num added after import will match up with the CSV's row order.
    sql.truncate(db, table)
    
    # Load the data
    def load(): append_csv(db, table, reader, header)
    sql.with_savepoint(db, load)
    
    cleanup_table(db, table)
313

    
314
def put(db, table, row, pkey_=None, row_ct_ref=None, on_error=exc.reraise):
    '''Inserts a single row via put_table().
    Recovers from errors.
    Only works under PostgreSQL (uses INSERT RETURNING).
    Note: pkey_ is accepted (e.g. passed positionally by get()) but not used;
    put_table() determines the output pkey itself.
    '''
    return put_table(db, table, [], row, row_ct_ref, on_error=on_error)
319

    
320
def get(db, table, row, pkey, row_ct_ref=None, create=False):
    '''Looks up a row's pkey value, optionally inserting the row if missing.
    Recovers from errors.
    @param create If set, insert the row (via put()) when no match is found
    '''
    try:
        return sql.value(sql.select(db, table, [pkey], row, limit=1,
            recover=True))
    except StopIteration: # no matching row
        if create: return put(db, table, row, pkey, row_ct_ref) # insert it
        raise
328

    
329
def is_func_result(col):
    '''Returns whether col is the "result" column of a function call's table
    (recognized by the "(" in the table name).'''
    return '(' in col.table.name and col.name == 'result'
331

    
332
def into_table_name(out_table, in_tables0, mapping, is_func):
    '''Generates the name for the temp table that will hold the output pkeys,
    encoding the output table and the relevant input column(s).
    @param in_tables0 The main input table (its columns are shown name-only)
    @param mapping dict(out_table_col=in_table_col, ...)
    @param is_func Whether out_table is a database function, not a table
    '''
    def in_col_str(in_col):
        in_col = sql_gen.remove_col_rename(in_col)
        if isinstance(in_col, sql_gen.Col):
            table = in_col.table
            if table == in_tables0:
                in_col = sql_gen.to_name_only_col(in_col)
            elif is_func_result(in_col): in_col = table # omit col name
        return strings.ustr(in_col)
    
    str_ = strings.ustr(out_table)
    if is_func:
        # Function-style name: out_table(args)
        str_ += '('
        
        try: value_in_col = mapping['value']
        except KeyError:
            # No "value" param: list all mapped params as k=v
            str_ += ', '.join((strings.ustr(k)+'='+in_col_str(v)
                for k, v in mapping.iteritems()))
        else: str_ += in_col_str(value_in_col)
        
        str_ += ')'
    else:
        out_col = 'rank'
        try: in_col = mapping[out_col]
        except KeyError: str_ += '_pkeys'
        else: # has a rank column, so hierarchical
            str_ += '['+strings.ustr(out_col)+'='+in_col_str(in_col)+']'
    return str_
360

    
361
def put_table(db, out_table, in_tables, mapping, row_ct_ref=None, default=None,
362
    col_defaults={}, on_error=exc.reraise):
363
    '''Recovers from errors.
364
    Only works under PostgreSQL (uses INSERT RETURNING).
365
    
366
    Warning: This function's normalizing algorithm does not support database
367
    triggers that populate fields covered by the unique constraint used to do
368
    the DISTINCT ON. Such fields must be populated by the mappings instead.
369
    (Other unique constraints and other non-unique fields are not affected by
370
    this restriction on triggers. Note that the primary key will normally not be
371
    the DISTINCT ON constraint, so trigger-populated natural keys are supported
372
    *unless* the input table contains duplicate rows for some generated keys.)
373
    
374
    Note that much of the complexity of the normalizing algorithm is due to
375
    PostgreSQL (and other DB systems) not having a native command for
376
    INSERT ON DUPLICATE SELECT (wiki.vegpath.org/INSERT_ON_DUPLICATE_SELECT).
377
    For PostgreSQL 9.1+, this can now be emulated using INSTEAD OF triggers.
378
    For earlier versions, you instead have to use this function.
379
    
380
    @param in_tables The main input table to select from, followed by a list of
381
        tables to join with it using the main input table's pkey
382
    @param mapping dict(out_table_col=in_table_col, ...)
383
        * out_table_col: str (*not* sql_gen.Col)
384
        * in_table_col: sql_gen.Col|literal-value
385
    @param default The *output* column to use as the pkey for missing rows.
386
        If this output column does not exist in the mapping, uses None.
387
        Note that this will be used for *all* missing rows, regardless of which
388
        error caused them not to be inserted.
389
    @param col_defaults Default values for required columns.
390
    @return sql_gen.Col Where the output pkeys are made available
391
    '''
392
    import psycopg2.extensions
393
    
394
    # Special handling for functions with hstore params
395
    if out_table == '_map':
396
        import psycopg2.extras
397
        psycopg2.extras.register_hstore(db.db)
398
        
399
        # Parse args
400
        try: value = mapping.pop('value')
401
        except KeyError: return None # value required
402
        
403
        mapping = dict([(k, sql_gen.get_value(v))
404
            for k, v in mapping.iteritems()]) # unwrap literal value
405
        mapping = dict(map=mapping, value=value) # non-value params -> hstore
406
    
407
    out_table = sql_gen.as_Table(out_table)
408
    
409
    def log_debug(msg): db.log_debug(msg, level=1.5)
410
    def col_ustr(str_):
411
        return strings.repr_no_u(sql_gen.remove_col_rename(str_))
412
    
413
    log_debug('********** New iteration **********')
414
    log_debug('Inserting these input columns into '+strings.as_tt(
415
        out_table.to_str(db))+':\n'+strings.as_table(mapping, ustr=col_ustr))
416
    
417
    is_function = sql.function_exists(db, out_table)
418
    
419
    if is_function: row_ct_ref = None # only track inserted rows
420
    
421
    # Warn if inserting empty table rows
422
    if not mapping and not is_function: # functions with no args OK
423
        warnings.warn(UserWarning('Inserting empty table row(s)'))
424
    
425
    if is_function: out_pkey = 'result'
426
    else: out_pkey = sql.pkey_name(db, out_table, recover=True)
427
    out_pkey_col = sql_gen.as_Col(out_pkey, out_table)
428
    
429
    in_tables_ = copy.copy(in_tables) # don't modify input!
430
    try: in_tables0 = in_tables_.pop(0) # first table is separate
431
    except IndexError: in_tables0 = None
432
    else:
433
        in_pkey = sql.pkey_name(db, in_tables0, recover=True)
434
        in_pkey_col = sql_gen.as_Col(in_pkey, in_tables0)
435
    
436
    # Determine if can use optimization for only literal values
437
    is_literals = not reduce(operator.or_, map(sql_gen.is_table_col,
438
        mapping.values()), False)
439
    is_literals_or_function = is_literals or is_function
440
    
441
    if in_tables0 == None: errors_table_ = None
442
    else: errors_table_ = errors_table(db, in_tables0)
443
    
444
    # Create input joins from list of input tables
445
    input_joins = [in_tables0]+[sql_gen.Join(v,
446
        {in_pkey: sql_gen.join_same_not_null}) for v in in_tables_]
447
    
448
    orig_mapping = mapping.copy()
449
    if mapping == {} and not is_function: # need >= one column for INSERT SELECT
450
        mapping = {out_pkey: None} # ColDict will replace with default value
451
    
452
    if not is_literals:
453
        into = sql_gen.as_Table(into_table_name(out_table, in_tables0, mapping,
454
            is_function))
455
        # Ensure into's out_pkey is different from in_pkey by prepending "out."
456
        if is_function: into_out_pkey = out_pkey
457
        else: into_out_pkey = 'out.'+out_pkey
458
        
459
        # Set column sources
460
        in_cols = filter(sql_gen.is_table_col, mapping.values())
461
        for col in in_cols:
462
            if col.table == in_tables0: col.set_srcs(sql_gen.src_self)
463
        
464
        log_debug('Joining together input tables into temp table')
465
        # Place in new table so don't modify input and for speed
466
        in_table = sql_gen.Table('in')
467
        mapping = dicts.join(mapping, sql.flatten(db, in_table, input_joins,
468
            in_cols, preserve=[in_pkey_col]))
469
        input_joins = [in_table]
470
        db.log_debug('Temp table: '+strings.as_tt(in_table.to_str(db)), level=2)
471
    
472
    # Wrap mapping in a sql_gen.ColDict.
473
    # sql_gen.ColDict sanitizes both keys and values passed into it.
474
    # Do after applying dicts.join() because that returns a plain dict.
475
    mapping = sql_gen.ColDict(db, out_table, mapping)
476
    
477
    # Save all rows since in_table may have rows deleted
478
    if is_literals: pass
479
    elif is_function: full_in_table = in_table
480
    else:
481
        full_in_table = sql_gen.suffixed_table(in_table, '_full')
482
        sql.copy_table(db, in_table, full_in_table)
483
    
484
    pkeys_table_exists_ref = [False]
485
    def insert_into_pkeys(query, **kw_args):
486
        if pkeys_table_exists_ref[0]:
487
            sql.insert_select(db, into, [in_pkey, into_out_pkey], query,
488
                **kw_args)
489
        else:
490
            kw_args.setdefault('add_pkey_', True)
491
            # don't warn if can't create pkey, because this just indicates that,
492
            # at some point in the import tree, it used a set-returning function
493
            kw_args.setdefault('add_pkey_warn', False)
494
            
495
            sql.run_query_into(db, query, into=into, **kw_args)
496
            pkeys_table_exists_ref[0] = True
497
    
498
    def mk_main_select(joins, cols): return sql.mk_select(db, joins, cols)
499
    
500
    if is_literals: insert_in_table = None
501
    else:
502
        insert_in_table = in_table
503
        insert_in_tables = [insert_in_table]
504
    join_cols = sql_gen.ColDict(db, out_table)
505
    join_custom_cond = None
506
    
507
    exc_strs = set()
508
    def log_exc(e):
509
        e_str = exc.str_(e, first_line_only=True)
510
        log_debug('Caught exception: '+e_str)
511
        if e_str in exc_strs: # avoid infinite loops
512
            log_debug('Exception already seen, handler broken')
513
            on_error(e)
514
            remove_all_rows()
515
            return False
516
        else: exc_strs.add(e_str)
517
        return True
518
    
519
    ignore_all_ref = [False]
520
    def remove_all_rows():
521
        log_debug('Ignoring all rows')
522
        ignore_all_ref[0] = True # just return the default value column
523
    
524
    def handle_unknown_exc(e):
525
        log_debug('No handler for exception')
526
        on_error(e)
527
        remove_all_rows()
528
    
529
    def ensure_cond(cond, e, passed=False, failed=False):
530
        '''
531
        @param passed at least one row passed the constraint
532
        @param failed at least one row failed the constraint
533
        '''
534
        if is_literals: # we know the constraint was applied exactly once
535
            if passed: pass
536
            elif failed: remove_all_rows()
537
            else: raise NotImplementedError()
538
        else:
539
            if not is_function:
540
                out_table_cols = sql_gen.ColDict(db, out_table)
541
                out_table_cols.update(util.dict_subset_right_join({},
542
                    sql.table_col_names(db, out_table)))
543
            
544
            in_cols = []
545
            cond = strings.ustr(cond)
546
            orig_cond = cond
547
            cond = sql_gen.map_expr(db, cond, mapping, in_cols)
548
            if not is_function:
549
                cond = sql_gen.map_expr(db, cond, out_table_cols)
550
            
551
            log_debug('Ignoring rows that do not satisfy '+strings.as_tt(cond))
552
            cur = None
553
            if cond == sql_gen.false_expr:
554
                assert failed
555
                remove_all_rows()
556
            elif cond == sql_gen.true_expr: assert passed
557
            else:
558
                while True:
559
                    not_cond = sql_gen.NotCond(sql_gen.CustomCode(cond))
560
                    try:
561
                        cur = sql.delete(db, insert_in_table, not_cond)
562
                        break
563
                    except sql.DoesNotExistException, e:
564
                        if e.type != 'column': raise
565
                        
566
                        last_cond = cond
567
                        cond = sql_gen.map_expr(db, cond, {e.name: None})
568
                        if cond == last_cond: raise # not fixable
569
            
570
            # If any rows failed cond
571
            if failed or cur != None and cur.rowcount > 0:
572
                track_data_error(db, errors_table_,
573
                    sql_gen.cross_join_srcs(in_cols), None, e.cause.pgcode,
574
                    strings.ensure_newl(strings.ustr(e.cause.pgerror))
575
                    +'condition: '+orig_cond+'\ntranslated condition: '+cond)
576
    
577
    not_null_cols = set()
578
    def ignore(in_col, value, e):
579
        if sql_gen.is_table_col(in_col):
580
            in_col = sql_gen.with_table(in_col, insert_in_table)
581
            
582
            track_data_error(db, errors_table_, in_col.srcs, value,
583
                e.cause.pgcode, e.cause.pgerror)
584
            
585
            sql.add_index(db, in_col, insert_in_table) # enable fast filtering
586
            if value != None and in_col not in not_null_cols:
587
                log_debug('Replacing invalid value '
588
                    +strings.as_tt(strings.urepr(value))+' with NULL in column '
589
                    +strings.as_tt(in_col.to_str(db)))
590
                sql.update(db, insert_in_table, [(in_col, None)],
591
                    sql_gen.ColValueCond(in_col, value))
592
            else:
593
                log_debug('Ignoring rows with '+strings.as_tt(in_col.to_str(db))
594
                    +' = '+strings.as_tt(strings.urepr(value)))
595
                sql.delete(db, insert_in_table,
596
                    sql_gen.ColValueCond(in_col, value))
597
                if value == None: not_null_cols.add(in_col)
598
        else:
599
            assert isinstance(in_col, sql_gen.NamedCol)
600
            in_value = sql_gen.remove_col_rename(in_col)
601
            assert sql_gen.is_literal(in_value)
602
            if value == in_value.value:
603
                if value != None:
604
                    log_debug('Replacing invalid literal '
605
                        +strings.as_tt(in_col.to_str(db))+' with NULL')
606
                    mapping[in_col.name] = None
607
                else:
608
                    remove_all_rows()
609
            # otherwise, all columns were being ignore()d because the specific
610
            # column couldn't be identified, and this was not the invalid column
611
    
612
    if not is_literals:
613
        def insert_pkeys_table(which):
614
            return sql_gen.Table(sql_gen.concat(in_table.name,
615
                '_insert_'+which+'_pkeys'))
616
        insert_out_pkeys = insert_pkeys_table('out')
617
        insert_in_pkeys = insert_pkeys_table('in')
618
    
619
    def mk_func_call():
620
        args = dict(((k.name, v) for k, v in mapping.iteritems()))
621
        return sql_gen.FunctionCall(out_table, **args), args
622
    
623
    def handle_MissingCastException(e):
624
        if not log_exc(e): return False
625
        
626
        type_ = e.type
627
        if e.col == None: out_cols = mapping.keys()
628
        else: out_cols = [e.col]
629
        
630
        for out_col in out_cols:
631
            log_debug('Casting '+strings.as_tt(strings.repr_no_u(out_col))
632
                +' input to '+strings.as_tt(type_))
633
            in_col = mapping[out_col]
634
            while True:
635
                try:
636
                    cast_col = cast_temp_col(db, type_, in_col, errors_table_)
637
                    mapping[out_col] = cast_col
638
                    if out_col in join_cols: join_cols[out_col] = cast_col
639
                    break # cast successful
640
                except sql.InvalidValueException, e:
641
                    if not log_exc(e): return False
642
                    
643
                    ignore(in_col, e.value, e)
644
        
645
        return True
646
    
647
    missing_msg = None
648
    
649
    # Do inserts and selects
650
    while True:
651
        has_joins = join_cols != {}
652
        
653
        if ignore_all_ref[0]: break # unrecoverable error, so don't do main case
654
        
655
        # Prepare to insert new rows
656
        if is_function:
657
            if is_literals:
658
                log_debug('Calling function')
659
                func_call, args = mk_func_call()
660
        else:
661
            log_debug('Trying to insert new rows')
662
            insert_args = dict(recover=True, cacheable=False)
663
            if has_joins:
664
                insert_args.update(dict(ignore=True))
665
            else:
666
                insert_args.update(dict(returning=out_pkey))
667
                if not is_literals:
668
                    insert_args.update(dict(into=insert_out_pkeys))
669
            main_select = mk_main_select([insert_in_table], [sql_gen.with_table(
670
                c, insert_in_table) for c in mapping.values()])
671
        
672
        try:
673
            cur = None
674
            if is_function:
675
                if is_literals:
676
                    cur = sql.select(db, fields=[func_call], recover=True,
677
                        cacheable=True)
678
                else:
679
                    log_debug('Defining wrapper function')
680
                    
681
                    func_call, args = mk_func_call()
682
                    func_call = sql_gen.NamedCol(into_out_pkey, func_call)
683
                    
684
                    # Create empty pkeys table so its row type can be used
685
                    insert_into_pkeys(sql.mk_select(db, input_joins,
686
                        [in_pkey_col, func_call], limit=0), add_pkey_=False,
687
                        recover=True)
688
                    result_type = db.col_info(sql_gen.Col(into_out_pkey,
689
                        into)).type
690
                    
691
                    ## Create error handling wrapper function
692
                    
693
                    wrapper = db.TempFunction(sql_gen.concat(into.name,
694
                        '_wrap'))
695
                    
696
                    select_cols = [in_pkey_col]+args.values()
697
                    row_var = copy.copy(sql_gen.row_var)
698
                    row_var.set_srcs([in_table])
699
                    in_pkey_var = sql_gen.Col(in_pkey, row_var)
700
                    
701
                    args = dict(((k, sql_gen.with_table(v, row_var))
702
                        for k, v in args.iteritems()))
703
                    func_call = sql_gen.FunctionCall(out_table, **args)
704
                    
705
                    def mk_return(result):
706
                        return sql_gen.ReturnQuery(sql.mk_select(db,
707
                            fields=[in_pkey_var, result], explain=False))
708
                    exc_handler = func_wrapper_exception_handler(db,
709
                        mk_return(sql_gen.Cast(result_type, None)),
710
                        args.values(), errors_table_)
711
                    
712
                    sql.define_func(db, sql_gen.FunctionDef(wrapper,
713
                        sql_gen.SetOf(into),
714
                        sql_gen.RowExcIgnore(sql_gen.RowType(in_table),
715
                            sql.mk_select(db, input_joins),
716
                            mk_return(func_call), exc_handler=exc_handler)
717
                        ))
718
                    wrapper_table = sql_gen.FunctionCall(wrapper)
719
                    
720
                    log_debug('Calling function')
721
                    insert_into_pkeys(sql.mk_select(db, wrapper_table,
722
                        order_by=None), recover=True, cacheable=False)
723
                    sql.add_pkey_or_index(db, into)
724
            else:
725
                cur = sql.insert_select(db, out_table, mapping.keys(),
726
                    main_select, **insert_args)
727
            break # insert successful
728
        except sql.MissingCastException, e:
729
            if not handle_MissingCastException(e): break
730
        except sql.DuplicateKeyException, e:
731
            if not log_exc(e): break
732
            
733
            # Different rows violating different unique constraints not
734
            # supported
735
            assert not join_cols
736
            
737
            join_custom_cond = e.cond
738
            if e.cond != None: ensure_cond(e.cond, e, passed=True)
739
            
740
            join_cols.update(util.dict_subset_right_join(mapping, e.cols))
741
            log_debug('Ignoring existing rows, comparing on these columns:\n'
742
                +strings.as_inline_table(join_cols, ustr=col_ustr))
743
            
744
            if is_literals:
745
                return sql.value(sql.select(db, out_table, [out_pkey_col],
746
                    join_cols, order_by=None))
747
            
748
            # Uniquify and filter input table to avoid (most) duplicate keys
749
            # (Additional duplicates may be added concurrently and will be
750
            # filtered out separately upon insert.)
751
            while True:
752
                try:
753
                    insert_in_table = sql.distinct_table(db, insert_in_table,
754
                        join_cols.values(), [insert_in_table,
755
                        sql_gen.Join(out_table, join_cols, sql_gen.filter_out,
756
                        e.cond)])
757
                    insert_in_tables.append(insert_in_table)
758
                    break # insert successful
759
                except sql.MissingCastException, e1: # don't modify outer e
760
                    if not handle_MissingCastException(e1): break
761
        except sql.NullValueException, e:
762
            if not log_exc(e): break
763
            
764
            out_col, = e.cols
765
            try: in_col = mapping[out_col]
766
            except KeyError, e:
767
                try: in_col = mapping[out_col] = col_defaults[out_col]
768
                except KeyError:
769
                    missing_msg = 'Missing mapping for NOT NULL column '+out_col
770
                    log_debug(missing_msg)
771
                    remove_all_rows()
772
            else: ignore(in_col, None, e)
773
        except sql.CheckException, e:
774
            if not log_exc(e): break
775
            
776
            ensure_cond(e.cond, e, failed=True)
777
        except sql.InvalidValueException, e:
778
            if not log_exc(e): break
779
            
780
            for in_col in mapping.values(): ignore(in_col, e.value, e)
781
        except psycopg2.extensions.TransactionRollbackError, e:
782
            if not log_exc(e): break
783
            # retry
784
        except sql.DatabaseErrors, e:
785
            if not log_exc(e): break
786
            
787
            handle_unknown_exc(e)
788
        # after exception handled, rerun loop with additional constraints
789
    
790
    # Resolve default value column
791
    if default != None:
792
        if ignore_all_ref[0]: mapping.update(orig_mapping) # use input cols
793
        try: default = mapping[default]
794
        except KeyError:
795
            db.log_debug('Default value column '
796
                +strings.as_tt(strings.repr_no_u(default))
797
                +' does not exist in mapping, falling back to None', level=2.1)
798
            default = None
799
        else: default = sql_gen.remove_col_rename(default)
800
    
801
    if missing_msg != None and default == None:
802
        warnings.warn(UserWarning(missing_msg))
803
        # not an error because sometimes the mappings include
804
        # extra tables which aren't used by the dataset
805
    
806
    # Handle unrecoverable errors
807
    if ignore_all_ref[0]:
808
        log_debug('Returning default: '+strings.as_tt(strings.urepr(default)))
809
        return default
810
    
811
    if cur != None and row_ct_ref != None and cur.rowcount >= 0:
812
        row_ct_ref[0] += cur.rowcount
813
    
814
    if is_literals: return sql.value_or_none(cur) # support multi-row functions
815
    
816
    if is_function: pass # pkeys table already created
817
    elif has_joins:
818
        select_joins = input_joins+[sql_gen.Join(out_table, join_cols,
819
            custom_cond=join_custom_cond)]
820
        log_debug('Getting output table pkeys of existing/inserted rows')
821
        insert_into_pkeys(sql.mk_select(db, select_joins, [in_pkey_col,
822
            sql_gen.NamedCol(into_out_pkey, out_pkey_col)], order_by=None))
823
    else:
824
        sql.add_row_num(db, insert_out_pkeys) # for joining with input pkeys
825
        
826
        log_debug('Getting input table pkeys of inserted rows')
827
        # Note that mk_main_select() does not use ORDER BY. Instead, assume that
828
        # since the SELECT query is identical to the one used in INSERT SELECT,
829
        # its rows will be retrieved in the same order.
830
        sql.run_query_into(db, mk_main_select(input_joins, [in_pkey]),
831
            into=insert_in_pkeys)
832
        sql.add_row_num(db, insert_in_pkeys) # for joining with output pkeys
833
        
834
        assert sql.table_row_count(db, insert_out_pkeys) == sql.table_row_count(
835
            db, insert_in_pkeys)
836
        
837
        log_debug('Combining output and input pkeys in inserted order')
838
        pkey_joins = [insert_in_pkeys, sql_gen.Join(insert_out_pkeys,
839
            {sql.row_num_col: sql_gen.join_same_not_null})]
840
        in_col = sql_gen.Col(in_pkey, insert_in_pkeys)
841
        out_col = sql_gen.NamedCol(into_out_pkey,
842
            sql_gen.Col(out_pkey, insert_out_pkeys))
843
        insert_into_pkeys(sql.mk_select(db, pkey_joins, [in_col, out_col],
844
            order_by=None))
845
        
846
        sql.empty_temp(db, [insert_out_pkeys, insert_in_pkeys])
847
    
848
    if not is_function: # is_function doesn't leave holes
849
        log_debug('Setting pkeys of missing rows to '
850
            +strings.as_tt(strings.urepr(default)))
851
        
852
        full_in_pkey_col = sql_gen.Col(in_pkey, full_in_table)
853
        if sql_gen.is_table_col(default):
854
            default = sql_gen.with_table(default, full_in_table)
855
        missing_rows_joins = [full_in_table, sql_gen.Join(into,
856
            {in_pkey: sql_gen.join_same_not_null}, sql_gen.filter_out)]
857
            # must use join_same_not_null or query will take forever
858
        
859
        insert_args = dict(order_by=None)
860
        if not sql.table_has_pkey(db, full_in_table): # in_table has duplicates
861
            insert_args.update(dict(distinct_on=[full_in_pkey_col]))
862
        
863
        insert_into_pkeys(sql.mk_select(db, missing_rows_joins,
864
            [full_in_pkey_col, sql_gen.NamedCol(into_out_pkey, default)],
865
            **insert_args))
866
    # otherwise, there is already an entry for every row
867
    
868
    sql.empty_temp(db, insert_in_tables+[full_in_table])
869
    
870
    srcs = []
871
    if is_function: srcs = sql_gen.cols_srcs(in_cols)
872
    return sql_gen.Col(into_out_pkey, into, srcs)
(37-37/49)