# (stray page-navigation text from the source listing removed: "Project",
# "General", "Profile")

1
# Database import/export
2

    
3
import copy
4
import csv
5
import operator
6
import warnings
7
import sys
8

    
9
import csvs
10
import exc
11
import dicts
12
import sql
13
import sql_gen
14
import streams
15
import strings
16
import util
17

    
18
##### Exceptions
19

    
20
# Can't use built-in SyntaxError because it stringifies to only the first line
class SyntaxError(Exception):
    '''Syntax error whose str() includes the complete (multi-line) message.'''
    pass
22

    
23
##### Data cleanup
24

    
25
def table_nulls_mapped__set(db, table):
    '''Marks table (via util.table_nulls_mapped__set()) as having had its null
    strings mapped to NULL.
    '''
    assert isinstance(table, sql_gen.Table)
    regclass = sql_gen.table2regclass_text(db, table)
    sql.run_query(db, 'SELECT util.table_nulls_mapped__set('+regclass+')')
29

    
30
def table_nulls_mapped__get(db, table):
    '''Returns whether table has been marked (by table_nulls_mapped__set()) as
    having had its null strings mapped to NULL.
    '''
    assert isinstance(table, sql_gen.Table)
    regclass = sql_gen.table2regclass_text(db, table)
    return sql.value(sql.run_query(db,
        'SELECT util.table_nulls_mapped__get('+regclass+')'))
34

    
35
# Strings that are treated as NULL when cleaning up a table's text columns
# (each one becomes a nullif() wrapper in cleanup_table())
null_strs = ['', '-', r'\N', 'NULL', 'UNKNOWN', 'nulo']
36

    
37
def cleanup_table(db, table):
    '''Trims whitespace and maps null strings to NULL in all text columns.
    idempotent
    '''
    table = sql_gen.as_Table(table)
    
    if table_nulls_mapped__get(db, table): return # already cleaned up
    
    # Only text columns need cleanup; the pkey column is excluded so its
    # values stay intact
    cols = filter(lambda c: sql_gen.is_text_col(db, c),
        sql.table_cols(db, table))
    try: pkey_col = sql.table_pkey_col(db, table)
    except sql.DoesNotExistException: pass # no pkey, nothing to exclude
    else:
        try: cols.remove(pkey_col)
        except ValueError: pass # pkey is not a text column
    if not cols: return
    
    db.log_debug('Cleaning up table', level=1.5)
    
    # Build nested nullif() calls around trim(), one level per null string:
    # nullif(nullif(...trim(both from col)..., <null0>), <null1>)...
    expr = 'trim(both from %s)'
    for null in null_strs: expr = 'nullif('+expr+', '+db.esc_value(null)+')'
    changes = [(v, sql_gen.CustomCode(expr % v.to_str(db))) for v in cols]
    
    while True:
        try:
            sql.update(db, table, changes, in_place=True, recover=True)
            break # successful
        except sql.NullValueException, e:
            # a NOT NULL column received a NULL: drop the constraint and retry
            db.log_debug('Caught exception: '+exc.str_(e))
            col, = e.cols
            sql.drop_not_null(db, col)
    
    db.log_debug('Vacuuming and reanalyzing table', level=1.5)
    sql.vacuum(db, table)
    
    table_nulls_mapped__set(db, table) # mark as cleaned up, for idempotency
71

    
72
##### Error tracking
73

    
74
def track_data_error(db, errors_table, cols, value, error_code, error):
    '''Records a data error against each of the given source columns.
    @param errors_table If None, does nothing.
    '''
    if errors_table == None: return
    
    col_names = [c.name for c in cols] or [None] # need at least one entry
    for col_name in col_names:
        row = dict(column=col_name, value=value, error_code=error_code,
            error=error)
        try: sql.insert(db, errors_table, row, recover=True, cacheable=True,
            log_level=4)
        except sql.DuplicateKeyException: pass # this error is already recorded
88

    
89
class ExcToErrorsTable(sql_gen.ExcToWarning):
    '''Handles an exception by saving it or converting it to a warning.'''
    def __init__(self, return_, srcs, errors_table, value=None):
        '''
        @param return_ See sql_gen.ExcToWarning
        @param srcs The column names for the errors table
        @param errors_table None|sql_gen.Table
        @param value The value (or an expression for it) that caused the error
        @pre The invalid value must be in a local variable "value" of type text.
        '''
        sql_gen.ExcToWarning.__init__(self, return_)
        
        # wrap so to_str() below can embed it in the generated SQL
        value = sql_gen.as_Code(value)
        
        self.srcs = srcs
        self.errors_table = errors_table
        self.value = value
    
    def to_str(self, db):
        # Without source columns or an errors table there is nowhere to save
        # the error, so fall back to the warning behavior
        if not self.srcs or self.errors_table == None:
            return sql_gen.ExcToWarning.to_str(self, db)
        
        # Col objects for the errors table's columns, in insert order
        errors_table_cols = map(sql_gen.Col,
            ['column', 'value', 'error_code', 'error'])
        # VALUES list of the source column names, iterated one row at a time
        col_names_query = sql.mk_select(db, sql_gen.NamedValues('c', None,
            [[c.name] for c in self.srcs]), order_by=None)
        insert_query = sql.mk_insert_select(db, self.errors_table,
            errors_table_cols,
            sql_gen.Values(errors_table_cols).to_str(db))+';\n'
        return '''\
-- Save error in errors table.
DECLARE
    error_code text := SQLSTATE;
    error text := SQLERRM;
    value text := '''+self.value.to_str(db)+''';
    "column" text;
BEGIN
    -- Insert the value and error for *each* source column.
'''+strings.indent(sql_gen.RowExcIgnore(None, col_names_query, insert_query,
    row_var=errors_table_cols[0]).to_str(db))+'''
END;

'''+self.return_.to_str(db)
132

    
133
def data_exception_handler(*args, **kw_args):
    '''Handles a data_exception by saving it or converting it to a warning.
    For params, see ExcToErrorsTable().
    '''
    handler = ExcToErrorsTable(*args, **kw_args)
    return sql_gen.data_exception_handler(handler)
138

    
139
def cast(db, type_, col, errors_table=None):
    '''Casts an (unrenamed) column or value.
    If errors_table set and col has srcs, saves errors in errors_table (using
    col's srcs attr as source columns). Otherwise, converts errors to warnings.
    @param col str|sql_gen.Col|sql_gen.Literal
    @param errors_table None|sql_gen.Table|str
    @return sql_gen.FunctionCall A call to the generated casting function
    '''
    col = sql_gen.as_Col(col)
    
    # Don't convert exceptions to warnings for user-supplied constants
    if isinstance(col, sql_gen.Literal): return sql_gen.Cast(type_, col)
    
    assert not isinstance(col, sql_gen.NamedCol)
    
    function_name = strings.first_word(type_)
    srcs = col.srcs
    save_errors = errors_table != None and srcs
    if save_errors: # function will be unique for the given srcs
        function_name = strings.ustr(sql_gen.FunctionCall(function_name,
            *map(sql_gen.to_name_only_col, srcs)))
    function = db.TempFunction(function_name)
    
    # Create function definition
    modifiers = 'STRICT'
    # error-saving functions insert into the errors table, so only the pure
    # cast can be marked IMMUTABLE
    if not save_errors: modifiers = 'IMMUTABLE '+modifiers
    value_param = sql_gen.FunctionParam('value', 'anyelement')
    # on error, the handler saves/warns and the function returns NULL
    handler = data_exception_handler('RETURN NULL;\n', srcs, errors_table,
        value_param.name)
    body = sql_gen.CustomCode(handler.to_str(db, '''\
/* The explicit cast to the return type is needed to make the cast happen
inside the try block. (Implicit casts to the return type happen at the end
of the function, outside any block.) */
RETURN '''+sql_gen.Cast(type_, sql_gen.CustomCode('value')).to_str(db)+''';
'''))
    body.lang='plpgsql'
    sql.define_func(db, sql_gen.FunctionDef(function, type_, body,
        [value_param], modifiers))
    
    return sql_gen.FunctionCall(function, col)
178

    
179
def func_wrapper_exception_handler(db, return_, args, errors_table):
    '''Handles a function call's data_exceptions.
    Supports PL/Python functions.
    @param return_ See data_exception_handler()
    @param args [arg...] Function call's args
    @param errors_table See data_exception_handler()
    '''
    # only args with source columns can be reported against the errors table
    args = [arg for arg in args if sql_gen.has_srcs(arg)]
    
    srcs = sql_gen.cross_join_srcs(args)
    value = sql_gen.merge_not_null(db, ',', args)
    handler = data_exception_handler(return_, srcs, errors_table, value)
    return sql_gen.NestedExcHandler(handler, sql_gen.plpythonu_error_handler)
194

    
195
def cast_temp_col(db, type_, col, errors_table=None):
    '''Like cast(), but creates a new column with the cast values if the input
    is a column.
    @return The new column or cast value
    '''
    def do_cast(c): return cast(db, type_, c, errors_table)
    
    try: col = sql_gen.underlying_col(col)
    except sql_gen.NoUnderlyingTableException:
        return sql_gen.wrap(do_cast, col) # not a table column: just cast it
    
    table = col.table
    new_col = sql_gen.suffixed_col(col, '::'+strings.first_word(type_))
    cast_expr = do_cast(col)
    
    # Add the new column to the table
    new_typed_col = sql_gen.TypedCol(new_col.name, type_)
    sql.add_col(db, table, new_typed_col, comment=strings.urepr(col)+'::'+type_)
    new_col.name = new_typed_col.name # propagate any renaming
    
    # Populate it with the cast values
    sql.update(db, table, [(new_col, cast_expr)], in_place=True, recover=True)
    
    return new_col
217

    
218
def errors_table(db, table, if_exists=True):
    '''Returns the errors table associated with table.
    @param if_exists If set, returns None if the errors table doesn't exist
    @return None|sql_gen.Table
    '''
    table = sql_gen.as_Table(table)
    if table.srcs != (): table = table.srcs[0] # use the original source table
    
    errors_table_ = sql_gen.suffixed_table(table, '.errors')
    if if_exists and not sql.table_exists(db, errors_table_): return None
    return errors_table_
229

    
230
def mk_errors_table(db, table):
    '''Creates the errors table for table, if it doesn't already exist.'''
    table_ = errors_table(db, table, if_exists=False)
    if sql.table_exists(db, table_, cacheable=False): return # already created
    
    sql.create_table(db, table_, [
        sql_gen.TypedCol('column', 'text'),
        sql_gen.TypedCol('value', 'text'),
        sql_gen.TypedCol('error_code', 'character varying(5)', nullable=False),
        sql_gen.TypedCol('error', 'text', nullable=False),
        ], has_pkey=False)
    # Deduplicate entries; the free-text fields are indexed via md5() digests
    sql.add_index(db, ['column', sql_gen.CustomCode('md5(value)'),
        'error_code', sql_gen.CustomCode('md5(error)')], table_, unique=True)
244

    
245
##### Import
246

    
247
row_num_col_def = copy.copy(sql.row_num_col_def)
248
row_num_col_def.name = 'row_num'
249
row_num_col_def.type = 'integer'
250

    
251
def append_csv(db, table, reader, header):
    '''COPYs CSV rows from reader into an existing table.
    @param header The CSV's column names. If these differ from the table's
        column names, an explicit column list is added to the COPY statement.
    '''
    def esc_name_(name): return sql.esc_name(db, name)
    
    def log(msg, level=1): db.log_debug(msg, level)
    
    # Wrap in standardizing stream
    cols_ct = len(header)
    stream = csvs.InputRewriter(streams.ProgressInputStream(csvs.StreamFilter(
        csvs.ColCtFilter(reader, cols_ct)), sys.stderr, msg='Read %d row(s)',
        n=1000))
    dialect = stream.dialect # use default dialect
    
    # Create COPY FROM statement
    if header == sql.table_col_names(db, table): cols_str = ''
    else: cols_str =' ('+(', '.join(map(esc_name_, header)))+')'
    copy_from = ('COPY '+table.to_str(db)+cols_str+' FROM STDIN DELIMITER '
        +db.esc_value(dialect.delimiter)+' NULL '+db.esc_value(''))
    assert not csvs.is_tsv(dialect)
    copy_from += ' CSV'
    if dialect.quoting != csv.QUOTE_NONE:
        quote_str = db.esc_value(dialect.quotechar)
        copy_from += ' QUOTE '+quote_str
        if dialect.doublequote: copy_from += ' ESCAPE '+quote_str
    copy_from += ';\n'
    
    log(copy_from, level=2)
    # copy_expert() streams the CSV data to the database server; any error is
    # translated into the appropriate sql exception
    try: db.db.cursor().copy_expert(copy_from, stream)
    except Exception, e: sql.parse_exception(db, e, recover=True)
279

    
280
def import_csv(db, table, reader, header):
    '''Creates table, COPYs the CSV data into it, and cleans it up.
    @param reader csv.reader-style reader for the data rows
    @param header The CSV's column names (modified in place: a row_num column
        is prepended)
    '''
    def log(msg, level=1): db.log_debug(msg, level)
    
    # Get format info
    col_names = map(strings.to_unicode, header)
    for i, col in enumerate(col_names): # replace empty column names
        if col == '': col_names[i] = 'column_'+str(i)
    
    # All data is loaded as text; cleanup_table()/cast_temp_col() handle types
    typed_cols = [sql_gen.TypedCol(v, 'text') for v in col_names]
    typed_cols.insert(0, row_num_col_def)
    header.insert(0, row_num_col_def.name)
    reader = csvs.RowNumFilter(reader)
    
    log('Creating table')
    # Note that this is not rolled back if the import fails. Instead, it is
    # cached, and will not be re-run if the import is retried.
    sql.create_table(db, table, typed_cols, has_pkey=False, col_indexes=False)
    
    # Free memory used by deleted (rolled back) rows from any failed import.
    # This MUST be run so that the rows will be stored in inserted order, and
    # the row_num added after import will match up with the CSV's row order.
    sql.truncate(db, table)
    
    # Load the data
    def load(): append_csv(db, table, reader, header)
    sql.with_savepoint(db, load)
    
    cleanup_table(db, table)
311

    
312
def put(db, table, row, pkey_=None, row_ct_ref=None, on_error=exc.reraise):
    '''Wrapper around put_table() for a single row of literal values.
    Recovers from errors.
    Only works under PostgreSQL (uses INSERT RETURNING).
    '''
    in_tables = [] # no input tables: row contains only literal values
    return put_table(db, table, in_tables, row, row_ct_ref, on_error=on_error)
317

    
318
def get(db, table, row, pkey, row_ct_ref=None, create=False):
    '''Looks up the pkey of the row matching row, optionally inserting it.
    Recovers from errors.
    '''
    try:
        cur = sql.select(db, table, [pkey], row, limit=1, recover=True)
        return sql.value(cur)
    except StopIteration: # no matching row
        if create: return put(db, table, row, pkey, row_ct_ref) # insert new row
        raise
326

    
327
def is_func_result(col):
    '''Returns whether col is the "result" column of a function call table
    (recognized by the parenthesis in the table name).
    '''
    return '(' in col.table.name and col.name == 'result'
329

    
330
def into_table_name(out_table, in_tables0, mapping, is_func):
    '''Generates a descriptive name for the pkeys ("into") table of an insert.
    '''
    def fmt_in_col(in_col):
        in_col = sql_gen.remove_col_rename(in_col)
        if isinstance(in_col, sql_gen.Col):
            table = in_col.table
            if table == in_tables0: in_col = sql_gen.to_name_only_col(in_col)
            elif is_func_result(in_col): in_col = table # omit col name
        return strings.ustr(in_col)
    
    name = strings.ustr(out_table)
    if is_func: # function call: render the args inside parens
        name += '('
        try: value_in_col = mapping['value']
        except KeyError: # no value arg: render all args as k=v pairs
            name += ', '.join(strings.ustr(k)+'='+fmt_in_col(v)
                for k, v in mapping.iteritems())
        else: name += fmt_in_col(value_in_col)
        name += ')'
    else:
        out_col = 'rank'
        try: in_col = mapping[out_col]
        except KeyError: name += '_pkeys'
        else: # has a rank column, so hierarchical
            name += '['+strings.ustr(out_col)+'='+fmt_in_col(in_col)+']'
    return name
358

    
359
def put_table(db, out_table, in_tables, mapping, row_ct_ref=None, default=None,
360
    col_defaults={}, on_error=exc.reraise):
361
    '''Recovers from errors.
362
    Only works under PostgreSQL (uses INSERT RETURNING).
363
    
364
    Warning: This function's normalizing algorithm does not support database
365
    triggers that populate fields covered by the unique constraint used to do
366
    the DISTINCT ON. Such fields must be populated by the mappings instead.
367
    (Other unique constraints and other non-unique fields are not affected by
368
    this restriction on triggers. Note that the primary key will normally not be
369
    the DISTINCT ON constraint, so trigger-populated natural keys are supported
370
    *unless* the input table contains duplicate rows for some generated keys.)
371
    
372
    Note that much of the complexity of the normalizing algorithm is due to
373
    PostgreSQL (and other DB systems) not having a native command for
374
    insert/on duplicate select. This operation is a cross between MySQL's
375
    INSERT ON DUPLICATE KEY UPDATE (which does not provide SELECT
376
    functionality), and PostgreSQL's INSERT RETURNING (which throws an error
377
    on duplicate instead of returning the existing row).
378
    
379
    @param in_tables The main input table to select from, followed by a list of
380
        tables to join with it using the main input table's pkey
381
    @param mapping dict(out_table_col=in_table_col, ...)
382
        * out_table_col: str (*not* sql_gen.Col)
383
        * in_table_col: sql_gen.Col|literal-value
384
    @param default The *output* column to use as the pkey for missing rows.
385
        If this output column does not exist in the mapping, uses None.
386
    @param col_defaults Default values for required columns.
387
    @return sql_gen.Col Where the output pkeys are made available
388
    '''
389
    import psycopg2.extensions
390
    
391
    # Special handling for functions with hstore params
392
    if out_table == '_map':
393
        import psycopg2.extras
394
        psycopg2.extras.register_hstore(db.db)
395
        
396
        # Parse args
397
        try: value = mapping.pop('value')
398
        except KeyError: return None # value required
399
        
400
        mapping = dict([(k, sql_gen.get_value(v))
401
            for k, v in mapping.iteritems()]) # unwrap literal value
402
        mapping = dict(map=mapping, value=value) # non-value params -> hstore
403
    
404
    out_table = sql_gen.as_Table(out_table)
405
    
406
    def log_debug(msg): db.log_debug(msg, level=1.5)
407
    def col_ustr(str_):
408
        return strings.repr_no_u(sql_gen.remove_col_rename(str_))
409
    
410
    log_debug('********** New iteration **********')
411
    log_debug('Inserting these input columns into '+strings.as_tt(
412
        out_table.to_str(db))+':\n'+strings.as_table(mapping, ustr=col_ustr))
413
    
414
    is_function = sql.function_exists(db, out_table)
415
    
416
    if is_function: row_ct_ref = None # only track inserted rows
417
    
418
    # Warn if inserting empty table rows
419
    if not mapping and not is_function: # functions with no args OK
420
        warnings.warn(UserWarning('Inserting empty table row(s)'))
421
    
422
    if is_function: out_pkey = 'result'
423
    else: out_pkey = sql.pkey_name(db, out_table, recover=True)
424
    out_pkey_col = sql_gen.as_Col(out_pkey, out_table)
425
    
426
    in_tables_ = copy.copy(in_tables) # don't modify input!
427
    try: in_tables0 = in_tables_.pop(0) # first table is separate
428
    except IndexError: in_tables0 = None
429
    else:
430
        in_pkey = sql.pkey_name(db, in_tables0, recover=True)
431
        in_pkey_col = sql_gen.as_Col(in_pkey, in_tables0)
432
    
433
    # Determine if can use optimization for only literal values
434
    is_literals = not reduce(operator.or_, map(sql_gen.is_table_col,
435
        mapping.values()), False)
436
    is_literals_or_function = is_literals or is_function
437
    
438
    if in_tables0 == None: errors_table_ = None
439
    else: errors_table_ = errors_table(db, in_tables0)
440
    
441
    # Create input joins from list of input tables
442
    input_joins = [in_tables0]+[sql_gen.Join(v,
443
        {in_pkey: sql_gen.join_same_not_null}) for v in in_tables_]
444
    
445
    orig_mapping = mapping.copy()
446
    if mapping == {} and not is_function: # need >= one column for INSERT SELECT
447
        mapping = {out_pkey: None} # ColDict will replace with default value
448
    
449
    if not is_literals:
450
        into = sql_gen.as_Table(into_table_name(out_table, in_tables0, mapping,
451
            is_function))
452
        # Ensure into's out_pkey is different from in_pkey by prepending "out."
453
        if is_function: into_out_pkey = out_pkey
454
        else: into_out_pkey = 'out.'+out_pkey
455
        
456
        # Set column sources
457
        in_cols = filter(sql_gen.is_table_col, mapping.values())
458
        for col in in_cols:
459
            if col.table == in_tables0: col.set_srcs(sql_gen.src_self)
460
        
461
        log_debug('Joining together input tables into temp table')
462
        # Place in new table so don't modify input and for speed
463
        in_table = sql_gen.Table('in')
464
        mapping = dicts.join(mapping, sql.flatten(db, in_table, input_joins,
465
            in_cols, preserve=[in_pkey_col]))
466
        input_joins = [in_table]
467
        db.log_debug('Temp table: '+strings.as_tt(in_table.to_str(db)), level=2)
468
    
469
    # Wrap mapping in a sql_gen.ColDict.
470
    # sql_gen.ColDict sanitizes both keys and values passed into it.
471
    # Do after applying dicts.join() because that returns a plain dict.
472
    mapping = sql_gen.ColDict(db, out_table, mapping)
473
    
474
    # Save all rows since in_table may have rows deleted
475
    if is_literals: pass
476
    elif is_function: full_in_table = in_table
477
    else:
478
        full_in_table = sql_gen.suffixed_table(in_table, '_full')
479
        sql.copy_table(db, in_table, full_in_table)
480
    
481
    pkeys_table_exists_ref = [False]
482
    def insert_into_pkeys(query, **kw_args):
483
        if pkeys_table_exists_ref[0]:
484
            sql.insert_select(db, into, [in_pkey, into_out_pkey], query,
485
                **kw_args)
486
        else:
487
            kw_args.setdefault('add_pkey_', True)
488
            
489
            sql.run_query_into(db, query, into=into, **kw_args)
490
            pkeys_table_exists_ref[0] = True
491
    
492
    def mk_main_select(joins, cols): return sql.mk_select(db, joins, cols)
493
    
494
    if is_literals: insert_in_table = None
495
    else:
496
        insert_in_table = in_table
497
        insert_in_tables = [insert_in_table]
498
    join_cols = sql_gen.ColDict(db, out_table)
499
    
500
    exc_strs = set()
501
    def log_exc(e):
502
        e_str = exc.str_(e, first_line_only=True)
503
        log_debug('Caught exception: '+e_str)
504
        if e_str in exc_strs: # avoid infinite loops
505
            log_debug('Exception already seen, handler broken')
506
            on_error(e)
507
            remove_all_rows()
508
            return False
509
        else: exc_strs.add(e_str)
510
        return True
511
    
512
    ignore_all_ref = [False]
513
    def remove_all_rows():
514
        log_debug('Ignoring all rows')
515
        ignore_all_ref[0] = True # just return the default value column
516
    
517
    def handle_unknown_exc(e):
518
        log_debug('No handler for exception')
519
        on_error(e)
520
        remove_all_rows()
521
    
522
    def ensure_cond(cond, e, passed=False, failed=False):
523
        if is_literals: # we know the constraint was applied exactly once
524
            if passed: pass
525
            elif failed: remove_all_rows()
526
            else: raise NotImplementedError()
527
        else:
528
            if not is_function:
529
                out_table_cols = sql_gen.ColDict(db, out_table)
530
                out_table_cols.update(util.dict_subset_right_join({},
531
                    sql.table_col_names(db, out_table)))
532
            
533
            in_cols = []
534
            cond = strings.ustr(cond)
535
            orig_cond = cond
536
            cond = sql_gen.map_expr(db, cond, mapping, in_cols)
537
            if not is_function:
538
                cond = sql_gen.map_expr(db, cond, out_table_cols)
539
            
540
            log_debug('Ignoring rows that do not satisfy '+strings.as_tt(cond))
541
            cur = None
542
            if cond == sql_gen.false_expr:
543
                assert failed
544
                remove_all_rows()
545
            elif cond == sql_gen.true_expr: assert passed
546
            else:
547
                while True:
548
                    not_cond = sql_gen.NotCond(sql_gen.CustomCode(cond))
549
                    try:
550
                        cur = sql.delete(db, insert_in_table, not_cond)
551
                        break
552
                    except sql.DoesNotExistException, e:
553
                        if e.type != 'column': raise
554
                        
555
                        last_cond = cond
556
                        cond = sql_gen.map_expr(db, cond, {e.name: None})
557
                        if cond == last_cond: raise # not fixable
558
            
559
            # If any rows failed cond
560
            if failed or cur != None and cur.rowcount > 0:
561
                track_data_error(db, errors_table_,
562
                    sql_gen.cross_join_srcs(in_cols), None, e.cause.pgcode,
563
                    strings.ensure_newl(strings.ustr(e.cause.pgerror))
564
                    +'condition: '+orig_cond+'\ntranslated condition: '+cond)
565
    
566
    not_null_cols = set()
567
    def ignore(in_col, value, e):
568
        if sql_gen.is_table_col(in_col):
569
            in_col = sql_gen.with_table(in_col, insert_in_table)
570
            
571
            track_data_error(db, errors_table_, in_col.srcs, value,
572
                e.cause.pgcode, e.cause.pgerror)
573
            
574
            sql.add_index(db, in_col, insert_in_table) # enable fast filtering
575
            if value != None and in_col not in not_null_cols:
576
                log_debug('Replacing invalid value '
577
                    +strings.as_tt(strings.urepr(value))+' with NULL in column '
578
                    +strings.as_tt(in_col.to_str(db)))
579
                sql.update(db, insert_in_table, [(in_col, None)],
580
                    sql_gen.ColValueCond(in_col, value))
581
            else:
582
                log_debug('Ignoring rows with '+strings.as_tt(in_col.to_str(db))
583
                    +' = '+strings.as_tt(strings.urepr(value)))
584
                sql.delete(db, insert_in_table,
585
                    sql_gen.ColValueCond(in_col, value))
586
                if value == None: not_null_cols.add(in_col)
587
        else:
588
            assert isinstance(in_col, sql_gen.NamedCol)
589
            in_value = sql_gen.remove_col_rename(in_col)
590
            assert sql_gen.is_literal(in_value)
591
            if value == in_value.value:
592
                if value != None:
593
                    log_debug('Replacing invalid literal '
594
                        +strings.as_tt(in_col.to_str(db))+' with NULL')
595
                    mapping[in_col.name] = None
596
                else:
597
                    remove_all_rows()
598
            # otherwise, all columns were being ignore()d because the specific
599
            # column couldn't be identified, and this was not the invalid column
600
    
601
    if not is_literals:
602
        def insert_pkeys_table(which):
603
            return sql_gen.Table(sql_gen.concat(in_table.name,
604
                '_insert_'+which+'_pkeys'))
605
        insert_out_pkeys = insert_pkeys_table('out')
606
        insert_in_pkeys = insert_pkeys_table('in')
607
    
608
    def mk_func_call():
609
        args = dict(((k.name, v) for k, v in mapping.iteritems()))
610
        return sql_gen.FunctionCall(out_table, **args), args
611
    
612
    missing_msg = None
613
    
614
    # Do inserts and selects
615
    while True:
616
        has_joins = join_cols != {}
617
        
618
        if ignore_all_ref[0]: break # unrecoverable error, so don't do main case
619
        
620
        # Prepare to insert new rows
621
        if is_function:
622
            if is_literals:
623
                log_debug('Calling function')
624
                func_call, args = mk_func_call()
625
        else:
626
            log_debug('Trying to insert new rows')
627
            insert_args = dict(recover=True, cacheable=False)
628
            if has_joins:
629
                insert_args.update(dict(ignore=True))
630
            else:
631
                insert_args.update(dict(returning=out_pkey))
632
                if not is_literals:
633
                    insert_args.update(dict(into=insert_out_pkeys))
634
            main_select = mk_main_select([insert_in_table], [sql_gen.with_table(
635
                c, insert_in_table) for c in mapping.values()])
636
        
637
        try:
638
            cur = None
639
            if is_function:
640
                if is_literals:
641
                    cur = sql.select(db, fields=[func_call], recover=True,
642
                        cacheable=True)
643
                else:
644
                    log_debug('Defining wrapper function')
645
                    
646
                    func_call, args = mk_func_call()
647
                    func_call = sql_gen.NamedCol(into_out_pkey, func_call)
648
                    
649
                    # Create empty pkeys table so its row type can be used
650
                    insert_into_pkeys(sql.mk_select(db, input_joins,
651
                        [in_pkey_col, func_call], limit=0), add_pkey_=False,
652
                        recover=True)
653
                    result_type = db.col_info(sql_gen.Col(into_out_pkey,
654
                        into)).type
655
                    
656
                    ## Create error handling wrapper function
657
                    
658
                    wrapper = db.TempFunction(sql_gen.concat(into.name,
659
                        '_wrap'))
660
                    
661
                    select_cols = [in_pkey_col]+args.values()
662
                    row_var = copy.copy(sql_gen.row_var)
663
                    row_var.set_srcs([in_table])
664
                    in_pkey_var = sql_gen.Col(in_pkey, row_var)
665
                    
666
                    args = dict(((k, sql_gen.with_table(v, row_var))
667
                        for k, v in args.iteritems()))
668
                    func_call = sql_gen.FunctionCall(out_table, **args)
669
                    
670
                    def mk_return(result):
671
                        return sql_gen.ReturnQuery(sql.mk_select(db,
672
                            fields=[in_pkey_var, result], explain=False))
673
                    exc_handler = func_wrapper_exception_handler(db,
674
                        mk_return(sql_gen.Cast(result_type, None)),
675
                        args.values(), errors_table_)
676
                    
677
                    sql.define_func(db, sql_gen.FunctionDef(wrapper,
678
                        sql_gen.SetOf(into),
679
                        sql_gen.RowExcIgnore(sql_gen.RowType(in_table),
680
                            sql.mk_select(db, input_joins),
681
                            mk_return(func_call), exc_handler=exc_handler)
682
                        ))
683
                    wrapper_table = sql_gen.FunctionCall(wrapper)
684
                    
685
                    log_debug('Calling function')
686
                    insert_into_pkeys(sql.mk_select(db, wrapper_table,
687
                        order_by=None), recover=True, cacheable=False)
688
                    sql.add_pkey_or_index(db, into)
689
            else:
690
                cur = sql.insert_select(db, out_table, mapping.keys(),
691
                    main_select, **insert_args)
692
            break # insert successful
693
        except sql.MissingCastException, e:
694
            if not log_exc(e): break
695
            
696
            type_ = e.type
697
            if e.col == None: out_cols = mapping.keys()
698
            else: out_cols = [e.col]
699
            
700
            for out_col in out_cols:
701
                log_debug('Casting '+strings.as_tt(strings.repr_no_u(out_col))
702
                    +' input to '+strings.as_tt(type_))
703
                in_col = mapping[out_col]
704
                while True:
705
                    try:
706
                        mapping[out_col] = cast_temp_col(db, type_, in_col,
707
                            errors_table_)
708
                        break # cast successful
709
                    except sql.InvalidValueException, e:
710
                        if not log_exc(e): break
711
                        
712
                        ignore(in_col, e.value, e)
713
        except sql.DuplicateKeyException, e:
714
            if not log_exc(e): break
715
            
716
            # Different rows violating different unique constraints not
717
            # supported
718
            assert not join_cols
719
            
720
            if e.cond != None: ensure_cond(e.cond, e, passed=True)
721
            
722
            join_cols.update(util.dict_subset_right_join(mapping, e.cols))
723
            log_debug('Ignoring existing rows, comparing on these columns:\n'
724
                +strings.as_inline_table(join_cols, ustr=col_ustr))
725
            
726
            if is_literals:
727
                return sql.value(sql.select(db, out_table, [out_pkey_col],
728
                    join_cols, order_by=None))
729
            
730
            # Uniquify and filter input table to avoid (most) duplicate keys
731
            # (Additional duplicates may be added concurrently and will be
732
            # filtered out separately upon insert.)
733
            insert_in_table = sql.distinct_table(db, insert_in_table,
734
                join_cols.values(), [insert_in_table,
735
                sql_gen.Join(out_table, join_cols, sql_gen.filter_out)])
736
            insert_in_tables.append(insert_in_table)
737
        except sql.NullValueException, e:
738
            if not log_exc(e): break
739
            
740
            out_col, = e.cols
741
            try: in_col = mapping[out_col]
742
            except KeyError, e:
743
                try: in_col = mapping[out_col] = col_defaults[out_col]
744
                except KeyError:
745
                    missing_msg = 'Missing mapping for NOT NULL column '+out_col
746
                    log_debug(missing_msg)
747
                    remove_all_rows()
748
            else: ignore(in_col, None, e)
749
        except sql.CheckException, e:
750
            if not log_exc(e): break
751
            
752
            ensure_cond(e.cond, e, failed=True)
753
        except sql.InvalidValueException, e:
754
            if not log_exc(e): break
755
            
756
            for in_col in mapping.values(): ignore(in_col, e.value, e)
757
        except psycopg2.extensions.TransactionRollbackError, e:
758
            if not log_exc(e): break
759
            # retry
760
        except sql.DatabaseErrors, e:
761
            if not log_exc(e): break
762
            
763
            handle_unknown_exc(e)
764
        # after exception handled, rerun loop with additional constraints
765
    
766
    # Resolve default value column
767
    if default != None:
768
        if ignore_all_ref[0]: mapping.update(orig_mapping) # use input cols
769
        try: default = mapping[default]
770
        except KeyError:
771
            db.log_debug('Default value column '
772
                +strings.as_tt(strings.repr_no_u(default))
773
                +' does not exist in mapping, falling back to None', level=2.1)
774
            default = None
775
        else: default = sql_gen.remove_col_rename(default)
776
    
777
    if missing_msg != None and default == None:
778
        warnings.warn(UserWarning(missing_msg))
779
        # not an error because sometimes the mappings include
780
        # extra tables which aren't used by the dataset
781
    
782
    # Handle unrecoverable errors
783
    if ignore_all_ref[0]:
784
        log_debug('Returning default: '+strings.as_tt(strings.urepr(default)))
785
        return default
786
    
787
    if cur != None and row_ct_ref != None and cur.rowcount >= 0:
788
        row_ct_ref[0] += cur.rowcount
789
    
790
    if is_literals: return sql.value(cur)
791
    
792
    if is_function: pass # pkeys table already created
793
    elif has_joins:
794
        select_joins = input_joins+[sql_gen.Join(out_table, join_cols)]
795
        log_debug('Getting output table pkeys of existing/inserted rows')
796
        insert_into_pkeys(sql.mk_select(db, select_joins, [in_pkey_col,
797
            sql_gen.NamedCol(into_out_pkey, out_pkey_col)], order_by=None))
798
    else:
799
        sql.add_row_num(db, insert_out_pkeys) # for joining with input pkeys
800
        
801
        log_debug('Getting input table pkeys of inserted rows')
802
        # Note that mk_main_select() does not use ORDER BY. Instead, assume that
803
        # since the SELECT query is identical to the one used in INSERT SELECT,
804
        # its rows will be retrieved in the same order.
805
        sql.run_query_into(db, mk_main_select(input_joins, [in_pkey]),
806
            into=insert_in_pkeys)
807
        sql.add_row_num(db, insert_in_pkeys) # for joining with output pkeys
808
        
809
        assert sql.table_row_count(db, insert_out_pkeys) == sql.table_row_count(
810
            db, insert_in_pkeys)
811
        
812
        log_debug('Combining output and input pkeys in inserted order')
813
        pkey_joins = [insert_in_pkeys, sql_gen.Join(insert_out_pkeys,
814
            {sql.row_num_col: sql_gen.join_same_not_null})]
815
        in_col = sql_gen.Col(in_pkey, insert_in_pkeys)
816
        out_col = sql_gen.NamedCol(into_out_pkey,
817
            sql_gen.Col(out_pkey, insert_out_pkeys))
818
        insert_into_pkeys(sql.mk_select(db, pkey_joins, [in_col, out_col],
819
            order_by=None))
820
        
821
        sql.empty_temp(db, [insert_out_pkeys, insert_in_pkeys])
822
    
823
    if not is_function: # is_function doesn't leave holes
824
        log_debug('Setting pkeys of missing rows to '
825
            +strings.as_tt(strings.urepr(default)))
826
        
827
        full_in_pkey_col = sql_gen.Col(in_pkey, full_in_table)
828
        if sql_gen.is_table_col(default):
829
            default = sql_gen.with_table(default, full_in_table)
830
        missing_rows_joins = [full_in_table, sql_gen.Join(into,
831
            {in_pkey: sql_gen.join_same_not_null}, sql_gen.filter_out)]
832
            # must use join_same_not_null or query will take forever
833
        
834
        insert_args = dict(order_by=None)
835
        if not sql.table_has_pkey(db, full_in_table): # in_table has duplicates
836
            insert_args.update(dict(distinct_on=[full_in_pkey_col]))
837
        
838
        insert_into_pkeys(sql.mk_select(db, missing_rows_joins,
839
            [full_in_pkey_col, sql_gen.NamedCol(into_out_pkey, default)],
840
            **insert_args))
841
    # otherwise, there is already an entry for every row
842
    
843
    sql.empty_temp(db, insert_in_tables+[full_in_table])
844
    
845
    srcs = []
846
    if is_function: srcs = sql_gen.cols_srcs(in_cols)
847
    return sql_gen.Col(into_out_pkey, into, srcs)
(37-37/49)