# Database import/export

import copy
import csv
import operator
import warnings

import csvs
import exc
import dicts
import sql
import sql_gen
import strings
import util

##### Exceptions

# Can't use built-in SyntaxError because it stringifies to only the first line
class SyntaxError(Exception): pass

##### Data cleanup

null_strs = ['', '-', r'\N', 'NULL', 'UNKNOWN', 'nulo']

def cleanup_table(db, table):
    table = sql_gen.as_Table(table)
    cols = [sql_gen.as_Col(strings.ustr(c), table)
        for c in sql.table_cols(db, table)]
    cols = filter(lambda c: sql_gen.is_text_col(db, c), cols)
    if not cols: return
    
    db.log_debug('Cleaning up table', level=1.5)
    
    expr = 'trim(both from %s)'
    for null in null_strs: expr = 'nullif('+expr+', '+db.esc_value(null)+')'
    changes = [(v, sql_gen.CustomCode(expr % v.to_str(db))) for v in cols]
    
    while True:
        try:
            sql.update(db, table, changes, in_place=True, recover=True)
            break # successful
        except sql.NullValueException, e:
            db.log_debug('Caught exception: '+exc.str_(e))
            col, = e.cols
            sql.drop_not_null(db, col)
    
    db.log_debug('Vacuuming and reanalyzing table', level=1.5)
    sql.vacuum(db, table)
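
# Example (hedged sketch): cleanup_table() is typically run on a freshly
# imported staging table; the table name below is hypothetical.
#   cleanup_table(db, 'specimens_staging')
# Every text column is trimmed and any value in null_strs is replaced with
# NULL; NOT NULL constraints that would block this are dropped first.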

##### Error tracking

def track_data_error(db, errors_table, cols, value, error_code, error):
    '''
    @param errors_table If None, does nothing.
    '''
    if errors_table == None or cols == (): return
    
    for col in cols:
        try:
            sql.insert(db, errors_table, dict(column=col.name, value=value,
                error_code=error_code, error=error), recover=True,
                cacheable=True, log_level=4)
        except sql.DuplicateKeyException: pass

class ExcToErrorsTable(sql_gen.ExcToWarning):
    '''Handles an exception by saving it or converting it to a warning.'''
    def __init__(self, return_, srcs, errors_table, value=None):
        '''
        @param return_ See sql_gen.ExcToWarning
        @param srcs The column names for the errors table
        @param errors_table None|sql_gen.Table
        @param value The value (or an expression for it) that caused the error
        @pre The invalid value must be in a local variable "value" of type text.
        '''
        sql_gen.ExcToWarning.__init__(self, return_)
        
        value = sql_gen.as_Code(value)
        
        self.srcs = srcs
        self.errors_table = errors_table
        self.value = value
    
    def to_str(self, db):
        if not self.srcs or self.errors_table == None:
            return sql_gen.ExcToWarning.to_str(self, db)
        
        errors_table_cols = map(sql_gen.Col,
            ['column', 'value', 'error_code', 'error'])
        col_names_query = sql.mk_select(db, sql_gen.NamedValues('c', None,
            [[c.name] for c in self.srcs]), order_by=None)
        insert_query = sql.mk_insert_select(db, self.errors_table,
            errors_table_cols,
            sql_gen.Values(errors_table_cols).to_str(db))+';\n'
        return '''\
-- Save error in errors table.
DECLARE
    error_code text := SQLSTATE;
    error text := SQLERRM;
    value text := '''+self.value.to_str(db)+''';
    "column" text;
BEGIN
    -- Insert the value and error for *each* source column.
'''+strings.indent(sql_gen.RowExcIgnore(None, col_names_query, insert_query,
    row_var=errors_table_cols[0]).to_str(db))+'''
END;

'''+self.return_.to_str(db)

def data_exception_handler(*args, **kw_args):
    '''Handles a data_exception by saving it or converting it to a warning.
    For params, see ExcToErrorsTable().
    '''
    return sql_gen.data_exception_handler(ExcToErrorsTable(*args, **kw_args))
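
# Hedged note: cast() below is the canonical caller. It passes 'RETURN NULL;\n'
# as return_, so rows whose values raise a data_exception are recorded in the
# errors table (or reported as a warning) and yield NULL instead of aborting
# the enclosing statement.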

def cast(db, type_, col, errors_table=None):
    '''Casts an (unrenamed) column or value.
    If errors_table set and col has srcs, saves errors in errors_table (using
    col's srcs attr as source columns). Otherwise, converts errors to warnings.
    @param col str|sql_gen.Col|sql_gen.Literal
    @param errors_table None|sql_gen.Table|str
    '''
    col = sql_gen.as_Col(col)
    
    # Don't convert exceptions to warnings for user-supplied constants
    if isinstance(col, sql_gen.Literal): return sql_gen.Cast(type_, col)
    
    assert not isinstance(col, sql_gen.NamedCol)
    
    function_name = strings.first_word(type_)
    srcs = col.srcs
    save_errors = errors_table != None and srcs
    if save_errors: # function will be unique for the given srcs
        function_name = strings.ustr(sql_gen.FunctionCall(function_name,
            *map(sql_gen.to_name_only_col, srcs)))
    function = db.TempFunction(function_name)
    
    # Create function definition
    modifiers = 'STRICT'
    if not save_errors: modifiers = 'IMMUTABLE '+modifiers
    value_param = sql_gen.FunctionParam('value', 'text')
    handler = data_exception_handler('RETURN NULL;\n', srcs, errors_table,
        value_param.name)
    body = sql_gen.CustomCode(handler.to_str(db, '''\
/* The explicit cast to the return type is needed to make the cast happen
inside the try block. (Implicit casts to the return type happen at the end
of the function, outside any block.) */
RETURN value::'''+type_+''';
'''))
    body.lang='plpgsql'
    sql.define_func(db, sql_gen.FunctionDef(function, type_, body,
        [value_param], modifiers))
    
    return sql_gen.FunctionCall(function, col)
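
# Illustrative sketch (not verbatim output): for cast(db, 'date', in_col), the
# temp function defined above corresponds roughly to
#   CREATE FUNCTION "date"(value text) RETURNS date AS $$
#   BEGIN
#       RETURN value::date;
#   EXCEPTION WHEN data_exception THEN
#       -- either INSERT the bad value into the errors table or RAISE WARNING
#       RETURN NULL;
#   END $$ LANGUAGE plpgsql STRICT;
# The exact name, schema, and exception body depend on db.TempFunction() and on
# whether save_errors is set.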

def func_wrapper_exception_handler(db, return_, args, errors_table):
    '''Handles a function call's data_exceptions.
    Supports PL/Python functions.
    @param return_ See data_exception_handler()
    @param args [arg...] Function call's args
    @param errors_table See data_exception_handler()
    '''
    args = filter(sql_gen.has_srcs, args)
    
    srcs = sql_gen.cross_join_srcs(args)
    value = sql_gen.merge_not_null(db, ',', args)
    return sql_gen.NestedExcHandler(
        data_exception_handler(return_, srcs, errors_table, value)
        , sql_gen.plpythonu_error_handler
        )

def cast_temp_col(db, type_, col, errors_table=None):
    '''Like cast(), but creates a new column with the cast values if the input
    is a column.
    @return The new column or cast value
    '''
    def cast_(col): return cast(db, type_, col, errors_table)
    
    try: col = sql_gen.underlying_col(col)
    except sql_gen.NoUnderlyingTableException: return sql_gen.wrap(cast_, col)
    
    table = col.table
    new_col = sql_gen.suffixed_col(col, '::'+strings.first_word(type_))
    expr = cast_(col)
    
    # Add column
    new_typed_col = sql_gen.TypedCol(new_col.name, type_)
    sql.add_col(db, table, new_typed_col, comment=strings.urepr(col)+'::'+type_)
    new_col.name = new_typed_col.name # propagate any renaming
    
    sql.update(db, table, [(new_col, expr)], in_place=True, recover=True)
    
    return new_col

def errors_table(db, table, if_exists=True):
    '''
    @param if_exists If set, returns None if the errors table doesn't exist
    @return None|sql_gen.Table
    '''
    table = sql_gen.as_Table(table)
    if table.srcs != (): table = table.srcs[0]
    
    errors_table = sql_gen.suffixed_table(table, '.errors')
    if if_exists and not sql.table_exists(db, errors_table): return None
    return errors_table

def mk_errors_table(db, table):
    errors_table_ = errors_table(db, table, if_exists=False)
    if sql.table_exists(db, errors_table_, cacheable=False): return
    
    typed_cols = [
        sql_gen.TypedCol('column', 'text', nullable=False),
        sql_gen.TypedCol('value', 'text'),
        sql_gen.TypedCol('error_code', 'character varying(5)', nullable=False),
        sql_gen.TypedCol('error', 'text', nullable=False),
        ]
    sql.create_table(db, errors_table_, typed_cols, has_pkey=False)
    index_cols = ['column', 'value', 'error_code', 'error']
    sql.add_index(db, index_cols, errors_table_, unique=True)
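
# Hedged example: for an input table named "plots", errors_table(db, 'plots',
# if_exists=False) refers to a table named roughly "plots.errors" (the exact
# name comes from sql_gen.suffixed_table()), and mk_errors_table() creates it
# with a unique index on (column, value, error_code, error) so that
# track_data_error() records each distinct error only once.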

##### Import

def import_csv(db, table, stream, use_copy_from=True, has_row_num=True):
    def log(msg, level=1): db.log_debug(msg, level)
    
    # Get format info
    info = csvs.stream_info(stream, parse_header=True)
    dialect = info.dialect
    if csvs.is_tsv(dialect): use_copy_from = False
    col_names = map(strings.to_unicode, info.header)
    for i, col in enumerate(col_names): # replace empty column names
        if col == '': col_names[i] = 'column_'+str(i)
    
    # Select schema and escape names
    def esc_name(name): return db.esc_name(name)
    
    typed_cols = [sql_gen.TypedCol(v, 'text') for v in col_names]
    
    log('Creating table')
    sql.create_table(db, table, typed_cols, has_pkey=False, col_indexes=False)
    
    # Load the data
    def load_():
        if use_copy_from:
            log('Using COPY FROM')
            
            # Create COPY FROM statement
            copy_from = ('COPY '+table.to_str(db)+' FROM STDIN DELIMITER '
                +db.esc_value(dialect.delimiter)+' NULL '+db.esc_value(''))
            assert not csvs.is_tsv(dialect)
            copy_from += ' CSV'
            if dialect.quoting != csv.QUOTE_NONE:
                quote_str = db.esc_value(dialect.quotechar)
                copy_from += ' QUOTE '+quote_str
                if dialect.doublequote: copy_from += ' ESCAPE '+quote_str
            copy_from += ';\n'
            
            log(copy_from, level=2)
            db.db.cursor().copy_expert(copy_from, stream)
        else:
            log('Using INSERT')
            cols_ct = len(col_names)
            for row in csvs.make_reader(stream, dialect):
                row = map(strings.to_unicode, row)
                util.list_set_length(row, cols_ct) # truncate extra cols
                sql.insert(db, table, row, cacheable=False, log_level=5)
    sql.with_savepoint(db, load_)
    
    if has_row_num: sql.add_row_num(db, table)
    cleanup_table(db, table)
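
# Example (hedged sketch): loading a CSV file into a new all-text staging
# table; the file and table names are hypothetical.
#   stream = open('specimens.csv', 'rb')
#   import_csv(db, sql_gen.Table('specimens'), stream)
# The columns are taken from the CSV header, a row_num column is added (unless
# has_row_num=False), and cleanup_table() is run on the result.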

def put(db, table, row, pkey_=None, row_ct_ref=None):
    '''Recovers from errors.
    Only works under PostgreSQL (uses INSERT RETURNING).
    '''
    return put_table(db, table, [], row, row_ct_ref)

def get(db, table, row, pkey, row_ct_ref=None, create=False):
    '''Recovers from errors'''
    try:
        return sql.value(sql.select(db, table, [pkey], row, limit=1,
            recover=True))
    except StopIteration:
        if not create: raise
        return put(db, table, row, pkey, row_ct_ref) # insert new row
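
# Example (hedged): get-or-create a single row by a candidate key. The table
# and column names are hypothetical; the pkey is looked up the same way
# put_table() does it.
#   pkey = sql.pkey(db, 'party', recover=True)
#   party_id = get(db, 'party', {'organizationname': 'NYBG'}, pkey,
#       create=True)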

def is_func_result(col):
    return col.table.name.find('(') >= 0 and col.name == 'result'

def into_table_name(out_table, in_tables0, mapping, is_func):
    def in_col_str(in_col):
        in_col = sql_gen.remove_col_rename(in_col)
        if isinstance(in_col, sql_gen.Col):
            table = in_col.table
            if table == in_tables0:
                in_col = sql_gen.to_name_only_col(in_col)
            elif is_func_result(in_col): in_col = table # omit col name
        return strings.ustr(in_col)
    
    str_ = strings.ustr(out_table)
    if is_func:
        str_ += '('
        
        try: value_in_col = mapping['value']
        except KeyError:
            str_ += ', '.join((strings.ustr(k)+'='+in_col_str(v)
                for k, v in mapping.iteritems()))
        else: str_ += in_col_str(value_in_col)
        
        str_ += ')'
    else:
        out_col = 'rank'
        try: in_col = mapping[out_col]
        except KeyError: str_ += '_pkeys'
        else: # has a rank column, so hierarchical
            str_ += '['+strings.ustr(out_col)+'='+in_col_str(in_col)+']'
    return str_
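
# Hedged examples of the names this produces: for a function out_table,
# something like "func(value=col)" or "func(arg1=col1, arg2=col2)"; for a plain
# table, "table_pkeys", or "table[rank=col]" when the mapping has a rank column
# (hierarchical input).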

def put_table(db, out_table, in_tables, mapping, row_ct_ref=None, default=None,
    col_defaults={}, on_error=exc.reraise):
    '''Recovers from errors.
    Only works under PostgreSQL (uses INSERT RETURNING).
    IMPORTANT: Must be run at the *beginning* of a transaction.
    @param in_tables The main input table to select from, followed by a list of
        tables to join with it using the main input table's pkey
    @param mapping dict(out_table_col=in_table_col, ...)
        * out_table_col: str (*not* sql_gen.Col)
        * in_table_col: sql_gen.Col|literal-value
    @param default The *output* column to use as the pkey for missing rows.
        If this output column does not exist in the mapping, uses None.
    @param col_defaults Default values for required columns.
    @return sql_gen.Col Where the output pkeys are made available
    '''
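    # Example (hedged sketch): inserting rows from an input temp table into an
    # output table and getting back the matching output pkeys. All names here
    # are hypothetical.
    #   in_table = sql_gen.Table('in_plots')
    #   pkeys_col = put_table(db, 'plot', [in_table],
    #       {'plotname': sql_gen.Col('plot_id', in_table),
    #       'area': sql_gen.Col('area_m2', in_table)})
    # pkeys_col then points into a temp table that maps each in_table pkey to
    # the pkey of the matching (existing or newly inserted) plot row.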
    import psycopg2.extensions
    
    out_table = sql_gen.as_Table(out_table)
    
    def log_debug(msg): db.log_debug(msg, level=1.5)
    def col_ustr(str_):
        return strings.repr_no_u(sql_gen.remove_col_rename(str_))
    
    log_debug('********** New iteration **********')
    log_debug('Inserting these input columns into '+strings.as_tt(
        out_table.to_str(db))+':\n'+strings.as_table(mapping, ustr=col_ustr))
    
    is_function = sql.function_exists(db, out_table)
    
    # Warn if inserting empty table rows
    if not mapping and not is_function: # functions with no args OK
        warnings.warn(UserWarning('Inserting empty table row(s)'))
    
    if is_function: out_pkey = 'result'
    else: out_pkey = sql.pkey(db, out_table, recover=True)
    out_pkey_col = sql_gen.as_Col(out_pkey, out_table)
    
    in_tables_ = in_tables[:] # don't modify input!
    try: in_tables0 = in_tables_.pop(0) # first table is separate
    except IndexError: in_tables0 = None
    else:
        in_pkey = sql.pkey(db, in_tables0, recover=True)
        in_pkey_col = sql_gen.as_Col(in_pkey, in_tables0)
    
    # Determine if can use optimization for only literal values
    is_literals = not reduce(operator.or_, map(sql_gen.is_table_col,
        mapping.values()), False)
    is_literals_or_function = is_literals or is_function
    
    if in_tables0 == None: errors_table_ = None
    else: errors_table_ = errors_table(db, in_tables0)
    
    # Create input joins from list of input tables
    input_joins = [in_tables0]+[sql_gen.Join(v,
        {in_pkey: sql_gen.join_same_not_null}) for v in in_tables_]
    
    if mapping == {} and not is_function: # need >= one column for INSERT SELECT
        mapping = {out_pkey: None} # ColDict will replace with default value
    
    if not is_literals:
        into = sql_gen.as_Table(into_table_name(out_table, in_tables0, mapping,
            is_function))
        # Ensure into's out_pkey is different from in_pkey by prepending table
        if is_function: into_out_pkey = out_pkey
        else: into_out_pkey = strings.ustr(out_pkey_col)
        
        # Set column sources
        in_cols = filter(sql_gen.is_table_col, mapping.values())
        for col in in_cols:
            if col.table == in_tables0: col.set_srcs(sql_gen.src_self)
        
        log_debug('Joining together input tables into temp table')
        # Place in new table so don't modify input and for speed
        in_table = sql_gen.Table('in')
        mapping = dicts.join(mapping, sql.flatten(db, in_table, input_joins,
            in_cols, preserve=[in_pkey_col]))
        input_joins = [in_table]
        db.log_debug('Temp table: '+strings.as_tt(in_table.to_str(db)), level=2)
    
    # Wrap mapping in a sql_gen.ColDict.
    # sql_gen.ColDict sanitizes both keys and values passed into it.
    # Do after applying dicts.join() because that returns a plain dict.
    mapping = sql_gen.ColDict(db, out_table, mapping)
    
    # Resolve default value column
    if default != None:
        try: default = mapping[default]
        except KeyError:
            db.log_debug('Default value column '
                +strings.as_tt(strings.repr_no_u(default))
                +' does not exist in mapping, falling back to None', level=2.1)
            default = None
    
    # Save default values for all rows since in_table may have rows deleted
    if is_literals: pass
    elif is_function: full_in_table = in_table
    else:
        full_in_table = sql_gen.suffixed_table(in_table, '_full')
        full_in_table_cols = [in_pkey_col]
        if default != None:
            full_in_table_cols.append(default)
            default = sql_gen.with_table(default, full_in_table)
        sql.run_query_into(db, sql.mk_select(db, in_table, full_in_table_cols,
            order_by=None), into=full_in_table, add_pkey_=True)
    
    pkeys_table_exists_ref = [False]
    def insert_into_pkeys(joins, cols=None, limit=None, **kw_args):
        query = sql.mk_select(db, joins, cols, order_by=None, limit=limit)
        if pkeys_table_exists_ref[0]:
            sql.insert_select(db, into, [in_pkey, into_out_pkey], query,
                **kw_args)
        else:
            sql.run_query_into(db, query, into=into, add_pkey_=True, **kw_args)
            pkeys_table_exists_ref[0] = True
    
    limit_ref = [None]
    def mk_main_select(joins, cols):
        return sql.mk_select(db, joins, cols, limit=limit_ref[0], order_by=None)
    
    if is_literals: insert_in_table = None
    else:
        insert_in_table = in_table
        insert_in_tables = [insert_in_table]
    join_cols = sql_gen.ColDict(db, out_table)
    
    exc_strs = set()
    def log_exc(e):
        e_str = exc.str_(e, first_line_only=True)
        log_debug('Caught exception: '+e_str)
        if e_str in exc_strs: # avoid infinite loops
            log_debug('Exception already seen, handler broken')
            on_error(e)
            remove_all_rows()
        else: exc_strs.add(e_str)
    
    def remove_all_rows():
        log_debug('Ignoring all rows')
        limit_ref[0] = 0 # just create an empty pkeys table
    
    def ignore_cond(cond, e):
        if is_literals: remove_all_rows()
        else:
            out_table_cols = sql_gen.ColDict(db, out_table)
            out_table_cols.update(util.dict_subset_right_join({},
                sql.table_cols(db, out_table)))
            
            in_cols = []
            cond = sql.map_expr(db, cond, mapping, in_cols)
            cond = sql.map_expr(db, cond, out_table_cols)
            
            track_data_error(db, errors_table_, sql_gen.cols_srcs(in_cols),
                None, e.cause.pgcode,
                strings.ensure_newl(e.cause.pgerror)+'condition: '+cond)
            
            not_cond = sql_gen.NotCond(sql_gen.CustomCode(cond))
            log_debug('Ignoring rows where '+strings.as_tt(not_cond.to_str(db)))
            sql.delete(db, insert_in_table, not_cond)
    
    not_null_cols = set()
    def ignore(in_col, value, e):
        if sql_gen.is_table_col(in_col):
            in_col = sql_gen.with_table(in_col, insert_in_table)
            
            track_data_error(db, errors_table_, in_col.srcs, value,
                e.cause.pgcode, e.cause.pgerror)
            
            sql.add_index(db, in_col, insert_in_table) # enable fast filtering
            if value != None and in_col not in not_null_cols:
                log_debug('Replacing invalid value '
                    +strings.as_tt(strings.urepr(value))+' with NULL in column '
                    +strings.as_tt(in_col.to_str(db)))
                sql.update(db, insert_in_table, [(in_col, None)],
                    sql_gen.ColValueCond(in_col, value))
            else:
                log_debug('Ignoring rows with '+strings.as_tt(in_col.to_str(db))
                    +' = '+strings.as_tt(strings.urepr(value)))
                sql.delete(db, insert_in_table,
                    sql_gen.ColValueCond(in_col, value))
                if value == None: not_null_cols.add(in_col)
        else:
            assert isinstance(in_col, sql_gen.NamedCol)
            in_value = sql_gen.remove_col_rename(in_col)
            assert sql_gen.is_literal(in_value)
            if value == in_value.value:
                if value != None:
                    log_debug('Replacing invalid literal '
                        +strings.as_tt(in_col.to_str(db))+' with NULL')
                    mapping[in_col.name] = None
                else:
                    remove_all_rows()
            # otherwise, all columns were being ignore()d because the specific
            # column couldn't be identified, and this was not the invalid column
    
    if not is_literals:
        def insert_pkeys_table(which):
            return sql_gen.Table(sql_gen.concat(in_table.name,
                '_insert_'+which+'_pkeys'))
        insert_out_pkeys = insert_pkeys_table('out')
        insert_in_pkeys = insert_pkeys_table('in')
    
    def mk_func_call():
        args = dict(((k.name, v) for k, v in mapping.iteritems()))
        return sql_gen.FunctionCall(out_table, **args), args
    
    if is_function and not is_literals:
        log_debug('Defining wrapper function')
        
        func_call, args = mk_func_call()
        func_call = sql_gen.NamedCol(into_out_pkey, func_call)
        
        # Create empty pkeys table so its row type can be used
        insert_into_pkeys(input_joins, [in_pkey_col, func_call], limit=0,
            recover=True)
        result_type = db.col_info(sql_gen.Col(into_out_pkey, into)).type
        
        ## Create error handling wrapper function
        
        wrapper = db.TempFunction(sql_gen.concat(into.name, '_wrap'))
        
        select_cols = [in_pkey_col]+args.values()
        row_var = copy.copy(sql_gen.row_var)
        row_var.set_srcs([in_table])
        in_pkey_var = sql_gen.Col(in_pkey, row_var)
        
        args = dict(((k, sql_gen.with_table(v, row_var))
            for k, v in args.iteritems()))
        func_call = sql_gen.FunctionCall(out_table, **args)
        
        def mk_return(result):
            return sql_gen.ReturnQuery(sql.mk_select(db,
                fields=[in_pkey_var, result], explain=False))
        exc_handler = func_wrapper_exception_handler(db,
            mk_return(sql_gen.Cast(result_type, None)), args.values(),
            errors_table_)
        
        sql.define_func(db, sql_gen.FunctionDef(wrapper, sql_gen.SetOf(into),
            sql_gen.RowExcIgnore(sql_gen.RowType(in_table),
                sql.mk_select(db, input_joins, order_by=None),
                mk_return(func_call), exc_handler=exc_handler)
            ))
        wrapper_table = sql_gen.FunctionCall(wrapper)
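        # Descriptive note (added): wrapper_table is a call to the
        # set-returning wrapper just defined; selecting from it below runs
        # out_table's function once per in_table row, and rows whose arguments
        # raise a data_exception yield a NULL pkey instead of aborting the scan.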

    # Do inserts and selects
    while True:
        has_joins = join_cols != {}
        
        # Handle unrecoverable errors in a special case
        if limit_ref[0] == 0:
            if is_literals or default == None:
                default = sql_gen.remove_col_rename(default)
                log_debug('Returning default: '
                    +strings.as_tt(strings.urepr(default)))
                return default
            elif is_function: pass # empty pkeys table already created
            else:
                log_debug('Creating an empty output pkeys table')
                has_joins = False # use the no-joins case
                cur = sql.run_query_into(db, sql.mk_select(db, out_table,
                    [out_pkey], order_by=None, limit=0), into=insert_out_pkeys)
            
            break # don't do main case
        
        # Prepare to insert new rows
        if is_function:
            log_debug('Calling function on input rows')
            if is_literals: func_call, args = mk_func_call()
        else:
            log_debug('Trying to insert new rows')
            insert_args = dict(recover=True, cacheable=False)
            if has_joins:
                insert_args.update(dict(ignore=True))
            else:
                insert_args.update(dict(returning=out_pkey))
                if not is_literals:
                    insert_args.update(dict(into=insert_out_pkeys))
            main_select = mk_main_select([insert_in_table], [sql_gen.with_table(
                c, insert_in_table) for c in mapping.values()])
        
        try:
            cur = None
            if is_function:
                if is_literals:
                    cur = sql.select(db, fields=[func_call], recover=True,
                        cacheable=True)
                else: insert_into_pkeys(wrapper_table, recover=True)
            else:
                cur = sql.insert_select(db, out_table, mapping.keys(),
                    main_select, **insert_args)
            break # insert successful
        except sql.MissingCastException, e:
            log_exc(e)
            
            type_ = e.type
            if e.col == None: out_cols = mapping.keys()
            else: out_cols = [e.col]
            
            for out_col in out_cols:
                log_debug('Casting '+strings.as_tt(strings.repr_no_u(out_col))
                    +' input to '+strings.as_tt(type_))
                in_col = mapping[out_col]
                while True:
                    try:
                        mapping[out_col] = cast_temp_col(db, type_, in_col,
                            errors_table_)
                        break # cast successful
                    except sql.InvalidValueException, e:
                        log_exc(e)
                        
                        ignore(in_col, e.value, e)
        except sql.DuplicateKeyException, e:
            log_exc(e)
            
            # Different rows violating different unique constraints not
            # supported
            assert not join_cols
            
            join_cols.update(util.dict_subset_right_join(mapping, e.cols))
            log_debug('Ignoring existing rows, comparing on these columns:\n'
                +strings.as_inline_table(join_cols, ustr=col_ustr))
            
            if is_literals:
                return sql.value(sql.select(db, out_table, [out_pkey_col],
                    join_cols, order_by=None))
            
            # Uniquify input table to avoid internal duplicate keys
            insert_in_table = sql.distinct_table(db, insert_in_table,
                join_cols.values())
            insert_in_tables.append(insert_in_table)
        except sql.NullValueException, e:
            log_exc(e)
            
            out_col, = e.cols
            try: in_col = mapping[out_col]
            except KeyError, e:
                try: in_col = mapping[out_col] = col_defaults[out_col]
                except KeyError:
                    msg = 'Missing mapping for NOT NULL column '+out_col
                    log_debug(msg)
                    if default == None: warnings.warn(UserWarning(msg))
                        # not an error because sometimes the mappings include
                        # extra tables which aren't used by the dataset
                    remove_all_rows()
            else: ignore(in_col, None, e)
        except sql.CheckException, e:
            log_exc(e)
            
            ignore_cond(e.cond, e)
        except sql.InvalidValueException, e:
            log_exc(e)
            
            for in_col in mapping.values(): ignore(in_col, e.value, e)
        except psycopg2.extensions.TransactionRollbackError, e:
            log_exc(e)
            # retry
        except sql.DatabaseErrors, e:
            log_exc(e)
            
            log_debug('No handler for exception')
            on_error(e)
            remove_all_rows()
        # after exception handled, rerun loop with additional constraints
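        # Descriptive note (added): each handler above narrows the problem
        # (adds a cast, switches to join-based matching of existing rows, NULLs
        # or deletes the offending rows) and the loop then retries, until the
        # INSERT/SELECT succeeds or all rows have been ignored.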
    
    if cur != None and row_ct_ref != None and cur.rowcount >= 0:
        row_ct_ref[0] += cur.rowcount
    
    if is_literals: return sql.value(cur)
    
    if is_function: pass # pkeys table already created
    elif has_joins:
        select_joins = input_joins+[sql_gen.Join(out_table, join_cols)]
        log_debug('Getting output table pkeys of existing/inserted rows')
        insert_into_pkeys(select_joins, [in_pkey_col,
            sql_gen.NamedCol(into_out_pkey, out_pkey_col)])
    else:
        sql.add_row_num(db, insert_out_pkeys) # for joining with input pkeys
        
        log_debug('Getting input table pkeys of inserted rows')
        # Note that mk_main_select() does not use ORDER BY. Instead, assume that
        # since the SELECT query is identical to the one used in INSERT SELECT,
        # its rows will be retrieved in the same order.
        sql.run_query_into(db, mk_main_select(input_joins, [in_pkey]),
            into=insert_in_pkeys)
        sql.add_row_num(db, insert_in_pkeys) # for joining with output pkeys
        
        assert sql.table_row_count(db, insert_out_pkeys) == sql.table_row_count(
            db, insert_in_pkeys)
        
        log_debug('Combining output and input pkeys in inserted order')
        pkey_joins = [insert_in_pkeys, sql_gen.Join(insert_out_pkeys,
            {sql.row_num_col: sql_gen.join_same_not_null})]
        in_col = sql_gen.Col(in_pkey, insert_in_pkeys)
        out_col = sql_gen.NamedCol(into_out_pkey,
            sql_gen.Col(out_pkey, insert_out_pkeys))
        insert_into_pkeys(pkey_joins, [in_col, out_col])
        
        sql.empty_temp(db, [insert_out_pkeys, insert_in_pkeys])
    
    if limit_ref[0] == 0 or not is_function: # is_function doesn't leave holes
        log_debug('Setting pkeys of missing rows to '
            +strings.as_tt(strings.urepr(default)))
        missing_rows_joins = [full_in_table, sql_gen.Join(into,
            {in_pkey: sql_gen.join_same_not_null}, sql_gen.filter_out)]
            # must use join_same_not_null or query will take forever
        insert_into_pkeys(missing_rows_joins,
            [sql_gen.Col(in_pkey, full_in_table),
            sql_gen.NamedCol(into_out_pkey, default)])
    # otherwise, there is already an entry for every row
    
    assert (sql.table_row_count(db, into)
        == sql.table_row_count(db, full_in_table))
    
    sql.empty_temp(db, insert_in_tables+[full_in_table])
    
    srcs = []
    if is_function: srcs = sql_gen.cols_srcs(in_cols)
    return sql_gen.Col(into_out_pkey, into, srcs)