# Database import/export

import operator

import exc
import dicts
import sql
import sql_gen
import strings
import util

##### Data cleanup

def cleanup_table(db, table, cols):
    '''Trims whitespace and NULLs out empty strings and the \N null marker
    in the given columns of table'''
    table = sql_gen.as_Table(table)
    cols = map(sql_gen.as_Col, cols)
    
    expr = ('nullif(nullif(trim(both from %s), '+db.esc_value('')+'), '
        +db.esc_value(r'\N')+')')
    changes = [(v, sql_gen.CustomCode(expr % v.to_str(db)))
        for v in cols]
    
    sql.update(db, table, changes, in_place=True)
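
# Usage sketch (hypothetical; table and column names are illustrative, `db` is
# the connection wrapper used throughout this module):
#
#     cleanup_table(db, 'specimens_staging', ['catalognumber', 'notes'])
#     # trims whitespace in each listed column and maps '' and '\N' to NULL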

##### Error tracking

def track_data_error(db, errors_table, cols, value, error_code, error):
    '''
    @param errors_table If None, does nothing.
    '''
    if errors_table == None or cols == (): return
    
    for col in cols:
        try:
            sql.insert(db, errors_table, dict(column=col.name, value=value,
                error_code=error_code, error=error), recover=True,
                cacheable=True, log_level=4)
        except sql.DuplicateKeyException: pass
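
# Usage sketch (hypothetical; the error details are illustrative). cols must be
# objects with a .name attribute, e.g. a column's srcs, as in ignore() inside
# put_table() below:
#
#     track_data_error(db, errors_table(db, in_table), in_col.srcs,
#         '12,5', '22P02', 'invalid input syntax for type double precision')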

class ExcToErrorsTable(sql_gen.ExcToWarning):
    '''Handles an exception by saving it or converting it to a warning.'''
    def __init__(self, return_, srcs, errors_table, value=None):
        '''
        @param return_ See sql_gen.ExcToWarning
        @param srcs The column names for the errors table
        @param errors_table None|sql_gen.Table
        @param value The value (or an expression for it) that caused the error
        @pre The invalid value must be in a local variable "value" of type text.
        '''
        sql_gen.ExcToWarning.__init__(self, return_)
        
        value = sql_gen.as_Code(value)
        
        self.srcs = srcs
        self.errors_table = errors_table
        self.value = value
    
    def to_str(self, db):
        if not self.srcs or self.errors_table == None:
            return sql_gen.ExcToWarning.to_str(self, db)
        
        errors_table_cols = map(sql_gen.Col,
            ['column', 'value', 'error_code', 'error'])
        col_names_query = sql.mk_select(db, sql_gen.NamedValues('c', None,
            [[c.name] for c in self.srcs]), order_by=None)
        insert_query = sql.mk_insert_select(db, self.errors_table,
            errors_table_cols,
            sql_gen.Values(errors_table_cols).to_str(db))+';\n'
        return '''\
-- Save error in errors table.
DECLARE
    error_code text := SQLSTATE;
    error text := SQLERRM;
    value text := '''+self.value.to_str(db)+''';
BEGIN
    -- Insert the value and error for *each* source column.
'''+strings.indent(sql_gen.RowExcIgnore('text', col_names_query, insert_query,
    row_var=errors_table_cols[0]).to_str(db))+'''
END;

'''+self.return_.to_str(db)

def data_exception_handler(*args, **kw_args):
    '''Handles a data_exception by saving it or converting it to a warning.
    For params, see ExcToErrorsTable().
    '''
    return sql_gen.data_exception_handler(ExcToErrorsTable(*args, **kw_args))
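
# Note: the returned handler is not SQL by itself; its to_str() output is
# embedded in a PL/pgSQL function body. cast() below shows the in-module
# pattern: handler.to_str(db, <body>) wrapped in sql_gen.CustomCode().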

def cast(db, type_, col, errors_table=None):
    '''Casts an (unrenamed) column or value.
    If errors_table set and col has srcs, saves errors in errors_table (using
    col's srcs attr as source columns). Otherwise, converts errors to warnings.
    @param col str|sql_gen.Col|sql_gen.Literal
    @param errors_table None|sql_gen.Table|str
    '''
    col = sql_gen.as_Col(col)
    
    # Don't convert exceptions to warnings for user-supplied constants
    if isinstance(col, sql_gen.Literal): return sql_gen.Cast(type_, col)
    
    assert not isinstance(col, sql_gen.NamedCol)
    
    function_name = strings.first_word(type_)
    srcs = col.srcs
    save_errors = errors_table != None and srcs
    if save_errors: # function will be unique for the given srcs
        function_name = str(sql_gen.FunctionCall(function_name,
            *map(sql_gen.to_name_only_col, srcs)))
    function = db.TempFunction(function_name)
    
    # Create function definition
    modifiers = 'STRICT'
    if not save_errors: modifiers = 'IMMUTABLE '+modifiers
    value_param = sql_gen.FunctionParam('value', 'text')
    handler = data_exception_handler('RETURN NULL;\n', srcs, errors_table,
        value_param.name)
    body = sql_gen.CustomCode(handler.to_str(db, '''\
/* The explicit cast to the return type is needed to make the cast happen
inside the try block. (Implicit casts to the return type happen at the end
of the function, outside any block.) */
RETURN value::'''+type_+''';
'''))
    body.lang='plpgsql'
    sql.define_func(db, sql_gen.FunctionDef(function, type_, body,
        [value_param], modifiers))
    
    return sql_gen.FunctionCall(function, col)
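
# Usage sketch (hypothetical; column and type names are illustrative). cast()
# returns a sql_gen.FunctionCall expression that can be used wherever generated
# SQL is accepted:
#
#     expr = cast(db, 'double precision', sql_gen.Col('elevation_m', in_table),
#         errors_table(db, in_table))
#     # values that fail to parse come back as NULL; whether the failure is
#     # saved to the errors table or turned into a warning depends on the
#     # column's srcs (see the docstring above)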

def func_wrapper_exception_handler(return_, args, errors_table):
    '''Handles a function call's data_exceptions.
    Supports PL/Python functions.
    @param return_ See data_exception_handler()
    @param args [arg...] Function call's args
    @param errors_table See data_exception_handler()
    '''
    args = filter(sql_gen.has_srcs, args)
    
    srcs = sql_gen.cross_join_srcs(args)
    value = sql_gen.ArrayJoin(',', args)
    return sql_gen.NestedExcHandler(
        data_exception_handler(return_, srcs, errors_table, value)
        , sql_gen.plpythonu_error_handler
        )

def cast_temp_col(db, type_, col, errors_table=None):
    '''Like cast(), but creates a new column with the cast values if the input
    is a column.
    @return The new column or cast value
    '''
    def cast_(col): return cast(db, type_, col, errors_table)
    
    try: col = sql_gen.underlying_col(col)
    except sql_gen.NoUnderlyingTableException: return sql_gen.wrap(cast_, col)
    
    table = col.table
    new_col = sql_gen.suffixed_col(col, '::'+strings.first_word(type_))
    expr = cast_(col)
    
    # Add column
    new_typed_col = sql_gen.TypedCol(new_col.name, type_)
    sql.add_col(db, table, new_typed_col, comment=repr(col)+'::'+type_)
    new_col.name = new_typed_col.name # propagate any renaming
    
    sql.update(db, table, [(new_col, expr)], in_place=True, recover=True)
    
    return new_col
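
# Usage sketch (hypothetical; names are illustrative). Unlike cast(), this
# materializes the cast values in a new column (suffixed with '::' plus the
# type's first word) on the column's underlying table and returns that column:
#
#     date_col = cast_temp_col(db, 'date', sql_gen.Col('datecollected', in_table),
#         errors_table_)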

def errors_table(db, table, if_exists=True):
    '''
    @param if_exists If set, returns None if the errors table doesn't exist
    @return None|sql_gen.Table
    '''
    table = sql_gen.as_Table(table)
    if table.srcs != (): table = table.srcs[0]
    
    errors_table = sql_gen.suffixed_table(table, '.errors')
    if if_exists and not sql.table_exists(db, errors_table): return None
    return errors_table
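
# Usage sketch (hypothetical; the table name is illustrative):
#
#     errs = errors_table(db, 'specimens')
#     # -> the sql_gen.Table named 'specimens.errors' if it exists, else None;
#     # pass if_exists=False to get the Table object without the existence check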

##### Import

def put(db, table, row, pkey_=None, row_ct_ref=None):
    '''Recovers from errors.
    Only works under PostgreSQL (uses INSERT RETURNING).
    '''
    row = sql_gen.ColDict(db, table, row)
    if pkey_ == None: pkey_ = sql.pkey(db, table, recover=True)
    
    try:
        cur = sql.insert(db, table, row, pkey_, recover=True, log_level=3.5)
        if row_ct_ref != None and cur.rowcount >= 0:
            row_ct_ref[0] += cur.rowcount
        return sql.value(cur)
    except sql.DuplicateKeyException, e:
        row = sql_gen.ColDict(db, table,
            util.dict_subset_right_join(row, e.cols))
        return sql.value(sql.select(db, table, [pkey_], row, recover=True,
            log_level=3.5))
    except sql.NullValueException: return None
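
# Usage sketch (hypothetical; the table, column, and value are illustrative):
#
#     row_ct = [0] # pass-by-reference row counter
#     party_id = put(db, 'party', {'organizationname': 'Example Org'},
#         row_ct_ref=row_ct)
#     # returns the new row's pkey, or the existing row's pkey on a duplicate
#     # key, or None if a NOT NULL constraint was violated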

def get(db, table, row, pkey, row_ct_ref=None, create=False):
    '''Recovers from errors'''
    try:
        return sql.value(sql.select(db, table, [pkey], row, limit=1,
            recover=True))
    except StopIteration:
        if not create: raise
        return put(db, table, row, pkey, row_ct_ref) # insert new row
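
# Usage sketch (hypothetical; names are illustrative). get() looks the row up
# first and only inserts (via put()) when create=True and no match exists:
#
#     party_id = get(db, 'party', {'organizationname': 'Example Org'},
#         'party_id', create=True)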

def is_func_result(col):
    '''Whether col is the "result" column of a function call's output table'''
    return col.table.name.find('(') >= 0 and col.name == 'result'

def into_table_name(out_table, in_tables0, mapping, is_func):
    def in_col_str(in_col):
        in_col = sql_gen.remove_col_rename(in_col)
        if isinstance(in_col, sql_gen.Col):
            table = in_col.table
            if table == in_tables0:
                in_col = sql_gen.to_name_only_col(in_col)
            elif is_func_result(in_col): in_col = table # omit col name
        return str(in_col)
    
    str_ = str(out_table)
    if is_func:
        str_ += '('
        
        try: value_in_col = mapping['value']
        except KeyError:
            str_ += ', '.join((str(k)+'='+in_col_str(v)
                for k, v in mapping.iteritems()))
        else: str_ += in_col_str(value_in_col)
        
        str_ += ')'
    else:
        out_col = 'rank'
        try: in_col = mapping[out_col]
        except KeyError: str_ += '_pkeys'
        else: # has a rank column, so hierarchical
            str_ += '['+str(out_col)+'='+in_col_str(in_col)+']'
    return str_
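
# Naming sketch (values are illustrative): for a plain table the generated name
# is '<out_table>_pkeys', or '<out_table>[rank=<in col>]' when the mapping has a
# rank column (hierarchical case); for a function it is '<out_table>(<value>)'
# or '<out_table>(<k>=<v>, ...)'.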

def put_table(db, out_table, in_tables, mapping, row_ct_ref=None, into=None,
    default=None, is_func=False, on_error=exc.raise_):
    '''Recovers from errors.
    Only works under PostgreSQL (uses INSERT RETURNING).
    IMPORTANT: Must be run at the *beginning* of a transaction.
    @param in_tables The main input table to select from, followed by a list of
        tables to join with it using the main input table's pkey
    @param mapping dict(out_table_col=in_table_col, ...)
        * out_table_col: str (*not* sql_gen.Col)
        * in_table_col: sql_gen.Col|literal-value
    @param into The table to contain the output and input pkeys.
        Defaults to `out_table.name+'_pkeys'`.
    @param default The *output* column to use as the pkey for missing rows.
        If this output column does not exist in the mapping, uses None.
    @param is_func Whether out_table is the name of a SQL function, not a table
    @return sql_gen.Col Where the output pkeys are made available
    '''
    import psycopg2.extensions
    
    out_table = sql_gen.as_Table(out_table)
    
    def log_debug(msg): db.log_debug(msg, level=1.5)
    def col_ustr(str_):
        return strings.repr_no_u(sql_gen.remove_col_rename(str_))
    
    log_debug('********** New iteration **********')
    log_debug('Inserting these input columns into '+strings.as_tt(
        out_table.to_str(db))+':\n'+strings.as_table(mapping, ustr=col_ustr))
    
    is_function = sql.function_exists(db, out_table)
    
    if is_function: out_pkey = 'result'
    else: out_pkey = sql.pkey(db, out_table, recover=True)
    out_pkey_col = sql_gen.as_Col(out_pkey, out_table)
    
    in_tables_ = in_tables[:] # don't modify input!
    try: in_tables0 = in_tables_.pop(0) # first table is separate
    except IndexError: in_tables0 = None
    else:
        in_pkey = sql.pkey(db, in_tables0, recover=True)
        in_pkey_col = sql_gen.as_Col(in_pkey, in_tables0)
    
    # Determine if can use optimization for only literal values
    is_literals = not reduce(operator.or_, map(sql_gen.is_table_col,
        mapping.values()), False)
    is_literals_or_function = is_literals or is_function
    
    if in_tables0 == None: errors_table_ = None
    else: errors_table_ = errors_table(db, in_tables0)
    
    # Create input joins from list of input tables
    input_joins = [in_tables0]+[sql_gen.Join(v,
        {in_pkey: sql_gen.join_same_not_null}) for v in in_tables_]
    
    if mapping == {} and not is_function: # need >= one column for INSERT SELECT
        mapping = {out_pkey: None} # ColDict will replace with default value
    
    if not is_literals:
        if into == None:
            into = into_table_name(out_table, in_tables0, mapping, is_func)
        into = sql_gen.as_Table(into)
        
        # Set column sources
        in_cols = filter(sql_gen.is_table_col, mapping.values())
        for col in in_cols:
            if col.table == in_tables0: col.set_srcs(sql_gen.src_self)
        
        log_debug('Joining together input tables into temp table')
        # Place in new table so don't modify input and for speed
        in_table = sql_gen.Table('in')
        mapping = dicts.join(mapping, sql.flatten(db, in_table, input_joins,
            in_cols, preserve=[in_pkey_col]))
        input_joins = [in_table]
        db.log_debug('Temp table: '+strings.as_tt(in_table.to_str(db)), level=2)
    
    mapping = sql_gen.ColDict(db, out_table, mapping)
        # after applying dicts.join() because that returns a plain dict
    
    # Resolve default value column
    if default != None:
        try: default = mapping[default]
        except KeyError:
            db.log_debug('Default value column '
                +strings.as_tt(strings.repr_no_u(default))
                +' does not exist in mapping, falling back to None', level=2.1)
            default = None
    
    # Save default values for all rows since in_table may have rows deleted
    if is_literals: pass
    elif is_function: full_in_table = in_table
    else:
        full_in_table = sql_gen.suffixed_table(in_table, '_full')
        full_in_table_cols = [in_pkey_col]
        if default != None:
            full_in_table_cols.append(default)
            default = sql_gen.with_table(default, full_in_table)
        sql.run_query_into(db, sql.mk_select(db, in_table, full_in_table_cols,
            order_by=None), into=full_in_table, add_pkey_=True)
    
    if not is_literals:
        pkeys_names = [in_pkey, out_pkey]
        pkeys_cols = [in_pkey_col, out_pkey_col]
    
    pkeys_table_exists_ref = [False]
    def insert_into_pkeys(joins, cols=None, limit=None, **kw_args):
        query = sql.mk_select(db, joins, cols, order_by=None, limit=limit)
        if pkeys_table_exists_ref[0]:
            sql.insert_select(db, into, pkeys_names, query, **kw_args)
        else:
            sql.run_query_into(db, query, into=into, add_pkey_=True, **kw_args)
            pkeys_table_exists_ref[0] = True
    
    limit_ref = [None]
    def mk_main_select(joins, cols):
        return sql.mk_select(db, joins, cols, limit=limit_ref[0], order_by=None)
    
    if is_literals: insert_in_table = None
    else:
        insert_in_table = in_table
        insert_in_tables = [insert_in_table]
    join_cols = sql_gen.ColDict(db, out_table)
    
    exc_strs = set()
    def log_exc(e):
        e_str = exc.str_(e, first_line_only=True)
        log_debug('Caught exception: '+e_str)
        assert e_str not in exc_strs # avoid infinite loops
        exc_strs.add(e_str)
    
    def remove_all_rows():
        log_debug('Ignoring all rows')
        limit_ref[0] = 0 # just create an empty pkeys table
    
    def ignore_cond(cond, e):
        out_table_cols = sql_gen.ColDict(db, out_table)
        out_table_cols.update(util.dict_subset_right_join({},
            sql.table_cols(db, out_table)))
        
        in_cols = []
        cond = sql.map_expr(db, cond, mapping, in_cols)
        cond = sql.map_expr(db, cond, out_table_cols)
        
        track_data_error(db, errors_table_, sql_gen.cols_srcs(in_cols), None,
            e.cause.pgcode,
            strings.ensure_newl(e.cause.pgerror)+'condition: '+cond)
        
        not_cond = sql_gen.NotCond(sql_gen.CustomCode(cond))
        log_debug('Ignoring rows where '+strings.as_tt(not_cond.to_str(db)))
        sql.delete(db, insert_in_table, not_cond)
    
    not_null_cols = set()
    def ignore(in_col, value, e):
        in_col = sql_gen.with_table(in_col, insert_in_table)
        
        track_data_error(db, errors_table_, in_col.srcs, value,
            e.cause.pgcode, e.cause.pgerror)
        log_debug('Ignoring rows with '+strings.as_tt(repr(in_col))+' = '
            +strings.as_tt(repr(value)))
        
        sql.add_index(db, in_col, insert_in_table) # enable fast filtering
        if value != None and in_col not in not_null_cols:
            # Try just mapping the value to NULL
            sql.update(db, insert_in_table, [(in_col, None)],
                sql_gen.ColValueCond(in_col, value))
        else:
            sql.delete(db, insert_in_table, sql_gen.ColValueCond(in_col, value))
            if value == None: not_null_cols.add(in_col)
    
    if not is_literals:
        def insert_pkeys_table(which):
            return sql_gen.Table(sql_gen.concat(in_table.name,
                '_insert_'+which+'_pkeys'))
        insert_out_pkeys = insert_pkeys_table('out')
        insert_in_pkeys = insert_pkeys_table('in')
    
    # Do inserts and selects
    while True:
        has_joins = join_cols != {}
        
        if limit_ref[0] == 0: # special case
            assert not has_joins
            
            if is_literals: return None
            log_debug('Creating an empty output pkeys table')
            cur = sql.run_query_into(db, sql.mk_select(db, out_table,
                [out_pkey], order_by=None, limit=0), into=insert_out_pkeys)
            break # don't do main case
        
        log_debug('Trying to insert new rows')
        
        # Prepare to insert new rows
        if is_function:
            log_debug('Calling function on input rows')
            args = dict(((k.name, v) for k, v in mapping.iteritems()))
            func_call = sql_gen.NamedCol(out_pkey,
                sql_gen.FunctionCall(out_table, **args))
            
            if not is_literals:
                # Create empty pkeys table so its row type can be used
                insert_into_pkeys(input_joins, [in_pkey_col, func_call],
                    limit=0, recover=True)
                
                ## Create error handling wrapper function
                
                wrapper = db.TempFunction(sql_gen.concat(into.name, '_wrap'))
                
                select_cols = [in_pkey_col]+args.values()
                row_var = sql_gen.Table('row')
                in_pkey_var = sql_gen.Col(in_pkey, 'row')
                
                args = dict(((k, sql_gen.with_table(v, row_var))
                    for k, v in args.iteritems()))
                func_call = sql_gen.FunctionCall(out_table, **args)
                
                def mk_return(result):
                    return sql_gen.ReturnQuery(sql.mk_select(db,
                        fields=[in_pkey_var, result], explain=False))
                exc_handler = func_wrapper_exception_handler(mk_return(None),
                    args.values(), errors_table_)
                
                sql.define_func(db, sql_gen.FunctionDef(wrapper,
                    sql_gen.SetOf(into),
                    sql_gen.RowExcIgnore(sql_gen.RowType(in_table),
                        sql.mk_select(db, input_joins, order_by=None),
                        mk_return(func_call), exc_handler=exc_handler)
                    ))
                wrapper_table = sql_gen.FunctionCall(wrapper)
        else:
            insert_args = dict(recover=True, cacheable=False)
            if has_joins:
                insert_args.update(dict(ignore=True))
            else:
                insert_args.update(dict(returning=out_pkey))
                if not is_literals:
                    insert_args.update(dict(into=insert_out_pkeys))
            main_select = mk_main_select([insert_in_table], [sql_gen.with_table(
                c, insert_in_table) for c in mapping.values()])
        
        try:
            cur = None
            if is_function:
                if is_literals: cur = sql.select(db, fields=[func_call])
                else: insert_into_pkeys(wrapper_table, recover=True)
            else:
                cur = sql.insert_select(db, out_table, mapping.keys(),
                    main_select, **insert_args)
            break # insert successful
        except sql.MissingCastException, e:
            log_exc(e)
            
            out_col = e.col
            type_ = e.type
            
            log_debug('Casting '+strings.as_tt(out_col)+' input to '
                +strings.as_tt(type_))
            in_col = mapping[out_col]
            while True:
                try:
                    mapping[out_col] = cast_temp_col(db, type_, in_col,
                        errors_table_)
                    break # cast successful
                except sql.InvalidValueException, e:
                    log_exc(e)
                    
                    ignore(in_col, e.value, e)
        except sql.DuplicateKeyException, e:
            log_exc(e)
            
            # Different rows violating different unique constraints not
            # supported
            assert not join_cols
            
            join_cols.update(util.dict_subset_right_join(mapping, e.cols))
            log_debug('Ignoring existing rows, comparing on these columns:\n'
                +strings.as_inline_table(join_cols, ustr=col_ustr))
            
            if is_literals:
                return sql.value(sql.select(db, out_table, [out_pkey_col],
                    mapping, order_by=None))
            
            # Uniquify input table to avoid internal duplicate keys
            insert_in_table = sql.distinct_table(db, insert_in_table,
                join_cols.values())
            insert_in_tables.append(insert_in_table)
        except sql.NullValueException, e:
            log_exc(e)
            
            out_col, = e.cols
            try: in_col = mapping[out_col]
            except KeyError:
                msg = 'Missing mapping for NOT NULL column '+out_col
                log_debug(msg)
                if default == None: on_error(SyntaxError(msg)) # required col
                remove_all_rows()
            else: ignore(in_col, None, e)
        except sql.CheckException, e:
            log_exc(e)
            
            ignore_cond(e.cond, e)
        except sql.InvalidValueException, e:
            log_exc(e)
            
            for in_col in mapping.values(): ignore(in_col, e.value, e)
        except psycopg2.extensions.TransactionRollbackError, e:
            log_exc(e)
            # retry
        except sql.DatabaseErrors, e:
            log_exc(e)
            
            log_debug('No handler for exception')
            on_error(e)
            remove_all_rows()
        # after exception handled, rerun loop with additional constraints
    
    if cur != None and row_ct_ref != None and cur.rowcount >= 0:
        row_ct_ref[0] += cur.rowcount
    
    if is_literals_or_function: pass # pkeys table already created
    elif has_joins:
        select_joins = input_joins+[sql_gen.Join(out_table, join_cols)]
        log_debug('Getting output table pkeys of existing/inserted rows')
        insert_into_pkeys(select_joins, pkeys_cols)
    else:
        sql.add_row_num(db, insert_out_pkeys) # for joining with input pkeys
        
        log_debug('Getting input table pkeys of inserted rows')
        # Note that mk_main_select() does not use ORDER BY. Instead, assume that
        # since the SELECT query is identical to the one used in INSERT SELECT,
        # its rows will be retrieved in the same order.
        sql.run_query_into(db, mk_main_select(input_joins, [in_pkey]),
            into=insert_in_pkeys)
        sql.add_row_num(db, insert_in_pkeys) # for joining with output pkeys
        
        assert sql.table_row_count(db, insert_out_pkeys) == sql.table_row_count(
            db, insert_in_pkeys)
        
        log_debug('Combining output and input pkeys in inserted order')
        pkey_joins = [insert_in_pkeys, sql_gen.Join(insert_out_pkeys,
            {sql.row_num_col: sql_gen.join_same_not_null})]
        insert_into_pkeys(pkey_joins, pkeys_names)
        
        sql.empty_temp(db, [insert_out_pkeys, insert_in_pkeys])
    
    if not is_literals_or_function:
        log_debug('Setting pkeys of missing rows to '
            +strings.as_tt(repr(default)))
        missing_rows_joins = [full_in_table, sql_gen.Join(into,
            {in_pkey: sql_gen.join_same_not_null}, sql_gen.filter_out)]
            # must use join_same_not_null or query will take forever
        insert_into_pkeys(missing_rows_joins,
            [sql_gen.Col(in_pkey, full_in_table),
            sql_gen.NamedCol(out_pkey, default)])
    # otherwise, there is already an entry for every row
    
    if is_literals: return sql.value(cur)
    else:
        assert (sql.table_row_count(db, into)
            == sql.table_row_count(db, full_in_table))
        
        sql.empty_temp(db, insert_in_tables+[full_in_table])
        
        srcs = []
        if is_func: srcs = sql_gen.cols_srcs(in_cols)
        return sql_gen.Col(out_pkey, into, srcs)
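
# Usage sketch (hypothetical; all table and column names are illustrative).
# put_table() maps staging-table columns onto one output table and returns the
# column under which the output pkeys are made available; per the docstring, it
# must be run at the beginning of a transaction:
#
#     row_ct = [0]
#     pkeys_col = put_table(db, 'taxonomicname', [in_table],
#         {'name': sql_gen.Col('scientificname', in_table)}, row_ct_ref=row_ct)
#     # pkeys_col.table is the '<out_table>_pkeys' temp table pairing each input
#     # pkey with the matching or newly inserted output pkey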