# Database import/export

import operator

import exc
import dicts
import sql
import sql_gen
import strings
import util

##### Data cleanup

def cleanup_table(db, table, cols):
    '''Trims whitespace and converts '' and \N to NULL in the given columns'''
    table = sql_gen.as_Table(table)
    cols = map(sql_gen.as_Col, cols)
    
    expr = ('nullif(nullif(trim(both from %s), '+db.esc_value('')+'), '
        +db.esc_value(r'\N')+')')
    changes = [(v, sql_gen.CustomCode(expr % v.to_str(db)))
        for v in cols]
    
    sql.update(db, table, changes, in_place=True)
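
# Usage sketch (hypothetical table and column names; `db` is the module's usual
# database connection object):
#     cleanup_table(db, 'plot', ['plotname', 'notes'])
#     # trims whitespace in each listed column and maps '' and '\N' to NULL,
#     # updating the table in place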

##### Error tracking

def track_data_error(db, errors_table, cols, value, error_code, error):
    '''
    @param errors_table If None, does nothing.
    @param cols [sql_gen.Col...] The source columns. If (), does nothing.
    '''
    if errors_table == None or cols == (): return
    
    for col in cols:
        try:
            sql.insert(db, errors_table, dict(column=col.name, value=value,
                error_code=error_code, error=error), recover=True,
                cacheable=True, log_level=4)
        except sql.DuplicateKeyException: pass
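
# Usage sketch (hypothetical values; errors_table_ would normally come from
# errors_table() below, and cols are sql_gen.Col-style objects with a .name):
#     track_data_error(db, errors_table_, [sql_gen.Col('elevation')], '12a3',
#         '22P02', 'invalid input syntax for type double precision')
#     # inserts one error row per source column, skipping duplicates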

class ExcToErrorsTable(sql_gen.ExcToWarning):
    '''Handles an exception by saving it or converting it to a warning.'''
    def __init__(self, return_, srcs, errors_table, value=None):
        '''
        @param return_ See sql_gen.ExcToWarning
        @param srcs The column names for the errors table
        @param errors_table None|sql_gen.Table
        @param value The value (or an expression for it) that caused the error
        @pre The invalid value must be in a local variable "value" of type text.
        '''
        sql_gen.ExcToWarning.__init__(self, return_)
        
        value = sql_gen.as_Code(value)
        
        self.srcs = srcs
        self.errors_table = errors_table
        self.value = value
    
    def to_str(self, db):
        if not self.srcs or self.errors_table == None:
            return sql_gen.ExcToWarning.to_str(self, db)
        
        errors_table_cols = map(sql_gen.Col,
            ['column', 'value', 'error_code', 'error'])
        col_names_query = sql.mk_select(db, sql_gen.NamedValues('c', None,
            [[c.name] for c in self.srcs]), order_by=None)
        insert_query = sql.mk_insert_select(db, self.errors_table,
            errors_table_cols,
            sql_gen.Values(errors_table_cols).to_str(db))+';\n'
        return '''\
-- Save error in errors table.
DECLARE
    error_code text := SQLSTATE;
    error text := SQLERRM;
    value text := '''+self.value.to_str(db)+''';
    "column" text;
BEGIN
    -- Insert the value and error for *each* source column.
'''+strings.indent(sql_gen.RowExcIgnore(None, col_names_query, insert_query,
    row_var=errors_table_cols[0]).to_str(db))+'''
END;

'''+self.return_.to_str(db)

def data_exception_handler(*args, **kw_args):
    '''Handles a data_exception by saving it or converting it to a warning.
    For params, see ExcToErrorsTable().
    '''
    return sql_gen.data_exception_handler(ExcToErrorsTable(*args, **kw_args))
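
# Note: cast() and func_wrapper_exception_handler() below are the callers of
# this handler; they pass a RETURN statement for return_, the value's source
# columns for srcs, and the expression holding the offending value.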

def cast(db, type_, col, errors_table=None):
    '''Casts an (unrenamed) column or value.
    If errors_table set and col has srcs, saves errors in errors_table (using
    col's srcs attr as source columns). Otherwise, converts errors to warnings.
    @param col str|sql_gen.Col|sql_gen.Literal
    @param errors_table None|sql_gen.Table|str
    '''
    col = sql_gen.as_Col(col)
    
    # Don't convert exceptions to warnings for user-supplied constants
    if isinstance(col, sql_gen.Literal): return sql_gen.Cast(type_, col)
    
    assert not isinstance(col, sql_gen.NamedCol)
    
    function_name = strings.first_word(type_)
    srcs = col.srcs
    save_errors = errors_table != None and srcs
    if save_errors: # function will be unique for the given srcs
        function_name = str(sql_gen.FunctionCall(function_name,
            *map(sql_gen.to_name_only_col, srcs)))
    function = db.TempFunction(function_name)
    
    # Create function definition
    modifiers = 'STRICT'
    if not save_errors: modifiers = 'IMMUTABLE '+modifiers
    value_param = sql_gen.FunctionParam('value', 'text')
    handler = data_exception_handler('RETURN NULL;\n', srcs, errors_table,
        value_param.name)
    body = sql_gen.CustomCode(handler.to_str(db, '''\
/* The explicit cast to the return type is needed to make the cast happen
inside the try block. (Implicit casts to the return type happen at the end
of the function, outside any block.) */
RETURN value::'''+type_+''';
'''))
    body.lang='plpgsql'
    sql.define_func(db, sql_gen.FunctionDef(function, type_, body,
        [value_param], modifiers))
    
    return sql_gen.FunctionCall(function, col)
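
# Usage sketch (hypothetical column, table `in_table`, and type; errors_table_
# as returned by errors_table() below). The result is an expression that
# applies a temp casting function to the column, so invalid values become
# warnings or error-table rows instead of aborting the query:
#     cast_expr = cast(db, 'double precision',
#         sql_gen.Col('elevation_verbatim', in_table), errors_table_)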

def func_wrapper_exception_handler(return_, args, errors_table):
    '''Handles a function call's data_exceptions.
    Supports PL/Python functions.
    @param return_ See data_exception_handler()
    @param args [arg...] Function call's args
    @param errors_table See data_exception_handler()
    '''
    args = filter(sql_gen.has_srcs, args)
    
    srcs = sql_gen.cross_join_srcs(args)
    value = sql_gen.ArrayJoin(',', args)
    return sql_gen.NestedExcHandler(
        data_exception_handler(return_, srcs, errors_table, value)
        , sql_gen.plpythonu_error_handler
        )

def cast_temp_col(db, type_, col, errors_table=None):
    '''Like cast(), but creates a new column with the cast values if the input
    is a column.
    @return The new column or cast value
    '''
    def cast_(col): return cast(db, type_, col, errors_table)
    
    try: col = sql_gen.underlying_col(col)
    except sql_gen.NoUnderlyingTableException: return sql_gen.wrap(cast_, col)
    
    table = col.table
    new_col = sql_gen.suffixed_col(col, '::'+strings.first_word(type_))
    expr = cast_(col)
    
    # Add column
    new_typed_col = sql_gen.TypedCol(new_col.name, type_)
    sql.add_col(db, table, new_typed_col, comment=repr(col)+'::'+type_)
    new_col.name = new_typed_col.name # propagate any renaming
    
    sql.update(db, table, [(new_col, expr)], in_place=True, recover=True)
    
    return new_col
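
# Usage sketch (hypothetical column and type): when col is a real table column,
# the cast values are materialized in a new column on the same table, named by
# suffixing the original name with '::' plus the type's first word:
#     date_col = cast_temp_col(db, 'timestamp with time zone',
#         sql_gen.Col('datecollected_verbatim', in_table), errors_table_)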

def errors_table(db, table, if_exists=True):
    '''
    @param if_exists If set, returns None if the errors table doesn't exist
    @return None|sql_gen.Table
    '''
    table = sql_gen.as_Table(table)
    if table.srcs != (): table = table.srcs[0]
    
    errors_table = sql_gen.suffixed_table(table, '.errors')
    if if_exists and not sql.table_exists(db, errors_table): return None
    return errors_table
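
# Usage sketch (hypothetical input table name): the errors table is looked up
# by suffixing the table name with '.errors':
#     errors_table_ = errors_table(db, 'plot')
#     # -> a sql_gen.Table for the 'plot.errors' table, or None if it's absent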

##### Import

def put(db, table, row, pkey_=None, row_ct_ref=None):
    '''Recovers from errors.
    Only works under PostgreSQL (uses INSERT RETURNING).
    '''
    row = sql_gen.ColDict(db, table, row)
    if pkey_ == None: pkey_ = sql.pkey(db, table, recover=True)
    
    try:
        cur = sql.insert(db, table, row, pkey_, recover=True, log_level=3.5)
        if row_ct_ref != None and cur.rowcount >= 0:
            row_ct_ref[0] += cur.rowcount
        return sql.value(cur)
    except sql.DuplicateKeyException, e:
        row = sql_gen.ColDict(db, table,
            util.dict_subset_right_join(row, e.cols))
        return sql.value(sql.select(db, table, [pkey_], row, recover=True,
            log_level=3.5))
    except sql.NullValueException: return None
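
# Usage sketch (hypothetical table and column): returns the pkey of the new or
# matching existing row, or None if a NOT NULL constraint was violated:
#     row_ct = [0]
#     party_id = put(db, 'party', {'organizationname': 'UNM'},
#         row_ct_ref=row_ct)
#     # row_ct[0] is incremented by the number of rows actually inserted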

def get(db, table, row, pkey, row_ct_ref=None, create=False):
    '''Recovers from errors'''
    try:
        return sql.value(sql.select(db, table, [pkey], row, limit=1,
            recover=True))
    except StopIteration:
        if not create: raise
        return put(db, table, row, pkey, row_ct_ref) # insert new row
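
# Usage sketch (hypothetical names): like put(), but read-only unless
# create=True; re-raises StopIteration if no row matches and create=False:
#     party_id = get(db, 'party', {'organizationname': 'UNM'}, 'party_id',
#         create=True)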

def is_func_result(col):
    return col.table.name.find('(') >= 0 and col.name == 'result'

def into_table_name(out_table, in_tables0, mapping, is_func):
    def in_col_str(in_col):
        in_col = sql_gen.remove_col_rename(in_col)
        if isinstance(in_col, sql_gen.Col):
            table = in_col.table
            if table == in_tables0:
                in_col = sql_gen.to_name_only_col(in_col)
            elif is_func_result(in_col): in_col = table # omit col name
        return str(in_col)
    
    str_ = str(out_table)
    if is_func:
        str_ += '('
        
        try: value_in_col = mapping['value']
        except KeyError:
            str_ += ', '.join((str(k)+'='+in_col_str(v)
                for k, v in mapping.iteritems()))
        else: str_ += in_col_str(value_in_col)
        
        str_ += ')'
    else:
        out_col = 'rank'
        try: in_col = mapping[out_col]
        except KeyError: str_ += '_pkeys'
        else: # has a rank column, so hierarchical
            str_ += '['+str(out_col)+'='+in_col_str(in_col)+']'
    return str_
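
# Naming patterns this produces (hypothetical out_table/mapping values):
#     * plain table, no 'rank' column:   'taxonconcept_pkeys'
#     * plain table with a 'rank' col:   'taxonconcept[rank=<in col>]'
#     * SQL function with a 'value' arg: '_merge(<in col>)'
#     * SQL function, other args:        '_merge(<arg>=<in col>, ...)'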

def put_table(db, out_table, in_tables, mapping, row_ct_ref=None, into=None,
    default=None, is_func=False, on_error=exc.raise_):
    '''Recovers from errors.
    Only works under PostgreSQL (uses INSERT RETURNING).
    IMPORTANT: Must be run at the *beginning* of a transaction.
    @param in_tables The main input table to select from, followed by a list of
        tables to join with it using the main input table's pkey
    @param mapping dict(out_table_col=in_table_col, ...)
        * out_table_col: str (*not* sql_gen.Col)
        * in_table_col: sql_gen.Col|literal-value
    @param into The table to contain the output and input pkeys.
        Defaults to `out_table.name+'_pkeys'`.
    @param default The *output* column to use as the pkey for missing rows.
        If this output column does not exist in the mapping, uses None.
    @param is_func Whether out_table is the name of a SQL function, not a table
    @return sql_gen.Col Where the output pkeys are made available
    '''
    import psycopg2.extensions
    
    out_table = sql_gen.as_Table(out_table)
    
    def log_debug(msg): db.log_debug(msg, level=1.5)
    def col_ustr(str_):
        return strings.repr_no_u(sql_gen.remove_col_rename(str_))
    
    log_debug('********** New iteration **********')
    log_debug('Inserting these input columns into '+strings.as_tt(
        out_table.to_str(db))+':\n'+strings.as_table(mapping, ustr=col_ustr))
    
    is_function = sql.function_exists(db, out_table)
    
    if is_function: out_pkey = 'result'
    else: out_pkey = sql.pkey(db, out_table, recover=True)
    out_pkey_col = sql_gen.as_Col(out_pkey, out_table)
    
    in_tables_ = in_tables[:] # don't modify input!
    try: in_tables0 = in_tables_.pop(0) # first table is separate
    except IndexError: in_tables0 = None
    else:
        in_pkey = sql.pkey(db, in_tables0, recover=True)
        in_pkey_col = sql_gen.as_Col(in_pkey, in_tables0)
    
    # Determine if can use optimization for only literal values
    is_literals = not reduce(operator.or_, map(sql_gen.is_table_col,
        mapping.values()), False)
    is_literals_or_function = is_literals or is_function
    
    if in_tables0 == None: errors_table_ = None
    else: errors_table_ = errors_table(db, in_tables0)
    
    # Create input joins from list of input tables
    input_joins = [in_tables0]+[sql_gen.Join(v,
        {in_pkey: sql_gen.join_same_not_null}) for v in in_tables_]
    
    if mapping == {} and not is_function: # need >= one column for INSERT SELECT
        mapping = {out_pkey: None} # ColDict will replace with default value
    
    if not is_literals:
        if into == None:
            into = into_table_name(out_table, in_tables0, mapping, is_func)
        into = sql_gen.as_Table(into)
        
        # Set column sources
        in_cols = filter(sql_gen.is_table_col, mapping.values())
        for col in in_cols:
            if col.table == in_tables0: col.set_srcs(sql_gen.src_self)
        
        log_debug('Joining together input tables into temp table')
        # Place in new table so don't modify input and for speed
        in_table = sql_gen.Table('in')
        mapping = dicts.join(mapping, sql.flatten(db, in_table, input_joins,
            in_cols, preserve=[in_pkey_col]))
        input_joins = [in_table]
        db.log_debug('Temp table: '+strings.as_tt(in_table.to_str(db)), level=2)
    
    mapping = sql_gen.ColDict(db, out_table, mapping)
        # after applying dicts.join() because that returns a plain dict
    
    # Resolve default value column
    if default != None:
        try: default = mapping[default]
        except KeyError:
            db.log_debug('Default value column '
                +strings.as_tt(strings.repr_no_u(default))
                +' does not exist in mapping, falling back to None', level=2.1)
            default = None
    
    # Save default values for all rows since in_table may have rows deleted
    if is_literals: pass
    elif is_function: full_in_table = in_table
    else:
        full_in_table = sql_gen.suffixed_table(in_table, '_full')
        full_in_table_cols = [in_pkey_col]
        if default != None:
            full_in_table_cols.append(default)
            default = sql_gen.with_table(default, full_in_table)
        sql.run_query_into(db, sql.mk_select(db, in_table, full_in_table_cols,
            order_by=None), into=full_in_table, add_pkey_=True)
    
    if not is_literals:
        pkeys_names = [in_pkey, out_pkey]
        pkeys_cols = [in_pkey_col, out_pkey_col]
    
    pkeys_table_exists_ref = [False]
    def insert_into_pkeys(joins, cols=None, limit=None, **kw_args):
        query = sql.mk_select(db, joins, cols, order_by=None, limit=limit)
        if pkeys_table_exists_ref[0]:
            sql.insert_select(db, into, pkeys_names, query, **kw_args)
        else:
            sql.run_query_into(db, query, into=into, add_pkey_=True, **kw_args)
            pkeys_table_exists_ref[0] = True
    
    limit_ref = [None]
    def mk_main_select(joins, cols):
        return sql.mk_select(db, joins, cols, limit=limit_ref[0], order_by=None)
    
    if is_literals: insert_in_table = None
    else:
        insert_in_table = in_table
        insert_in_tables = [insert_in_table]
    join_cols = sql_gen.ColDict(db, out_table)
    
    exc_strs = set()
    def log_exc(e):
        e_str = exc.str_(e, first_line_only=True)
        log_debug('Caught exception: '+e_str)
        assert e_str not in exc_strs # avoid infinite loops
        exc_strs.add(e_str)
    
    def remove_all_rows():
        log_debug('Ignoring all rows')
        limit_ref[0] = 0 # just create an empty pkeys table
    
    def ignore_cond(cond, e):
        out_table_cols = sql_gen.ColDict(db, out_table)
        out_table_cols.update(util.dict_subset_right_join({},
            sql.table_cols(db, out_table)))
        
        in_cols = []
        cond = sql.map_expr(db, cond, mapping, in_cols)
        cond = sql.map_expr(db, cond, out_table_cols)
        
        track_data_error(db, errors_table_, sql_gen.cols_srcs(in_cols), None,
            e.cause.pgcode,
            strings.ensure_newl(e.cause.pgerror)+'condition: '+cond)
        
        not_cond = sql_gen.NotCond(sql_gen.CustomCode(cond))
        log_debug('Ignoring rows where '+strings.as_tt(not_cond.to_str(db)))
        sql.delete(db, insert_in_table, not_cond)
    
    not_null_cols = set()
    def ignore(in_col, value, e):
        in_col = sql_gen.with_table(in_col, insert_in_table)
        
        track_data_error(db, errors_table_, in_col.srcs, value,
            e.cause.pgcode, e.cause.pgerror)
        log_debug('Ignoring rows with '+strings.as_tt(repr(in_col))+' = '
            +strings.as_tt(repr(value)))
        
        sql.add_index(db, in_col, insert_in_table) # enable fast filtering
        if value != None and in_col not in not_null_cols:
            # Try just mapping the value to NULL
            sql.update(db, insert_in_table, [(in_col, None)],
                sql_gen.ColValueCond(in_col, value))
        else:
            sql.delete(db, insert_in_table, sql_gen.ColValueCond(in_col, value))
            if value == None: not_null_cols.add(in_col)
    
    if not is_literals:
        def insert_pkeys_table(which):
            return sql_gen.Table(sql_gen.concat(in_table.name,
                '_insert_'+which+'_pkeys'))
        insert_out_pkeys = insert_pkeys_table('out')
        insert_in_pkeys = insert_pkeys_table('in')
    
    # Do inserts and selects
    while True:
        has_joins = join_cols != {}
        
        if limit_ref[0] == 0: # special case
            assert not has_joins
            
            if is_literals: return None
            log_debug('Creating an empty output pkeys table')
            cur = sql.run_query_into(db, sql.mk_select(db, out_table,
                [out_pkey], order_by=None, limit=0), into=insert_out_pkeys)
            break # don't do main case
        
        log_debug('Trying to insert new rows')
        
        # Prepare to insert new rows
        if is_function:
            log_debug('Calling function on input rows')
            args = dict(((k.name, v) for k, v in mapping.iteritems()))
            func_call = sql_gen.NamedCol(out_pkey,
                sql_gen.FunctionCall(out_table, **args))
            
            if not is_literals:
                # Create empty pkeys table so its row type can be used
                insert_into_pkeys(input_joins, [in_pkey_col, func_call],
                    limit=0, recover=True)
                
                ## Create error handling wrapper function
                
                wrapper = db.TempFunction(sql_gen.concat(into.name, '_wrap'))
                
                select_cols = [in_pkey_col]+args.values()
                in_pkey_var = sql_gen.Col(in_pkey, sql_gen.row_var)
                
                args = dict(((k, sql_gen.with_table(v, sql_gen.row_var))
                    for k, v in args.iteritems()))
                func_call = sql_gen.FunctionCall(out_table, **args)
                
                def mk_return(result):
                    return sql_gen.ReturnQuery(sql.mk_select(db,
                        fields=[in_pkey_var, result], explain=False))
                exc_handler = func_wrapper_exception_handler(mk_return(None),
                    args.values(), errors_table_)
                
                sql.define_func(db, sql_gen.FunctionDef(wrapper,
                    sql_gen.SetOf(into),
                    sql_gen.RowExcIgnore(sql_gen.RowType(in_table),
                        sql.mk_select(db, input_joins, order_by=None),
                        mk_return(func_call), exc_handler=exc_handler)
                    ))
                wrapper_table = sql_gen.FunctionCall(wrapper)
        else:
            insert_args = dict(recover=True, cacheable=False)
            if has_joins:
                insert_args.update(dict(ignore=True))
            else:
                insert_args.update(dict(returning=out_pkey))
                if not is_literals:
                    insert_args.update(dict(into=insert_out_pkeys))
            main_select = mk_main_select([insert_in_table], [sql_gen.with_table(
                c, insert_in_table) for c in mapping.values()])
        
        try:
            cur = None
            if is_function:
                if is_literals: cur = sql.select(db, fields=[func_call])
                else: insert_into_pkeys(wrapper_table, recover=True)
            else:
                cur = sql.insert_select(db, out_table, mapping.keys(),
                    main_select, **insert_args)
            break # insert successful
        except sql.MissingCastException, e:
            log_exc(e)
            
            out_col = e.col
            type_ = e.type
            
            log_debug('Casting '+strings.as_tt(out_col)+' input to '
                +strings.as_tt(type_))
            in_col = mapping[out_col]
            while True:
                try:
                    mapping[out_col] = cast_temp_col(db, type_, in_col,
                        errors_table_)
                    break # cast successful
                except sql.InvalidValueException, e:
                    log_exc(e)
                    
                    ignore(in_col, e.value, e)
        except sql.DuplicateKeyException, e:
            log_exc(e)
            
            # Different rows violating different unique constraints not
            # supported
            assert not join_cols
            
            join_cols.update(util.dict_subset_right_join(mapping, e.cols))
            log_debug('Ignoring existing rows, comparing on these columns:\n'
                +strings.as_inline_table(join_cols, ustr=col_ustr))
            
            if is_literals:
                return sql.value(sql.select(db, out_table, [out_pkey_col],
                    mapping, order_by=None))
            
            # Uniquify input table to avoid internal duplicate keys
            insert_in_table = sql.distinct_table(db, insert_in_table,
                join_cols.values())
            insert_in_tables.append(insert_in_table)
        except sql.NullValueException, e:
            log_exc(e)
            
            out_col, = e.cols
            try: in_col = mapping[out_col]
            except KeyError:
                msg = 'Missing mapping for NOT NULL column '+out_col
                log_debug(msg)
                if default == None: on_error(SyntaxError(msg)) # required col
                remove_all_rows()
            else: ignore(in_col, None, e)
        except sql.CheckException, e:
            log_exc(e)
            
            ignore_cond(e.cond, e)
        except sql.InvalidValueException, e:
            log_exc(e)
            
            for in_col in mapping.values(): ignore(in_col, e.value, e)
        except psycopg2.extensions.TransactionRollbackError, e:
            log_exc(e)
            # retry
        except sql.DatabaseErrors, e:
            log_exc(e)
            
            log_debug('No handler for exception')
            on_error(e)
            remove_all_rows()
        # after exception handled, rerun loop with additional constraints
    
    if cur != None and row_ct_ref != None and cur.rowcount >= 0:
        row_ct_ref[0] += cur.rowcount
    
    if is_literals_or_function: pass # pkeys table already created
    elif has_joins:
        select_joins = input_joins+[sql_gen.Join(out_table, join_cols)]
        log_debug('Getting output table pkeys of existing/inserted rows')
        insert_into_pkeys(select_joins, pkeys_cols)
    else:
        sql.add_row_num(db, insert_out_pkeys) # for joining with input pkeys
        
        log_debug('Getting input table pkeys of inserted rows')
        # Note that mk_main_select() does not use ORDER BY. Instead, assume that
        # since the SELECT query is identical to the one used in INSERT SELECT,
        # its rows will be retrieved in the same order.
        sql.run_query_into(db, mk_main_select(input_joins, [in_pkey]),
            into=insert_in_pkeys)
        sql.add_row_num(db, insert_in_pkeys) # for joining with output pkeys
        
        assert sql.table_row_count(db, insert_out_pkeys) == sql.table_row_count(
            db, insert_in_pkeys)
        
        log_debug('Combining output and input pkeys in inserted order')
        pkey_joins = [insert_in_pkeys, sql_gen.Join(insert_out_pkeys,
            {sql.row_num_col: sql_gen.join_same_not_null})]
        insert_into_pkeys(pkey_joins, pkeys_names)
        
        sql.empty_temp(db, [insert_out_pkeys, insert_in_pkeys])
    
    if not is_literals_or_function:
        log_debug('Setting pkeys of missing rows to '
            +strings.as_tt(repr(default)))
        missing_rows_joins = [full_in_table, sql_gen.Join(into,
            {in_pkey: sql_gen.join_same_not_null}, sql_gen.filter_out)]
            # must use join_same_not_null or query will take forever
        insert_into_pkeys(missing_rows_joins,
            [sql_gen.Col(in_pkey, full_in_table),
            sql_gen.NamedCol(out_pkey, default)])
    # otherwise, there is already an entry for every row
    
    if is_literals: return sql.value(cur)
    else:
        assert (sql.table_row_count(db, into)
            == sql.table_row_count(db, full_in_table))
        
        sql.empty_temp(db, insert_in_tables+[full_in_table])
        
        srcs = []
        if is_func: srcs = sql_gen.cols_srcs(in_cols)
        return sql_gen.Col(out_pkey, into, srcs)
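
# Usage sketch (hypothetical tables, columns, and mapping; see the docstring
# above for the parameter contracts). The 'specimens' table and the column
# names below are assumptions, not part of this module:
#     in_table = sql_gen.Table('specimens')
#     row_ct = [0]
#     pkeys_col = put_table(db, 'taxonconcept', [in_table],
#         {'taxonname': sql_gen.Col('acceptedname', in_table)},
#         row_ct_ref=row_ct)
#     # pkeys_col points to the output-pkey column of a temp table that maps
#     # each input pkey to the inserted/matched output pkey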