Revision 3309
Added by Aaron Marcuse-Kubitza over 12 years ago
lib/sql_io.py

@@ -346,9 +346,8 @@
             sql.update(db, insert_in_table, [(in_col, None)],
                 sql_gen.ColValueCond(in_col, value))
         else:
-            for table in insert_in_tables: # must delete from all copies
-                sql.add_index(db, in_col, table) # enable fast filtering
-                sql.delete(db, table, sql_gen.ColValueCond(in_col, value))
+            sql.add_index(db, in_col, insert_in_table) # enable fast filtering
+            sql.delete(db, insert_in_table, sql_gen.ColValueCond(in_col, value))
         if value == None: not_null_cols.add(in_col)
 
     def insert_pkeys_table(which):
sql_io.py: put_table(): ignore(): Only delete from insert_in_table, because invalid rows only need to be removed from the rows that are actually being inserted into the DB. Invalid rows left in the full (non-uniquified) in_table are OK: they can still get a valid output pkey if the first copy of the row they were considered a duplicate of is valid. (This is a very unusual situation, so the change should not affect most real data.)
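For context, a minimal, self-contained sketch of the behavior the message describes, using sqlite3 in place of the project's sql/sql_gen helpers. The table and column names here (in_table, insert_in_table, out_table, val) follow the diff, but the schema and data are invented for illustration; this is not the real put_table() API.

    import sqlite3

    db = sqlite3.connect(":memory:")
    db.executescript("""
        -- the full input, including duplicate copies of the same value
        CREATE TABLE in_table (row_num INTEGER PRIMARY KEY, val TEXT);
        INSERT INTO in_table (val) VALUES ('a'), ('a'), ('bad'), ('b');

        -- the uniquified copy: only the first copy of each value is inserted
        CREATE TABLE insert_in_table AS
            SELECT MIN(row_num) AS row_num, val FROM in_table GROUP BY val;

        CREATE TABLE out_table (pkey INTEGER PRIMARY KEY, val TEXT UNIQUE);
    """)

    # ignore(): delete the invalid value from insert_in_table only; the full
    # in_table keeps its rows, because output pkeys are resolved by joining
    # back on the value, not by row identity
    db.execute("CREATE INDEX in_col_idx ON insert_in_table (val)") # fast filtering
    db.execute("DELETE FROM insert_in_table WHERE val = 'bad'")

    # insert the surviving (valid, deduplicated) rows into the DB
    db.execute("INSERT INTO out_table (val) SELECT val FROM insert_in_table")

    # join the *full* in_table back to assign each input row its output pkey:
    # both copies of 'a' resolve to the same valid pkey, and only the 'bad'
    # row is left without one
    for row_num, pkey in db.execute("""
            SELECT i.row_num, o.pkey FROM in_table i
            LEFT JOIN out_table o USING (val) ORDER BY i.row_num"""):
        print(row_num, pkey)

In this sketch, deleting 'bad' from in_table as well (what the removed loop did) would be wasted work: the join that assigns output pkeys already leaves the invalid row unmatched, while duplicate copies of a valid first row still resolve to its pkey.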