Project

General

Profile

1
#!/usr/bin/env python
2
# Maps one datasource to another, using a map spreadsheet if needed
3
# Exit status is the # of errors in the import, up to the maximum exit status
4
# Multi-safe (supports an input appearing multiple times).
5
# For outputting an XML file to a PostgreSQL database, use the general format of
6
# http://vegbank.org/vegdocs/xml/vegbank_example_ver1.0.2.xml
7
# Duplicate-column safe (supports multiple columns of the same name, which will
8
# be combined)
9

    
10
import copy
11
import csv
12
import itertools
13
import os.path
14
import warnings
15
import sys
16
import xml.dom.minidom as minidom
17

    
18
sys.path.append(os.path.dirname(__file__)+"/../lib")
19

    
20
import csvs
21
import db_xml
22
import exc
23
import ints
24
import iters
25
import maps
26
import opts
27
import parallelproc
28
import Parser
29
import profiling
30
import sql
31
import sql_gen
32
import sql_io
33
import streams
34
import strings
35
import term
36
import util
37
import xpath
38
import xml_dom
39
import xml_func
40
import xml_parse
41

    
42
metadata_prefix = ':'
43
collision_suffix = '/_alt/'
44

    
45
def get_with_prefix(map_, prefixes, key):
    '''Gets all entries for the given key with any of the given prefixes.
    
    @param map_ dict to look the prefixed keys up in
    @param prefixes prefixes to try, in addition to no prefix
    @param key the unprefixed key
    @return list of (found_key, found_value) tuples, one per prefix whose
        prefixed key is in map_ (the original docstring claimed a single
        tuple, but a list has always been returned)
    @raise KeyError if no prefixed form of the key is in map_
    '''
    values = []
    # fallback if the prefix iterator yields nothing at all, so the final
    # raise never hits an unbound name
    last_error = KeyError(key)
    for key_ in strings.with_prefixes(['']+prefixes, key): # also with no prefix
        try: value = map_[key_]
        except KeyError as e: # `as` form works on Python 2.6+ and 3
            last_error = e
            continue # keep going
        values.append((key_, value))
    
    if values != []: return values
    else: raise last_error # re-raise last KeyError
57

    
58
def is_metadata(str_):
    '''Whether the given map input is a literal metadata value, i.e. it
    starts with metadata_prefix, rather than a column name.'''
    return str_.startswith(metadata_prefix)
59

    
60
def metadata_value(name):
    '''If name is a metadata entry (carries metadata_prefix), return the text
    after the prefix; otherwise return None.'''
    was_stripped = [False] # out-param filled in by strings.remove_prefix()
    stripped = strings.remove_prefix(metadata_prefix, name, was_stripped)
    if not was_stripped[0]: return None
    return stripped
65

    
66
def cleanup(val):
    '''Normalizes an input value before it is put into the output tree.
    
    None passes through unchanged. Otherwise the value is converted to
    unicode and whitespace-cleaned via strings.cleanup(); util.none_if()
    presumably maps u'' and the u'\\N' NULL marker to None -- TODO confirm
    against util.none_if()'s contract.
    '''
    if val is None: return val # identity test is the idiomatic None check
    return util.none_if(strings.cleanup(strings.ustr(val)), u'', u'\\N')
69

    
70
def main_():
    '''Maps one datasource to another, optionally via map spreadsheets.
    
    Configured through env vars (each opts.* call below registers the names
    it reads in env_names, for the usage message): test, commit, redo, start,
    n, verbosity, profile_to, in_*/out_* DB connection settings, in_schema,
    in_table, cache_sql, by_col, cpus.
    Command-line args are map spreadsheet paths (see usage_err()).
    '''
    env_names = [] # filled in by the opts.* calls below
    def usage_err():
        # abort with a usage message listing all recognized env vars
        raise SystemExit('Usage: '+opts.env_usage(env_names, True)+' '
            +sys.argv[0]+' [map_path...] [<input] [>output]\n'
            'Note: Row #s start with 1')
    
    ## Get config from env vars
    
    # Modes
    test = opts.env_flag('test', False, env_names)
    commit = opts.env_flag('commit', False, env_names) and not test
        # never commit in test mode
    redo = opts.env_flag('redo', False, env_names) and not commit
        # never redo in commit mode (run `make schemas/reinstall` instead)
    
    # Ranges
    start = util.cast(int, opts.get_env_var('start', 1, env_names)) # 1-based
    # Make start internally 0-based.
    # It's 1-based to the user to match up with the staging table row #s.
    start -= 1
    if test: n_default = 1 # test mode defaults to processing a single row
    else: n_default = None # no row limit
    n = util.cast(int, util.none_if(opts.get_env_var('n', n_default, env_names),
        u''))
    end = n
    if end != None: end += start # end is one past the last row # (0-based)
    
    # Debugging
    verbosity = util.cast(float, opts.get_env_var('verbosity', None, env_names))
    opts.get_env_var('profile_to', None, env_names) # add to env_names
    
    # DB
    def get_db_config(prefix):
        # gathers e.g. $in_engine, $in_database, ... into a config dict
        return opts.get_env_vars(sql.db_config_names, prefix, env_names)
    in_db_config = get_db_config('in')
    out_db_config = get_db_config('out')
    in_is_db = 'engine' in in_db_config
    out_is_db = 'engine' in out_db_config
    in_schema = opts.get_env_var('in_schema', None, env_names)
    in_table = opts.get_env_var('in_table', None, env_names)
    if in_schema != None:
        # make the input schema visible on both connections
        for config in [in_db_config, out_db_config]:
            config['schemas'] += ','+in_schema
    
    # Optimization
    cache_sql = opts.env_flag('cache_sql', True, env_names)
    by_col = in_db_config == out_db_config and opts.env_flag('by_col', False,
        env_names) # by-column optimization only applies if mapping to same DB
    if test: cpus_default = 0
    else: cpus_default = 0 # or None to use parallel processing by default
    cpus = util.cast(int, util.none_if(opts.get_env_var('cpus', cpus_default,
        env_names), u''))
    
    # Set default verbosity. Must happen after by_col is set.
    if verbosity == None:
        if test: verbosity = 0.5 # automated tests should not be verbose
        elif by_col: verbosity = 3 # show all queries to assist debugging
        else: verbosity = 1.1 # just show row progress
    
    # fix verbosity
    if by_col and not test: verbosity = ints.set_min(verbosity, 2)
        # live column-based import MUST be run with verbosity 2+ (3 preferred)
        # to provide debugging information for often-complex errors.
        # without this, debugging is effectively impossible.
        # automated tests are exempt from this because they output to the screen
    
    ##
    
    # Logging
    verbose_errors = test and verbosity > 0
    debug = verbosity >= 1.5
    def log(msg, level=1):
        '''Higher level -> more verbose. Messages whose level exceeds the
        configured verbosity are suppressed.'''
        if level <= verbosity:
            if verbosity <= 2:
                if level == 1.5: msg = '# '+msg # msg is Redmine list item
                elif msg.startswith('DB query:'): # remove extra debug info
                    first_line, nl, msg = msg.partition('\n')
            elif level > 1: msg = '['+str(level)+'] '+msg # include level in msg
            
            sys.stderr.write(strings.to_raw_str(msg.rstrip('\n')+'\n'))
    if debug: log_debug = lambda msg, level=2: log(msg, level)
    else: log_debug = sql.log_debug_none # no-op debug logger
    
    # Parse args
    map_paths = sys.argv[1:]
    if map_paths == []:
        # a map spreadsheet is required unless input is non-DB and output is DB
        if in_is_db or not out_is_db: usage_err()
        else: map_paths = [None]
    
    def connect_db(db_config):
        log('Connecting to '+sql.db_config_str(db_config))
        return sql.connect(db_config, caching=cache_sql, autocommit=commit,
            debug_temp=verbosity > 3 and commit, log_debug=log_debug)
    
    if end != None: end_str = str(end-1) # end is one past the last #
    else: end_str = 'end'
    log('Processing input rows '+str(start)+'-'+end_str)
    
    ex_tracker = exc.ExPercentTracker(iter_text='row')
    profiler = profiling.ItersProfiler(start_now=True, iter_text='row')
    
    # Parallel processing
    pool = parallelproc.MultiProducerPool(cpus)
    log('Using '+str(pool.process_ct)+' parallel CPUs')
    
    # Set up DB access
    row_ins_ct_ref = [0] # mutable insert counter shared with the put routines
    if out_is_db:
        out_db = connect_db(out_db_config)
        def is_rel_func(name):
            # whether name is a relational function: a special put function
            # or a function that exists in the output DB
            return (name in db_xml.put_special_funcs
                or sql.function_exists(out_db, sql_gen.Function(name)))
    
    doc = xml_dom.create_doc()
    root = doc.documentElement
    out_is_xml_ref = [False] # set by process_input() once the map is parsed
    
    in_label_ref = [None] # the input's label from the map header
    col_defaults = {}
    def update_in_label():
        # expose the datasource name to downstream code via $source
        if in_schema != None: os.environ['source'] = in_schema
        elif in_label_ref[0] != None: os.environ['source'] = in_label_ref[0]
    
    def prep_root():
        root.clear()
        update_in_label()
    prep_root()
    
    def process_input(root, row_ready, map_path):
        '''Inputs datasource to XML tree, mapping if needed.
        @param row_ready(row_num, input_row) called after each mapped row
        @param map_path path to a map spreadsheet, or None for no map
        @return the # of rows processed
        '''
        # Load map header
        in_is_xpaths = True
        out_is_xpaths = True
        out_label = None
        if map_path != None:
            metadata = [] # NOTE(review): appears unused in this function
            mappings = []
            stream = open(map_path, 'rb')
            reader = csv.reader(stream)
            in_label, out_label = reader.next()[:2] # map header row
            
            def split_col_name(name):
                # NOTE(review): defined but never called; looks superseded
                # by maps.col_info() below -- candidate for removal
                label, sep, root = name.partition(':')
                return label, sep != '', root, []
            
            in_label, in_root, prefixes = maps.col_info(in_label)
            in_is_xpaths = in_root != None
            in_label_ref[0] = in_label
            update_in_label()
            out_label, out_root = maps.col_info(out_label)[:2]
            out_is_xpaths = out_root != None
            if out_is_xpaths: has_types = out_root.find('/*s/') >= 0
                # outer elements are types
                # NOTE(review): has_types is only bound here; the code paths
                # below that pass it to xpath.put_obj() rely on out_is_xpaths
                # being True -- confirm no path reaches them otherwise
            
            for row in reader:
                in_, out = row[:2]
                if out != '': mappings.append([in_, out_root+out])
            
            stream.close()
            
            root.ownerDocument.documentElement.tagName = out_label
        in_is_xml = in_is_xpaths and not in_is_db
        out_is_xml_ref[0] = out_is_xpaths and not out_is_db
        
        def process_rows(process_row, rows, rows_start=0):
            '''Processes input rows
            @param process_row(in_row, i)
            @param rows_start The (0-based) row # of the first row in rows.
                Set this only if the pre-start rows have already been skipped.
            @return the # of rows processed (excluding skipped pre-start rows)
            '''
            rows = iter(rows)
            
            if end != None: row_nums = xrange(rows_start, end)
            else: row_nums = itertools.count(rows_start)
            i = -1
            for i in row_nums:
                try: row = rows.next()
                except StopIteration:
                    i -= 1 # last row # didn't count
                    break # no more rows
                if i < start: continue # not at start row yet
                
                # Row # is internally 0-based, but 1-based to the user
                log('Processing input row #'+str(i+1), level=1.1)
                process_row(row, i)
                row_ready(i, row)
            row_ct = i-start+1
            return row_ct
        
        def map_rows(get_value, rows, **kw_args):
            '''Maps input rows
            @param get_value(in_, row):str
            '''
            # Prevent collisions if multiple inputs mapping to same output
            outputs_idxs = dict()
            for i, mapping in enumerate(mappings):
                in_, out = mapping
                default = util.NamedTuple(count=1, first=i)
                idxs = outputs_idxs.setdefault(out, default)
                if idxs is not default: # key existed, so there was a collision
                    if idxs.count == 1: # first key does not yet have suffix
                        mappings[idxs.first][1] += collision_suffix+'0'
                    mappings[i][1] += collision_suffix+str(idxs.count)
                    idxs.count += 1
            
            id_node = None
            if out_is_db:
                # Build the output template once with placeholder values;
                # process_row() below then only fills in per-row values
                mappings_orig = mappings[:] # save a copy
                mappings[:] = [] # empty existing elements
                for in_, out in mappings_orig:
                    in_str = strings.ustr(in_)
                    is_metadata_ = is_metadata(in_str)
                    if is_metadata_: value = metadata_value(in_str)
                    else: value = '$'+in_str # mark as name
                    
                    # All put_obj()s should return the same id_node
                    nodes, id_node = xpath.put_obj(root, out, '-1', has_types,
                        value) # value is placeholder that documents name
                    if not is_metadata_: mappings.append([in_, nodes])
                if id_node == None:
                    warnings.warn(UserWarning('Map warning: No mappings or no '
                        'column name matches. Are you importing the correct '
                        'input table?'))
                xml_func.simplify(root)
                sys.stdout.write(strings.to_raw_str('Put template:\n'
                    +strings.ustr(root)))
                sys.stdout.flush()
            
            def process_row(row, i):
                row_id = str(i)
                if id_node != None: xml_dom.set_value(id_node, row_id)
                for in_, out in mappings:
                    log_debug('Getting '+strings.ustr(in_))
                    value = cleanup(get_value(in_, row))
                    log_debug('Putting '+strings.urepr(value)+' to '
                        +strings.ustr(out))
                    if out_is_db: # out is list of XML nodes
                        for node in out: xml_dom.set_value(node, value)
                    elif value != None: # out is XPath
                        xpath.put_obj(root, out, row_id, has_types, value)
            return process_rows(process_row, rows, **kw_args)
        
        def map_table(col_names, rows, **kw_args):
            '''Maps rows of a named-column table by first resolving each
            mapping's input column name to a column index.'''
            col_names_ct = len(col_names)
            col_idxs = util.list_flip(col_names)
            col_names_map = dict(zip(col_names, col_names))
            
            # Resolve names
            mappings_orig = mappings[:] # save a copy
            mappings[:] = [] # empty existing elements
            for in_, out in mappings_orig:
                if is_metadata(in_): mappings.append([in_, out])
                else:
                    try: cols = get_with_prefix(col_names_map, [], in_)
                    except KeyError: pass # input column not in this table
                    else:
                        mappings[len(mappings):] = [[db_xml.ColRef(
                            orig, col_idxs[orig]), out] for simp, orig in cols]
                            # can't use += because that uses =
            
            def get_value(in_, row): return row.list[in_.idx]
            def wrap_row(row):
                return util.ListDict(util.list_as_length(row, col_names_ct),
                    col_names, col_idxs) # handle CSV rows of different lengths
            
            return map_rows(get_value, util.WrapIter(wrap_row, rows), **kw_args)
        
        if in_is_db:
            def on_error(e): ex_tracker.track(e)
            
            if by_col: in_db = out_db # same DB: reuse the output connection
            else: in_db = connect_db(in_db_config)
            
            # Get table and schema name
            schema = in_schema # modified, so can't have same name as outer var
            table = in_table # modified, so can't have same name as outer var
            if table == None:
                assert in_is_xpaths
                schema, sep, table = in_root.partition('.')
                if sep == '': # only the table name was specified
                    table = schema
                    schema = None
            table = sql_gen.Table(table, schema)
            
            # Fetch rows
            if by_col: limit = 0 # only fetch column names
            else: limit = n
            try:
                cur = sql.select(in_db, table, limit=limit, start=start,
                    recover=True, cacheable=False)
            except sql.DoesNotExistException:
                # missing input table: import nothing rather than aborting
                table = None
                col_names = []
                rows = []
            else:
                col_names = list(sql.col_names(cur))
                rows = sql.rows(cur)
            
            # inline metadata value columns
            col_default_values = {}
            for col_name in col_names:
                col = sql_gen.Col(col_name, table)
                if sql.col_is_constant(in_db, col):
                    # constant column: inline its default as a metadata value
                    col_default_values[col_name] = (metadata_prefix +
                        sql.col_default_value(in_db, col))
            for i, mapping in enumerate(mappings):
                in_, out = mapping
                mappings[i] = (col_default_values.get(in_, in_), out)
            
            if by_col:
                map_table(col_names, []) # just create the template
                
                if table != None and start == 0 and n == None: # full re-import
                    log('Clearing errors table')
                    errors_table_ = sql_io.errors_table(in_db, table)
                    if errors_table_ != None:
                        sql.drop_table(in_db, errors_table_)
                
                # Strip XML functions not in the DB
                xml_func.process(root, is_rel_func=is_rel_func)
                if debug: log_debug('Putting stripped:\n'+strings.ustr(root))
                    # only calc if debug
                
                # Import rows
                in_row_ct_ref = [0]
                db_xml.put_table(in_db, root.firstChild, table, in_row_ct_ref,
                    row_ins_ct_ref, n, start, on_error, col_defaults)
                row_ct = in_row_ct_ref[0]
            else:
                # Use normal by-row method
                row_ct = map_table(col_names, rows, rows_start=start)
                    # rows_start: pre-start rows have been skipped
                
                in_db.db.close()
        elif in_is_xml:
            stdin = streams.LineCountStream(sys.stdin)
            def on_error(e):
                # tag the error with the current input line # before tracking
                exc.add_msg(e, term.emph('input line #:')+' '
                    +str(stdin.line_num))
                ex_tracker.track(e)
            
            def get_rows(doc2rows):
                # one flat row stream over every XML document on stdin
                return iters.flatten(itertools.imap(doc2rows,
                    xml_parse.docs_iter(stdin, on_error)))
            
            if map_path == None:
                # No map: copy the input elements through unchanged
                def doc2rows(in_xml_root):
                    iter_ = xml_dom.NodeElemIter(in_xml_root)
                    util.skip(iter_, xml_dom.is_text) # skip metadata
                    return iter_
                
                row_ct = process_rows(lambda row, i: root.appendChild(row),
                    get_rows(doc2rows))
            else:
                def doc2rows(in_xml_root):
                    rows = xpath.get(in_xml_root, in_root, limit=end)
                    if rows == []: warnings.warn(UserWarning('Map warning: '
                        'Root "'+in_root+'" not found in input'))
                    return rows
                
                def get_value(in_, row):
                    nodes = xpath.get(row, in_, allow_rooted=False)
                    if nodes != []: return xml_dom.value(nodes[0])
                    else: return None
                
                row_ct = map_rows(get_value, get_rows(doc2rows))
        else: # input is CSV
            reader, col_names = csvs.reader_and_header(sys.stdin)
            row_ct = map_table(col_names, reader)
        
        return row_ct
    
    def process_inputs(root, row_ready):
        # run every map spreadsheet, totaling the rows processed
        row_ct = 0
        for map_path in map_paths:
            row_ct += process_input(root, row_ready, map_path)
        return row_ct
    
    pool.share_vars(locals())
    if out_is_db:
        try:
            if redo: sql.empty_db(out_db)
            pool.share_vars(locals())
            
            def row_ready(row_num, input_row):
                # called once per mapped input row; writes the row to the DB
                row_str_ = [None] # cache so the string is built at most once
                def row_str():
                    if row_str_[0] == None:
                        # Row # is internally 0-based, but 1-based to the user
                        row_str_[0] = (term.emph('row #:')+' '+str(row_num+1)
                            +'\n'+term.emph('input row:')+'\n'
                            +strings.ustr(input_row))
                        if verbose_errors: row_str_[0] += ('\n'
                            +term.emph('output row:')+'\n'+strings.ustr(root))
                    return row_str_[0]
                
                if debug: log_debug(row_str()) # only calc if debug
                
                def on_error(e):
                    exc.add_msg(e, row_str())
                    ex_tracker.track(e, row_num, detail=verbose_errors)
                pool.share_vars(locals())
                
                row_root = root.cloneNode(True) # deep copy so don't modify root
                xml_func.process(row_root, on_error, is_rel_func, out_db)
                if debug: log_debug('Putting processed:\n'
                    +strings.ustr(row_root)) # only calc if debug
                if not xml_dom.is_empty(row_root):
                    assert xml_dom.has_one_child(row_root)
                    try:
                        # savepoint so one bad row doesn't abort the whole run
                        sql.with_savepoint(out_db,
                            lambda: db_xml.put(out_db, row_root.firstChild,
                                row_ins_ct_ref, on_error, col_defaults))
                    except sql.DatabaseErrors, e: on_error(e)
            
            row_ct = process_inputs(root, row_ready)
            sys.stdout.write('Inserted '+str(row_ins_ct_ref[0])+
                ' new rows into database\n')
            sys.stdout.flush()
            
            # Consume asynchronous tasks
            pool.main_loop()
        finally: out_db.close()
    else:
        def on_error(e): ex_tracker.track(e)
        def row_ready(row_num, input_row): pass
        row_ct = process_inputs(root, row_ready)
        xml_func.process(root, on_error)
        if out_is_xml_ref[0]:
            doc.writexml(sys.stdout, **xml_dom.prettyxml_config)
        else: # output is CSV
            raise NotImplementedError('CSV output not supported yet')
    
    # Consume any asynchronous tasks not already consumed above
    pool.main_loop()
    
    # Summary stats; exit status is the # of errors (see file header)
    profiler.stop(row_ct)
    if not by_col: ex_tracker.add_iters(row_ct) # only if errors are done by row
    log('Processed '+str(row_ct)+' input rows')
    log(profiler.msg())
    log(ex_tracker.msg())
    ex_tracker.exit()
514

    
515
def main():
    '''Entry point: runs main_(), converting a Parser.SyntaxError into a
    clean exit message instead of a traceback.'''
    try: main_()
    # `as` form works on Python 2.6+ and 3, unlike the old comma syntax
    except Parser.SyntaxError as e: raise SystemExit(strings.ustr(e))
518

    
519
if __name__ == '__main__':
    # If $profile_to is set, profile the whole run and write stats there
    profile_to = opts.get_env_var('profile_to', None)
    if profile_to is not None: # identity test is the idiomatic None check
        import cProfile
        sys.stderr.write('Profiling to '+profile_to+'\n')
        # cProfile.run() is documented to take a statement, exec'd in
        # __main__'s namespace; the original passed main.func_code, a
        # Python 2-only attribute (__code__ on Python 3)
        cProfile.run('main()', profile_to)
    else: main()
(43-43/85)