# (removed: repository-browser page chrome — "Project / General / Profile")
1
#!/usr/bin/env python
2
# Maps one datasource to another, using a map spreadsheet if needed
3
# Exit status is the # of errors in the import, up to the maximum exit status
4
# Multi-safe (supports an input appearing multiple times).
5
# For outputting an XML file to a PostgreSQL database, use the general format of
6
# http://vegbank.org/vegdocs/xml/vegbank_example_ver1.0.2.xml
7
# Duplicate-column safe (supports multiple columns of the same name, which will
8
# be combined)
9

    
10
import copy
11
import csv
12
import itertools
13
import os.path
14
import warnings
15
import sys
16
import xml.dom.minidom as minidom
17

    
18
sys.path.append(os.path.dirname(__file__)+"/../lib")
19

    
20
import csvs
21
import db_xml
22
import exc
23
import ints
24
import iters
25
import maps
26
import opts
27
import parallelproc
28
import Parser
29
import profiling
30
import sql
31
import sql_gen
32
import sql_io
33
import streams
34
import strings
35
import term
36
import util
37
import xpath
38
import xml_dom
39
import xml_func
40
import xml_parse
41

    
42
metadata_prefix = ':'
43
collision_suffix = '/_alt/'
44

    
45
def get_with_prefix(map_, prefixes, key):
    '''Gets all entries for the given key with any of the given prefixes.
    
    The un-prefixed key is always tried as well.
    @param map_ dict to look the prefixed keys up in
    @param prefixes list of prefix strings to try
    @return list of (found_key, found_value) tuples, one per key that matched
    @raise KeyError if no prefixed (or un-prefixed) form of key is in map_
    '''
    values = []
    last_exc = None # last KeyError, kept explicitly (except-var scoping is fragile)
    for key_ in strings.with_prefixes(['']+prefixes, key): # also with no prefix
        try: value = map_[key_]
        except KeyError as e:
            last_exc = e
            continue # keep going
        values.append((key_, value))
    
    if values != []: return values
    else: raise last_exc # re-raise last KeyError
57

    
58
def is_metadata(str_):
    '''Whether str_ denotes a literal metadata value (i.e. starts with
    metadata_prefix).'''
    return str_.startswith(metadata_prefix)
59

    
60
def metadata_value(name):
    '''Extracts the literal metadata value embedded in name.
    
    @return name with metadata_prefix stripped, or None if name does not
        start with metadata_prefix
    '''
    was_removed = [False]
    stripped = strings.remove_prefix(metadata_prefix, name, was_removed)
    if not was_removed[0]: return None
    return stripped
65

    
66
def cleanup(val):
    '''Normalizes an input value before it is put into the output tree.
    
    Converts val to a unicode string, runs strings.cleanup() on it, and maps
    the empty string and the SQL NULL escape (u'\\N') to None.
    @param val any value, or None
    @return the cleaned unicode string, or None for empty/NULL values
    '''
    if val is None: return val # `is None`, not `== None`
    return util.none_if(strings.cleanup(strings.ustr(val)), u'', u'\\N')
69

    
70
def main_():
    '''Runs the import: maps each input datasource to the output (XML or DB),
    using the map spreadsheets given on the command line if needed.
    All configuration comes from environment variables; see usage_err() for
    the command-line/env interface. Exits via ex_tracker.exit() with the
    error count as the exit status.
    '''
    env_names = [] # names of env vars read, accumulated for the usage message
    def usage_err():
        raise SystemExit('Usage: '+opts.env_usage(env_names, True)+' '
            +sys.argv[0]+' [map_path...] [<input] [>output]\n'
            'Note: Row #s start with 1')
    
    ## Get config from env vars
    
    # Modes
    test = opts.env_flag('test', False, env_names)
    commit = opts.env_flag('commit', False, env_names) and not test
        # never commit in test mode
    redo = opts.env_flag('redo', False, env_names) and not commit
        # never redo in commit mode (run `make schemas/reinstall` instead)
    
    # Ranges
    start = util.cast(int, opts.get_env_var('start', 1, env_names)) # 1-based
    # Make start interally 0-based.
    # It's 1-based to the user to match up with the staging table row #s.
    start -= 1
    if test: n_default = 1
    else: n_default = None
    n = util.cast(int, util.none_if(opts.get_env_var('n', n_default, env_names),
        u''))
    end = n
    if end != None: end += start # end is exclusive (one past the last row #)
    
    # Debugging
    verbosity = util.cast(float, opts.get_env_var('verbosity', None, env_names))
    opts.get_env_var('profile_to', None, env_names) # add to env_names
    
    # DB
    def get_db_config(prefix):
        return opts.get_env_vars(sql.db_config_names, prefix, env_names)
    in_db_config = get_db_config('in')
    out_db_config = get_db_config('out')
    in_is_db = 'engine' in in_db_config
    out_is_db = 'engine' in out_db_config
    in_schema = opts.get_env_var('in_schema', None, env_names)
    in_table = opts.get_env_var('in_table', None, env_names)
    if in_schema != None:
        # add the input schema to both connections' search paths
        for config in [in_db_config, out_db_config]:
            config['schemas'] += ','+in_schema
    
    # Optimization
    cache_sql = opts.env_flag('cache_sql', True, env_names)
    by_col = in_db_config == out_db_config and opts.env_flag('by_col', False,
        env_names) # by-column optimization only applies if mapping to same DB
    if test: cpus_default = 0
    else: cpus_default = 0 # or None to use parallel processing by default
    cpus = util.cast(int, util.none_if(opts.get_env_var('cpus', cpus_default,
        env_names), u''))
    
    # Set default verbosity. Must happen after by_col is set.
    if verbosity == None:
        if test: verbosity = 0.5 # automated tests should not be verbose
        elif by_col: verbosity = 3 # show all queries to assist debugging
        else: verbosity = 1.1 # just show row progress
    
    # fix verbosity
    if by_col and not test: verbosity = ints.set_min(verbosity, 2)
        # live column-based import MUST be run with verbosity 2+ (3 preferred)
        # to provide debugging information for often-complex errors.
        # without this, debugging is effectively impossible.
        # automated tests are exempt from this because they output to the screen
    
    ##
    
    # Logging
    verbose_errors = test and verbosity > 0
    debug = verbosity >= 1.5
    def log(msg, level=1):
        '''Higher level -> more verbose'''
        if level <= verbosity:
            if verbosity <= 2:
                if level == 1.5: msg = '# '+msg # msg is Redmine list item
                elif msg.startswith('DB query:'): # remove extra debug info
                    first_line, nl, msg = msg.partition('\n')
            elif level > 1: msg = '['+str(level)+'] '+msg # include level in msg
            
            sys.stderr.write(strings.to_raw_str(msg.rstrip('\n')+'\n'))
    if debug: log_debug = lambda msg, level=2: log(msg, level)
    else: log_debug = sql.log_debug_none
    
    # Parse args
    map_paths = sys.argv[1:]
    if map_paths == []:
        if in_is_db or not out_is_db: usage_err()
        else: map_paths = [None] # no map needed: one pass-through input
    
    def connect_db(db_config):
        log('Connecting to '+sql.db_config_str(db_config))
        return sql.connect(db_config, caching=cache_sql, autocommit=commit,
            debug_temp=verbosity > 3 and commit, log_debug=log_debug)
    
    if end != None: end_str = str(end-1) # end is one past the last #
    else: end_str = 'end'
    log('Processing input rows '+str(start)+'-'+end_str)
    
    # per-row error tracking and throughput profiling
    ex_tracker = exc.ExPercentTracker(iter_text='row')
    profiler = profiling.ItersProfiler(start_now=True, iter_text='row')
    
    # Parallel processing
    pool = parallelproc.MultiProducerPool(cpus)
    log('Using '+str(pool.process_ct)+' parallel CPUs')
    
    # Set up DB access
    row_ins_ct_ref = [0] # mutable cell: # of rows inserted, shared w/ closures
    if out_is_db:
        out_db = connect_db(out_db_config)
        def is_rel_func(name):
            # whether name is a relational function: a special put function
            # or a function that exists in the output DB
            return (name in db_xml.put_special_funcs
                or sql.function_exists(out_db, sql_gen.Function(name)))
    
    # XML tree that each input is mapped into before being output
    doc = xml_dom.create_doc()
    root = doc.documentElement
    out_is_xml_ref = [False]
    
    in_label_ref = [None]
    col_defaults = {}
    def update_in_label():
        # exports the datasource name for use downstream (env var `source`)
        if in_schema != None: os.environ['source'] = in_schema
        elif in_label_ref[0] != None: os.environ['source'] = in_label_ref[0]
    
    def prep_root():
        root.clear()
        update_in_label()
    prep_root()
    
    def process_input(root, row_ready, map_path):
        '''Inputs datasource to XML tree, mapping if needed'''
        # Load map header
        in_is_xpaths = True
        out_is_xpaths = True
        out_label = None
        if map_path != None:
            metadata = [] # NOTE(review): never populated in this function
            mappings = []
            stream = open(map_path, 'rb')
            reader = csv.reader(stream)
            in_label, out_label = reader.next()[:2]
            
            # NOTE(review): defined but never called in this function
            def split_col_name(name):
                label, sep, root = name.partition(':')
                return label, sep != '', root, []
            
            in_label, in_root, prefixes = maps.col_info(in_label)
            in_is_xpaths = in_root != None
            in_label_ref[0] = in_label
            update_in_label()
            out_label, out_root = maps.col_info(out_label)[:2]
            out_is_xpaths = out_root != None
            if out_is_xpaths: has_types = out_root.find('/*s/') >= 0
                # outer elements are types
            
            # remaining rows are in->out column mappings
            for row in reader:
                in_, out = row[:2]
                if out != '': mappings.append([in_, out_root+out])
            
            stream.close()
            
            root.ownerDocument.documentElement.tagName = out_label
        in_is_xml = in_is_xpaths and not in_is_db
        out_is_xml_ref[0] = out_is_xpaths and not out_is_db
        
        def process_rows(process_row, rows, rows_start=0):
            '''Processes input rows
            @param process_row(in_row, i)
            @rows_start The (0-based) row # of the first row in rows. Set this
                only if the pre-start rows have already been skipped.
            '''
            rows = iter(rows)
            
            if end != None: row_nums = xrange(rows_start, end)
            else: row_nums = itertools.count(rows_start)
            i = -1
            for i in row_nums:
                try: row = rows.next()
                except StopIteration:
                    i -= 1 # last row # didn't count
                    break # no more rows
                if i < start: continue # not at start row yet
                
                # Row # is interally 0-based, but 1-based to the user
                log('Processing input row #'+str(i+1), level=1.1)
                process_row(row, i)
                row_ready(i, row)
            row_ct = i-start+1
            return row_ct
        
        def map_rows(get_value, rows, **kw_args):
            '''Maps input rows
            @param get_value(in_, row):str
            '''
            # Prevent collisions if multiple inputs mapping to same output
            outputs_idxs = dict()
            for i, mapping in enumerate(mappings):
                in_, out = mapping
                default = util.NamedTuple(count=1, first=i)
                idxs = outputs_idxs.setdefault(out, default)
                if idxs is not default: # key existed, so there was a collision
                    if idxs.count == 1: # first key does not yet have suffix
                        mappings[idxs.first][1] += collision_suffix+'0'
                    mappings[i][1] += collision_suffix+str(idxs.count)
                    idxs.count += 1
            
            id_node = None
            if out_is_db:
                # build the put template once; rewrite mappings to point at
                # the template's XML nodes instead of XPaths
                mappings_orig = mappings[:] # save a copy
                mappings[:] = [] # empty existing elements
                for in_, out in mappings_orig:
                    in_str = strings.ustr(in_)
                    is_metadata_ = is_metadata(in_str)
                    if is_metadata_: value = metadata_value(in_str)
                    else: value = '$'+in_str # mark as name
                    
                    # All put_obj()s should return the same id_node
                    nodes, id_node = xpath.put_obj(root, out, '-1', has_types,
                        value) # value is placeholder that documents name
                    if not is_metadata_: mappings.append([in_, nodes])
                if id_node == None:
                    warnings.warn(UserWarning('Map warning: No mappings or no '
                        'column name matches. Are you importing the correct '
                        'input table?'))
                xml_func.simplify(root)
                sys.stdout.write(strings.to_raw_str('Put template:\n'
                    +strings.ustr(root)))
                sys.stdout.flush()
            
            def process_row(row, i):
                row_id = str(i)
                if id_node != None: xml_dom.set_value(id_node, row_id)
                for in_, out in mappings:
                    log_debug('Getting '+strings.ustr(in_))
                    value = cleanup(get_value(in_, row))
                    log_debug('Putting '+strings.urepr(value)+' to '
                        +strings.ustr(out))
                    if out_is_db: # out is list of XML nodes
                        for node in out: xml_dom.set_value(node, value)
                    elif value != None: # out is XPath
                        xpath.put_obj(root, out, row_id, has_types, value)
            return process_rows(process_row, rows, **kw_args)
        
        def map_table(col_names, rows, **kw_args):
            # maps tabular input (DB cursor or CSV) using column-name lookup
            col_names_ct = len(col_names)
            col_idxs = util.list_flip(col_names)
            col_names_map = dict(zip(col_names, col_names))
            prefixes_simp = map(maps.simplify, prefixes)
            
            # Resolve prefixes
            mappings_orig = mappings[:] # save a copy
            mappings[:] = [] # empty existing elements
            for in_, out in mappings_orig:
                if is_metadata(in_): mappings.append([in_, out])
                else:
                    try:
                        cols = get_with_prefix(col_names_map, prefixes_simp,
                            in_)
                    except KeyError: pass # unmatched input column: drop it
                    else:
                        mappings[len(mappings):] = [[db_xml.ColRef(
                            orig, col_idxs[orig]), out] for simp, orig in cols]
                            # can't use += because that uses =
            
            def get_value(in_, row): return row.list[in_.idx]
            def wrap_row(row):
                return util.ListDict(util.list_as_length(row, col_names_ct),
                    col_names, col_idxs) # handle CSV rows of different lengths
            
            return map_rows(get_value, util.WrapIter(wrap_row, rows), **kw_args)
        
        if in_is_db:
            def on_error(e): ex_tracker.track(e)
            
            if by_col: in_db = out_db # same DB, reuse the connection
            else: in_db = connect_db(in_db_config)
            
            # Get table and schema name
            schema = in_schema # modified, so can't have same name as outer var
            table = in_table # modified, so can't have same name as outer var
            if table == None:
                assert in_is_xpaths
                schema, sep, table = in_root.partition('.')
                if sep == '': # only the table name was specified
                    table = schema
                    schema = None
            table = sql_gen.Table(table, schema)
            
            # Fetch rows
            if by_col: limit = 0 # only fetch column names
            else: limit = n
            try:
                cur = sql.select(in_db, table, limit=limit, start=start,
                    recover=True, cacheable=False)
            except sql.DoesNotExistException:
                # missing input table: treat as empty input
                table = None
                col_names = []
                rows = []
            else:
                col_names = list(sql.col_names(cur))
                rows = sql.rows(cur)
            
            # inline metadata value columns
            col_default_values = {}
            for col_name in col_names:
                col = sql_gen.Col(col_name, table)
                if sql.col_is_constant(in_db, col):
                    col_default_values[col_name] = (metadata_prefix +
                        sql.col_default_value(in_db, col))
            for i, mapping in enumerate(mappings):
                in_, out = mapping
                mappings[i] = (col_default_values.get(in_, in_), out)
            
            if by_col:
                map_table(col_names, []) # just create the template
                
                if table != None and start == 0 and n == None: # full re-import
                    log('Clearing errors table')
                    errors_table_ = sql_io.errors_table(in_db, table)
                    if errors_table_ != None:
                        sql.drop_table(in_db, errors_table_)
                
                # Strip XML functions not in the DB
                xml_func.process(root, is_rel_func=is_rel_func)
                if debug: log_debug('Putting stripped:\n'+strings.ustr(root))
                    # only calc if debug
                
                # Import rows
                in_row_ct_ref = [0]
                db_xml.put_table(in_db, root.firstChild, table, in_row_ct_ref,
                    row_ins_ct_ref, n, start, on_error, col_defaults)
                row_ct = in_row_ct_ref[0]
            else:
                # Use normal by-row method
                row_ct = map_table(col_names, rows, rows_start=start)
                    # rows_start: pre-start rows have been skipped
                
                in_db.db.close()
        elif in_is_xml:
            stdin = streams.LineCountStream(sys.stdin)
            def on_error(e):
                exc.add_msg(e, term.emph('input line #:')+' '
                    +str(stdin.line_num))
                ex_tracker.track(e)
            
            def get_rows(doc2rows):
                return iters.flatten(itertools.imap(doc2rows,
                    xml_parse.docs_iter(stdin, on_error)))
            
            if map_path == None:
                # no map: copy input elements straight into the output tree
                def doc2rows(in_xml_root):
                    iter_ = xml_dom.NodeElemIter(in_xml_root)
                    util.skip(iter_, xml_dom.is_text) # skip metadata
                    return iter_
                
                row_ct = process_rows(lambda row, i: root.appendChild(row),
                    get_rows(doc2rows))
            else:
                def doc2rows(in_xml_root):
                    rows = xpath.get(in_xml_root, in_root, limit=end)
                    if rows == []: warnings.warn(UserWarning('Map warning: '
                        'Root "'+in_root+'" not found in input'))
                    return rows
                
                def get_value(in_, row):
                    in_ = './{'+(','.join(strings.with_prefixes(
                        ['']+prefixes, in_)))+'}' # also with no prefix
                    nodes = xpath.get(row, in_, allow_rooted=False)
                    if nodes != []: return xml_dom.value(nodes[0])
                    else: return None
                
                row_ct = map_rows(get_value, get_rows(doc2rows))
        else: # input is CSV
            reader, col_names = csvs.reader_and_header(sys.stdin)
            row_ct = map_table(col_names, reader)
        
        return row_ct
    
    def process_inputs(root, row_ready):
        # processes every map path; returns the total # of rows processed
        row_ct = 0
        for map_path in map_paths:
            row_ct += process_input(root, row_ready, map_path)
        return row_ct
    
    pool.share_vars(locals()) # presumably exposes locals to pool workers — see parallelproc
    if out_is_db:
        try:
            if redo: sql.empty_db(out_db)
            pool.share_vars(locals())
            
            def row_ready(row_num, input_row):
                # lazily-built row description, for error messages/debug output
                row_str_ = [None]
                def row_str():
                    if row_str_[0] == None:
                        # Row # is interally 0-based, but 1-based to the user
                        row_str_[0] = (term.emph('row #:')+' '+str(row_num+1)
                            +'\n'+term.emph('input row:')+'\n'
                            +strings.ustr(input_row))
                        if verbose_errors: row_str_[0] += ('\n'
                            +term.emph('output row:')+'\n'+strings.ustr(root))
                    return row_str_[0]
                
                if debug: log_debug(row_str()) # only calc if debug
                
                def on_error(e):
                    exc.add_msg(e, row_str())
                    ex_tracker.track(e, row_num, detail=verbose_errors)
                pool.share_vars(locals())
                
                row_root = root.cloneNode(True) # deep copy so don't modify root
                xml_func.process(row_root, on_error, is_rel_func, out_db)
                if debug: log_debug('Putting processed:\n'
                    +strings.ustr(row_root)) # only calc if debug
                if not xml_dom.is_empty(row_root):
                    assert xml_dom.has_one_child(row_root)
                    try:
                        # savepoint so one bad row doesn't abort the whole run
                        sql.with_savepoint(out_db,
                            lambda: db_xml.put(out_db, row_root.firstChild,
                                row_ins_ct_ref, on_error, col_defaults))
                    except sql.DatabaseErrors, e: on_error(e)
            
            row_ct = process_inputs(root, row_ready)
            sys.stdout.write('Inserted '+str(row_ins_ct_ref[0])+
                ' new rows into database\n')
            sys.stdout.flush()
            
            # Consume asynchronous tasks
            pool.main_loop()
        finally: out_db.close()
    else:
        def on_error(e): ex_tracker.track(e)
        def row_ready(row_num, input_row): pass
        row_ct = process_inputs(root, row_ready)
        xml_func.process(root, on_error)
        if out_is_xml_ref[0]:
            doc.writexml(sys.stdout, **xml_dom.prettyxml_config)
        else: # output is CSV
            raise NotImplementedError('CSV output not supported yet')
    
    # Consume any asynchronous tasks not already consumed above
    pool.main_loop()
    
    profiler.stop(row_ct)
    if not by_col: ex_tracker.add_iters(row_ct) # only if errors are done by row
    log('Processed '+str(row_ct)+' input rows')
    log(profiler.msg())
    log(ex_tracker.msg())
    ex_tracker.exit()
519

    
520
def main():
    '''Entry point: runs main_(), converting a Parser.SyntaxError into a
    clean SystemExit message instead of a traceback.'''
    try: main_()
    except Parser.SyntaxError as e: raise SystemExit(strings.ustr(e))
        # `as` form works on Python 2.6+ and 3.x; the comma form is 2.x-only
523

    
524
if __name__ == '__main__':
    # read profile_to directly; it is also registered in env_names by main_()
    profile_to = opts.get_env_var('profile_to', None)
    if profile_to != None:
        import cProfile
        sys.stderr.write('Profiling to '+profile_to+'\n')
        # NOTE(review): cProfile.run() documents a string command that it
        # exec()s; passing main's code object relies on exec accepting code
        # objects. func_code is Python 2-only (__code__ in Python 3) — confirm
        # before any porting.
        cProfile.run(main.func_code, profile_to)
    else: main()
# (removed: repository-browser pagination footer)