#!/usr/bin/env python
# Maps one datasource to another, using a map spreadsheet if needed
# Exit status is the # of errors in the import, up to the maximum exit status
# Multi-safe (supports an input appearing multiple times).
# For outputting an XML file to a PostgreSQL database, use the general format of
# http://vegbank.org/vegdocs/xml/vegbank_example_ver1.0.2.xml
# Duplicate-column safe (supports multiple columns of the same name, which will
# be combined)

import copy
import csv
import itertools
import os.path
import warnings
import sys
import xml.dom.minidom as minidom

sys.path.append(os.path.dirname(__file__)+"/../lib")

import csvs
import db_xml
import exc
import ints
import iters
import maps
import opts
import parallelproc
import Parser
import profiling
import sql
import sql_gen
import sql_io
import streams
import strings
import term
import util
import xpath
import xml_dom
import xml_func
import xml_parse

metadata_prefix = ':'
collision_suffix = '/_alt/'
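# metadata_prefix marks a map input as a literal value rather than a column
# name: e.g. the input ':foo' stores the constant string 'foo'.
# collision_suffix disambiguates multiple inputs mapped to the same output
# path: colliding outputs are renamed to path/_alt/0, path/_alt/1, etc.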

def get_with_prefix(map_, prefixes, key):
    '''Gets all entries for the given key with any of the given prefixes
    @return list of tuple(found_key, found_value)
    '''
    values = []
    for key_ in strings.with_prefixes(['']+prefixes, key): # also with no prefix
        try: value = map_[key_]
        except KeyError, e: continue # keep going
        values.append((key_, value))
    
    if values != []: return values
    else: raise e # re-raise last KeyError
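# e.g., assuming strings.with_prefixes() concatenates each prefix onto the key:
#   get_with_prefix({'tbl.col': 1}, ['tbl.'], 'col') -> [('tbl.col', 1)]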

def is_metadata(str_): return str_.startswith(metadata_prefix)

def metadata_value(name):
    removed_ref = [False]
    name = strings.remove_prefix(metadata_prefix, name, removed_ref)
    if removed_ref[0]: return name
    else: return None

def cleanup(val):
    if val == None: return val
    return util.none_if(strings.cleanup(strings.ustr(val)), u'', u'\\N')
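# cleanup() normalizes a raw input value: u'' and u'\\N' (the PostgreSQL NULL
# escape) are treated as NULL, i.e. returned as None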

def main_():
    env_names = []
    def usage_err():
        raise SystemExit('Usage: '+opts.env_usage(env_names, True)+' '
            +sys.argv[0]+''' [map_path...] [<input] [>output]
note: row #s start with 1
verbosity > 3 in commit mode turns on debug_temp mode, which creates real tables
instead of temp tables
''')
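    # Example invocation (hypothetical script name, map path, and env values):
    #   test=1 n=2 verbosity=2 ./map maps/src-dst.csv <input.csv >output.xml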
    
    ## Get config from env vars
    
    # Modes
    test = opts.env_flag('test', False, env_names)
    commit = opts.env_flag('commit', False, env_names) and not test
        # never commit in test mode
    redo = opts.env_flag('redo', False, env_names) and not commit
        # never redo in commit mode (run `make schemas/reinstall` instead)
    
    # Ranges
    start = util.cast(int, util.coalesce(util.none_if(
        opts.get_env_var('start', None, env_names), u''), 1)) # 1-based
    # Make start internally 0-based.
    # It's 1-based to the user to match up with the staging table row #s.
    start -= 1
    if test: n_default = 1
    else: n_default = None
    n = util.cast(int, util.none_if(opts.get_env_var('n', n_default, env_names),
        u''))
    end = n
    if end != None: end += start
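    # e.g. user-specified start=3 n=2 -> internal start=2, end=4 (one past the
    # last row), selecting internal rows 2-3, i.e. rows #3-#4 to the user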
    
    # Debugging
    verbosity = util.cast(float, opts.get_env_var('verbosity', None, env_names))
    opts.get_env_var('profile_to', None, env_names) # add to env_names
    
    # DB
    def get_db_config(prefix):
        return opts.get_env_vars(sql.db_config_names, prefix, env_names)
    in_db_config = get_db_config('in')
    out_db_config = get_db_config('out')
    in_is_db = 'engine' in in_db_config
    out_is_db = 'engine' in out_db_config
    in_schema = opts.get_env_var('in_schema', None, env_names)
    in_table = opts.get_env_var('in_table', None, env_names)
    if in_schema != None:
        for config in [in_db_config, out_db_config]:
            config['schemas'] += ','+in_schema
    
    # Optimization
    cache_sql = opts.env_flag('cache_sql', True, env_names)
    by_col = in_db_config == out_db_config and opts.env_flag('by_col', False,
        env_names) # by-column optimization only applies if mapping to same DB
    if test: cpus_default = 0
    else: cpus_default = 0 # or None to use parallel processing by default
    cpus = util.cast(int, util.none_if(opts.get_env_var('cpus', cpus_default,
        env_names), u''))
    
    # Set default verbosity. Must happen after by_col is set.
    if verbosity == None:
        if test: verbosity = 0.5 # automated tests should not be verbose
        elif by_col: verbosity = 3 # show all queries to assist debugging
        else: verbosity = 1.1 # just show row progress
    
    # fix verbosity
    if by_col and not test: verbosity = ints.set_min(verbosity, 2)
        # live column-based import MUST be run with verbosity 2+ (3 preferred)
        # to provide debugging information for often-complex errors.
        # without this, debugging is effectively impossible.
        # automated tests are exempt from this because they output to the screen
    
    ##
    
    # Logging
    verbose_errors = test and verbosity > 0
    debug = verbosity >= 1.5
    def log(msg, level=1):
        '''Higher level -> more verbose'''
        if level <= verbosity:
            if verbosity <= 2:
                if level == 1.5: msg = '# '+msg # msg is Redmine list item
                elif msg.startswith('DB query:'): # remove extra debug info
                    first_line, nl, msg = msg.partition('\n')
            elif level > 1: msg = '['+str(level)+'] '+msg # include level in msg
            
            sys.stderr.write(strings.to_raw_str(msg.rstrip('\n')+'\n'))
    if debug: log_debug = lambda msg, level=2: log(msg, level)
    else: log_debug = sql.log_debug_none
    
    # Parse args
    map_paths = sys.argv[1:]
    if map_paths == []:
        if in_is_db or not out_is_db: usage_err()
        else: map_paths = [None]
    
    def connect_db(db_config):
        log('Connecting to '+sql.db_config_str(db_config))
        return sql.connect(db_config, caching=cache_sql, autocommit=commit,
            debug_temp=verbosity > 3 and commit, log_debug=log_debug)
    
    if end != None: end_str = str(end-1) # end is one past the last #
    else: end_str = 'end'
    log('Processing input rows '+str(start)+'-'+end_str)
    
    ex_tracker = exc.ExPercentTracker(iter_text='row')
    profiler = profiling.ItersProfiler(start_now=True, iter_text='row')
    
    # Parallel processing
    pool = parallelproc.MultiProducerPool(cpus)
    log('Using '+str(pool.process_ct)+' parallel CPUs')
    
    # Set up DB access
    row_ins_ct_ref = [0]
    if out_is_db:
        out_db = connect_db(out_db_config)
        def is_rel_func(name):
            return (name in db_xml.put_special_funcs
                or sql.function_exists(out_db, sql_gen.Function(name)))
    
    doc = xml_dom.create_doc()
    root = doc.documentElement
    out_is_xml_ref = [False]
    
    in_label_ref = [None]
    col_defaults = {}
    def update_in_label():
        # guard against None: os.environ values must be strings.
        # setdefault: an existing $source env var takes precedence.
        if in_schema != None: os.environ.setdefault('source', in_schema)
        elif in_label_ref[0] != None:
            os.environ.setdefault('source', in_label_ref[0])
    
    def prep_root():
        root.clear()
        update_in_label()
    prep_root()
    
    def process_input(root, row_ready, map_path):
        '''Reads the datasource into the XML tree, mapping it if needed'''
        # Load map header
        in_is_xpaths = True
        out_is_xpaths = True
        out_label = None
        if map_path != None:
            metadata = []
            mappings = []
            stream = open(map_path, 'rb')
            reader = csv.reader(stream)
            in_label, out_label = reader.next()[:2]
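            # Map spreadsheet format: row 1 is the header of column labels,
            # e.g. (hypothetical) 'specimens:.,VegBIEN:/*s/' where the part
            # after ':' is the root; each later row maps an input column name
            # (or a ':literal') to an output path appended to the output root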
            
            def split_col_name(name):
                label, sep, root = name.partition(':')
                return label, sep != '', root, []
            
            in_label, in_root, prefixes = maps.col_info(in_label)
            in_is_xpaths = in_root != None
            in_label_ref[0] = in_label
            update_in_label()
            out_label, out_root = maps.col_info(out_label)[:2]
            out_is_xpaths = out_root != None
            if out_is_xpaths: has_types = out_root.find('/*s/') >= 0
                # outer elements are types
            
            for row in reader:
                in_, out = row[:2]
                if out != '': mappings.append([in_, out_root+out])
            
            stream.close()
            
            root.ownerDocument.documentElement.tagName = out_label
        in_is_xml = in_is_xpaths and not in_is_db
        out_is_xml_ref[0] = out_is_xpaths and not out_is_db
        
        def process_rows(process_row, rows, rows_start=0):
            '''Processes input rows
            @param process_row(in_row, i)
            @param rows_start The (0-based) row # of the first row in rows. Set
                this only if the pre-start rows have already been skipped.
            '''
            rows = iter(rows)
            
            if end != None: row_nums = xrange(rows_start, end)
            else: row_nums = itertools.count(rows_start)
            i = -1
            for i in row_nums:
                try: row = rows.next()
                except StopIteration:
                    i -= 1 # last row # didn't count
                    break # no more rows
                if i < start: continue # not at start row yet
                
                # Row # is internally 0-based, but 1-based to the user
                log('Processing input row #'+str(i+1), level=1.1)
                process_row(row, i)
                row_ready(i, row)
            row_ct = i-start+1
            return row_ct
        
        def map_rows(get_value, rows, **kw_args):
            '''Maps input rows
            @param get_value(in_, row):str
            '''
            # Prevent collisions if multiple inputs map to the same output
            outputs_idxs = dict()
            for i, mapping in enumerate(mappings):
                in_, out = mapping
                default = util.NamedTuple(count=1, first=i)
                idxs = outputs_idxs.setdefault(out, default)
                if idxs is not default: # key existed, so there was a collision
                    if idxs.count == 1: # first key does not yet have suffix
                        mappings[idxs.first][1] += collision_suffix+'0'
                    mappings[i][1] += collision_suffix+str(idxs.count)
                    idxs.count += 1
            
            id_node = None
            if out_is_db:
                mappings_orig = mappings[:] # save a copy
                mappings[:] = [] # empty existing elements
                for in_, out in mappings_orig:
                    in_str = strings.ustr(in_)
                    is_metadata_ = is_metadata(in_str)
                    if is_metadata_: value = metadata_value(in_str)
                    else: value = '$'+in_str # mark as name
                    
                    # All put_obj()s should return the same id_node
                    nodes, id_node = xpath.put_obj(root, out, '-1', has_types,
                        value) # value is placeholder that documents name
                    if not is_metadata_: mappings.append([in_, nodes])
                if id_node == None:
                    warnings.warn(UserWarning('Map warning: No mappings or no '
                        'column name matches. Are you importing the correct '
                        'input table?'))
                xml_func.simplify(root)
                sys.stdout.write(strings.to_raw_str('<!--put template-->\n'
                    +strings.ustr(root)))
                sys.stdout.flush()
            
            def process_row(row, i):
                row_id = str(i)
                if id_node != None: xml_dom.set_value(id_node, row_id)
                for in_, out in mappings:
                    log_debug('Getting '+strings.ustr(in_))
                    value = cleanup(get_value(in_, row))
                    log_debug('Putting '+strings.urepr(value)+' to '
                        +strings.ustr(out))
                    if out_is_db: # out is list of XML nodes
                        for node in out: xml_dom.set_value(node, value)
                    elif value != None: # out is XPath
                        xpath.put_obj(root, out, row_id, has_types, value)
            return process_rows(process_row, rows, **kw_args)
        
        def map_table(col_names, rows, **kw_args):
            col_names_ct = len(col_names)
            col_idxs = util.list_flip(col_names)
            col_names_map = dict(zip(col_names, col_names))
            
            # Resolve names
            mappings_orig = mappings[:] # save a copy
            mappings[:] = [] # empty existing elements
            for in_, out in mappings_orig:
                if is_metadata(in_): mappings.append([in_, out])
                else:
                    try: cols = get_with_prefix(col_names_map, [], in_)
                    except KeyError: pass
                    else:
                        mappings[len(mappings):] = [[db_xml.ColRef(
                            orig, col_idxs[orig]), out] for simp, orig in cols]
                            # can't use += because that performs an assignment,
                            # which isn't allowed on a closure var in Python 2
            
            def get_value(in_, row): return row.list[in_.idx]
            def wrap_row(row):
                return util.ListDict(util.list_as_length(row, col_names_ct),
                    col_names, col_idxs) # handle CSV rows of different lengths
            
            return map_rows(get_value, util.WrapIter(wrap_row, rows), **kw_args)
        
        if in_is_db:
            def on_error(e): ex_tracker.track(e)
            
            if by_col: in_db = out_db
            else: in_db = connect_db(in_db_config)
            
            # Get table and schema name
            schema = in_schema # modified, so can't have same name as outer var
            table = in_table # modified, so can't have same name as outer var
            if table == None:
                assert in_is_xpaths
                schema, sep, table = in_root.partition('.')
                if sep == '': # only the table name was specified
                    table = schema
                    schema = None
            table = sql_gen.Table(table, schema)
            
            # Fetch rows
            if by_col: limit = 0 # only fetch column names
            else: limit = n
            try:
                cur = sql.select(in_db, table, limit=limit, start=start,
                    recover=True, cacheable=False)
            except sql.DoesNotExistException:
                table = None
                col_names = []
                rows = []
            else:
                col_names = list(sql.col_names(cur))
                rows = sql.rows(cur)
            
            # inline metadata value columns
            col_default_values = {}
            for col_name in col_names:
                col = sql_gen.Col(col_name, table)
                if sql.col_is_constant(in_db, col):
                    col_default_values[col_name] = (metadata_prefix +
                        sql.col_default_value(in_db, col))
            for i, mapping in enumerate(mappings):
                in_, out = mapping
                mappings[i] = (col_default_values.get(in_, in_), out)
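            # e.g. (hypothetical) if staging column datasetName is constant
            # with default value 'FIA', its map input becomes the literal ':FIA'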
            
            if by_col:
                map_table(col_names, []) # just create the template
                
                if table != None and start == 0 and n == None: # full re-import
                    log('Clearing errors table')
                    errors_table_ = sql_io.errors_table(in_db, table)
                    if errors_table_ != None:
                        sql.drop_table(in_db, errors_table_)
                
                # Strip XML functions not in the DB
                xml_func.process(root, is_rel_func=is_rel_func)
                if debug: log_debug('Putting stripped:\n'+strings.ustr(root))
                    # only calc if debug
                
                # Import rows
                in_row_ct_ref = [0]
                db_xml.put_table(in_db, root.firstChild, table, in_row_ct_ref,
                    row_ins_ct_ref, n, start, on_error, col_defaults)
                row_ct = in_row_ct_ref[0]
            else:
                # Use normal by-row method
                row_ct = map_table(col_names, rows, rows_start=start)
                    # rows_start: pre-start rows have been skipped
                
                in_db.db.close()
        elif in_is_xml:
            stdin = streams.LineCountStream(sys.stdin)
            def on_error(e):
                exc.add_msg(e, term.emph('input line #:')+' '
                    +str(stdin.line_num))
                ex_tracker.track(e)
            
            def get_rows(doc2rows):
                return iters.flatten(itertools.imap(doc2rows,
                    xml_parse.docs_iter(stdin, on_error)))
            
            if map_path == None:
                def doc2rows(in_xml_root):
                    iter_ = xml_dom.NodeElemIter(in_xml_root)
                    util.skip(iter_, xml_dom.is_text) # skip metadata
                    return iter_
                
                row_ct = process_rows(lambda row, i: root.appendChild(row),
                    get_rows(doc2rows))
            else:
                def doc2rows(in_xml_root):
                    rows = xpath.get(in_xml_root, in_root, limit=end)
                    if rows == []: warnings.warn(UserWarning('Map warning: '
                        'Root "'+in_root+'" not found in input'))
                    return rows
                
                def get_value(in_, row):
                    nodes = xpath.get(row, in_, allow_rooted=False)
                    if nodes != []: return xml_dom.value(nodes[0])
                    else: return None
                
                row_ct = map_rows(get_value, get_rows(doc2rows))
        else: # input is CSV
            reader, col_names = csvs.reader_and_header(sys.stdin)
            row_ct = map_table(col_names, reader)
        
        return row_ct
    
    def process_inputs(root, row_ready):
        row_ct = 0
        for map_path in map_paths:
            row_ct += process_input(root, row_ready, map_path)
        return row_ct
    
    pool.share_vars(locals())
    if out_is_db:
        try:
            if redo: sql.empty_db(out_db)
            pool.share_vars(locals())
            
            def row_ready(row_num, input_row):
                row_str_ = [None]
                def row_str():
                    if row_str_[0] == None:
                        # Row # is internally 0-based, but 1-based to the user
                        row_str_[0] = (term.emph('row #:')+' '+str(row_num+1)
                            +'\n'+term.emph('input row:')+'\n'
                            +strings.ustr(input_row))
                        if verbose_errors: row_str_[0] += ('\n'
                            +term.emph('output row:')+'\n'+strings.ustr(root))
                    return row_str_[0]
                
                if debug: log_debug(row_str()) # only calc if debug
                
                def on_error(e):
                    exc.add_msg(e, row_str())
                    ex_tracker.track(e, row_num, detail=verbose_errors)
                pool.share_vars(locals())
                
                row_root = root.cloneNode(True) # deep copy so don't modify root
                xml_func.process(row_root, on_error, is_rel_func, out_db)
                if debug: log_debug('Putting processed:\n'
                    +strings.ustr(row_root)) # only calc if debug
                if not xml_dom.is_empty(row_root):
                    assert xml_dom.has_one_child(row_root)
                    try:
                        sql.with_savepoint(out_db,
                            lambda: db_xml.put(out_db, row_root.firstChild,
                                row_ins_ct_ref, on_error, col_defaults))
                    except sql.DatabaseErrors, e: on_error(e)
            
            row_ct = process_inputs(root, row_ready)
            sys.stdout.write('Inserted '+str(row_ins_ct_ref[0])+
                ' new rows into database\n')
            sys.stdout.flush()
            
            # Consume asynchronous tasks
            pool.main_loop()
        finally: out_db.close()
    else:
        def on_error(e): ex_tracker.track(e)
        def row_ready(row_num, input_row): pass
        row_ct = process_inputs(root, row_ready)
        xml_func.process(root, on_error)
        if out_is_xml_ref[0]:
            doc.writexml(sys.stdout, **xml_dom.prettyxml_config)
        else: # output is CSV
            raise NotImplementedError('CSV output not supported yet')
    
    # Consume any asynchronous tasks not already consumed above
    pool.main_loop()
    
    profiler.stop(row_ct)
    if not by_col: ex_tracker.add_iters(row_ct) # only if errors are done by row
    log('Processed '+str(row_ct)+' input rows')
    log(profiler.msg())
    log(ex_tracker.msg())
    ex_tracker.exit()

def main():
    try: main_()
    except Parser.SyntaxError, e: raise SystemExit(strings.ustr(e))

if __name__ == '__main__':
    profile_to = opts.get_env_var('profile_to', None)
    if profile_to != None:
        import cProfile
        sys.stderr.write('Profiling to '+profile_to+'\n')
        cProfile.run(main.func_code, profile_to)
    else: main()