#!/usr/bin/env python
# Maps one datasource to another, using a map spreadsheet if needed
# Exit status is the # of errors in the import, up to the maximum exit status
# Multi-safe (supports an input appearing multiple times).
# For outputting an XML file to a PostgreSQL database, use the general format of
# http://vegbank.org/vegdocs/xml/vegbank_example_ver1.0.2.xml
# Duplicate-column safe (supports multiple columns of the same name, which will
# be combined)

# NOTE(review): copy and minidom appear unused in this file — confirm against
# the full file before removing
import copy
import csv
import itertools
import os.path
import warnings
import sys
import xml.dom.minidom as minidom

# Make the project's lib/ dir importable, relative to this script's location
sys.path.append(os.path.dirname(__file__)+"/../lib")

import csvs
import db_xml
import exc
import ints
import iters
import maps
import opts
import parallelproc
import Parser
import profiling
import sql
import sql_gen
import sql_io
import streams
import strings
import term
import util
import xpath
import xml_dom
import xml_func
import xml_parse

# Map-spreadsheet input cells starting with this prefix are literal metadata
# values rather than input column names
metadata_prefix = ':'
# Suffix appended to disambiguate multiple inputs mapped to the same output
collision_suffix = '/_alt/'
def get_with_prefix(map_, prefixes, key):
46
    '''Gets all entries for the given key with any of the given prefixes
47
    @return tuple(found_key, found_value)
48
    '''
49
    values = []
50
    for key_ in strings.with_prefixes(['']+prefixes, key): # also with no prefix
51
        try: value = map_[key_]
52
        except KeyError, e: continue # keep going
53
        values.append((key_, value))
54
    
55
    if values != []: return values
56
    else: raise e # re-raise last KeyError
57

    
58
def is_metadata(str_):
    '''Whether a map-spreadsheet input cell is a metadata literal (i.e. starts
    with the metadata prefix) rather than an input column name.'''
    return str_.startswith(metadata_prefix)
def metadata_value(name):
    '''Extracts the literal value from a metadata input cell.
    @return the cell text with the metadata prefix stripped, or None if name
        was not a metadata cell (did not start with the prefix)
    '''
    was_stripped = [False] # out-param ref filled in by remove_prefix()
    stripped = strings.remove_prefix(metadata_prefix, name, was_stripped)
    if not was_stripped[0]: return None
    return stripped
def cleanup(val):
    '''Normalizes one raw input value for import: unicode-ifies and
    whitespace-cleans it, and maps the empty string and the SQL NULL escape
    (\\N) to None. None passes through unchanged.'''
    if val is None: return None
    cleaned = strings.cleanup(strings.ustr(val))
    return util.none_if(cleaned, u'', u'\\N')
def main_():
    '''Runs the import: reads configuration from env vars, maps each input
    (DB staging table, XML on stdin, or CSV on stdin) through the map
    spreadsheets given as command-line args, and writes the result to the
    output DB or as XML on stdout.
    
    Exits via ex_tracker.exit(), whose status reflects the # of errors.
    '''
    env_names = []
    def usage_err():
        raise SystemExit('Usage: '+opts.env_usage(env_names, True)+' '
            +sys.argv[0]+''' [map_path...] [<input] [>output]
note: row #s start with 1
verbosity > 3 in commit mode turns on debug_temp mode, which creates real tables
instead of temp tables
''')
    
    ## Get config from env vars
    
    # Modes
    test = opts.env_flag('test', False, env_names)
    commit = opts.env_flag('commit', False, env_names) and not test
        # never commit in test mode
    redo = opts.env_flag('redo', False, env_names) and not commit
        # never redo in commit mode (run `make schemas/reinstall` instead)
    
    # Ranges
    start = util.cast(int, util.coalesce(util.none_if(
        opts.get_env_var('start', None, env_names), u''), 1)) # 1-based
    # Make start interally 0-based.
    # It's 1-based to the user to match up with the staging table row #s.
    start -= 1
    if test: n_default = 1
    else: n_default = None
    n = util.cast(int, util.none_if(opts.get_env_var('n', n_default, env_names),
        u''))
    end = n
    if end != None: end += start
    
    # Debugging
    verbosity = util.cast(float, opts.get_env_var('verbosity', None, env_names))
    opts.get_env_var('profile_to', None, env_names) # add to env_names
    
    # DB
    def get_db_config(prefix):
        # Collects e.g. inEngine/inHost/... or outEngine/... into a dict
        return opts.get_env_vars(sql.db_config_names, prefix, env_names)
    in_db_config = get_db_config('in')
    out_db_config = get_db_config('out')
    in_is_db = 'engine' in in_db_config
    out_is_db = 'engine' in out_db_config
    in_schema = opts.get_env_var('in_schema', None, env_names)
    in_table = opts.get_env_var('in_table', None, env_names)
    if in_schema != None:
        for config in [in_db_config, out_db_config]:
            config['schemas'] += ','+in_schema
    
    # Optimization
    cache_sql = opts.env_flag('cache_sql', True, env_names)
    by_col = in_db_config == out_db_config and opts.env_flag('by_col', False,
        env_names) # by-column optimization only applies if mapping to same DB
    if test: cpus_default = 0
    else: cpus_default = 0 # or None to use parallel processing by default
    cpus = util.cast(int, util.none_if(opts.get_env_var('cpus', cpus_default,
        env_names), u''))
    
    # Set default verbosity. Must happen after by_col is set.
    if verbosity == None:
        if test: verbosity = 0.5 # automated tests should not be verbose
        elif by_col: verbosity = 3 # show all queries to assist debugging
        else: verbosity = 1.1 # just show row progress
    
    # fix verbosity
    if by_col and not test: verbosity = ints.set_min(verbosity, 2)
        # live column-based import MUST be run with verbosity 2+ (3 preferred)
        # to provide debugging information for often-complex errors.
        # without this, debugging is effectively impossible.
        # automated tests are exempt from this because they output to the screen
    
    ##
    
    # Logging
    verbose_errors = test and verbosity > 0
    debug = verbosity >= 1.5
    def log(msg, level=1):
        '''Higher level -> more verbose'''
        if level <= verbosity:
            if verbosity <= 2:
                if level == 1.5: msg = '# '+msg # msg is Redmine list item
                elif msg.startswith('DB query:'): # remove extra debug info
                    first_line, nl, msg = msg.partition('\n')
            elif level > 1: msg = '['+str(level)+'] '+msg # include level in msg
            
            sys.stderr.write(strings.to_raw_str(msg.rstrip('\n')+'\n'))
    if debug: log_debug = lambda msg, level=2: log(msg, level)
    else: log_debug = sql.log_debug_none
    
    # Parse args
    map_paths = sys.argv[1:]
    if map_paths == []:
        # With no maps, only XML->DB passthrough makes sense
        if in_is_db or not out_is_db: usage_err()
        else: map_paths = [None]
    
    def connect_db(db_config):
        log('Connecting to '+sql.db_config_str(db_config))
        return sql.connect(db_config, caching=cache_sql, autocommit=commit,
            debug_temp=verbosity > 3 and commit, log_debug=log_debug)
    
    if end != None: end_str = str(end-1) # end is one past the last #
    else: end_str = 'end'
    log('Processing input rows '+str(start)+'-'+end_str)
    
    ex_tracker = exc.ExPercentTracker(iter_text='row')
    profiler = profiling.ItersProfiler(start_now=True, iter_text='row')
    
    # Parallel processing
    pool = parallelproc.MultiProducerPool(cpus)
    log('Using '+str(pool.process_ct)+' parallel CPUs')
    
    # Set up DB access
    row_ins_ct_ref = [0] # mutable ref: total rows inserted, shared with callees
    if out_is_db:
        out_db = connect_db(out_db_config)
        def is_rel_func(name):
            # Whether name is a relational function (special-cased or in the DB)
            return (name in db_xml.put_special_funcs
                or sql.function_exists(out_db, sql_gen.Function(name)))
    
    doc = xml_dom.create_doc()
    root = doc.documentElement
    out_is_xml_ref = [False] # mutable ref: set per-input by process_input()
    
    in_label_ref = [None] # mutable ref: input label from the map header
    col_defaults = {}
    def update_in_label():
        # NOTE(review): setdefault() only sets 'source' if not already present,
        # so the second call is a no-op once the first succeeds — presumably
        # in_schema is meant to take precedence; confirm. Also assumes the
        # value being set is not None (os.environ requires strings) — TODO
        # confirm callers guarantee this.
        os.environ.setdefault('source', in_schema)
        os.environ.setdefault('source', in_label_ref[0])
    
    def prep_root():
        # Reset the output tree between inputs
        root.clear()
        update_in_label()
    prep_root()
    
    def process_input(root, row_ready, map_path):
        '''Inputs datasource to XML tree, mapping if needed'''
        # Load map header
        in_is_xpaths = True
        out_is_xpaths = True
        out_label = None
        if map_path != None:
            metadata = [] # NOTE(review): appears unused — confirm
            mappings = []
            stream = open(map_path, 'rb')
            reader = csv.reader(stream)
            in_label, out_label = reader.next()[:2]
            
            # NOTE(review): appears unused — maps.col_info() is used instead;
            # confirm before removing
            def split_col_name(name):
                label, sep, root = name.partition(':')
                return label, sep != '', root, []
            
            in_label, in_root, prefixes = maps.col_info(in_label)
            in_is_xpaths = in_root != None
            in_label_ref[0] = in_label
            update_in_label()
            out_label, out_root = maps.col_info(out_label)[:2]
            out_is_xpaths = out_root != None
            if out_is_xpaths: has_types = out_root.find('/*s/') >= 0
                # outer elements are types
            
            for row in reader:
                in_, out = row[:2]
                if out != '': mappings.append([in_, out_root+out])
            
            stream.close()
            
            root.ownerDocument.documentElement.tagName = out_label
        in_is_xml = in_is_xpaths and not in_is_db
        out_is_xml_ref[0] = out_is_xpaths and not out_is_db
        
        def process_rows(process_row, rows, rows_start=0):
            '''Processes input rows
            @param process_row(in_row, i)
            @rows_start The (0-based) row # of the first row in rows. Set this
                only if the pre-start rows have already been skipped.
            '''
            rows = iter(rows)
            
            if end != None: row_nums = xrange(rows_start, end)
            else: row_nums = itertools.count(rows_start)
            i = -1
            for i in row_nums:
                try: row = rows.next()
                except StopIteration:
                    i -= 1 # last row # didn't count
                    break # no more rows
                if i < start: continue # not at start row yet
                
                # Row # is interally 0-based, but 1-based to the user
                log('Processing input row #'+str(i+1), level=1.1)
                process_row(row, i)
                row_ready(i, row)
            row_ct = i-start+1
            return row_ct
        
        def map_rows(get_value, rows, **kw_args):
            '''Maps input rows
            @param get_value(in_, row):str
            '''
            # Prevent collisions if multiple inputs mapping to same output
            outputs_idxs = dict()
            for i, mapping in enumerate(mappings):
                in_, out = mapping
                default = util.NamedTuple(count=1, first=i)
                idxs = outputs_idxs.setdefault(out, default)
                if idxs is not default: # key existed, so there was a collision
                    if idxs.count == 1: # first key does not yet have suffix
                        mappings[idxs.first][1] += collision_suffix+'0'
                    mappings[i][1] += collision_suffix+str(idxs.count)
                    idxs.count += 1
            
            id_node = None
            if out_is_db:
                # Pre-build the output template once; rows then only fill in
                # values (mappings entries become [input, XML nodes] pairs)
                mappings_orig = mappings[:] # save a copy
                mappings[:] = [] # empty existing elements
                for in_, out in mappings_orig:
                    in_str = strings.ustr(in_)
                    is_metadata_ = is_metadata(in_str)
                    if is_metadata_: value = metadata_value(in_str)
                    else: value = '$'+in_str # mark as name
                    
                    # All put_obj()s should return the same id_node
                    nodes, id_node = xpath.put_obj(root, out, '-1', has_types,
                        value) # value is placeholder that documents name
                    if not is_metadata_: mappings.append([in_, nodes])
                if id_node == None:
                    warnings.warn(UserWarning('Map warning: No mappings or no '
                        'column name matches. Are you importing the correct '
                        'input table?'))
                xml_func.simplify(root)
                sys.stdout.write(strings.to_raw_str('<!--put template-->\n'
                    +strings.ustr(root)))
                sys.stdout.flush()
            
            def process_row(row, i):
                row_id = str(i)
                if id_node != None: xml_dom.set_value(id_node, row_id)
                for in_, out in mappings:
                    log_debug('Getting '+strings.ustr(in_))
                    value = cleanup(get_value(in_, row))
                    log_debug('Putting '+strings.urepr(value)+' to '
                        +strings.ustr(out))
                    if out_is_db: # out is list of XML nodes
                        for node in out: xml_dom.set_value(node, value)
                    elif value != None: # out is XPath
                        xpath.put_obj(root, out, row_id, has_types, value)
            return process_rows(process_row, rows, **kw_args)
        
        def map_table(col_names, rows, **kw_args):
            # Maps tabular input (DB cursor or CSV) by resolving mapping input
            # names to column indexes, then delegating to map_rows()
            col_names_ct = len(col_names)
            col_idxs = util.list_flip(col_names)
            col_names_map = dict(zip(col_names, col_names))
            
            # Resolve names
            mappings_orig = mappings[:] # save a copy
            mappings[:] = [] # empty existing elements
            for in_, out in mappings_orig:
                if is_metadata(in_): mappings.append([in_, out])
                else:
                    try: cols = get_with_prefix(col_names_map, [], in_)
                    except KeyError: pass
                    else:
                        mappings[len(mappings):] = [[db_xml.ColRef(
                            orig, col_idxs[orig]), out] for simp, orig in cols]
                            # can't use += because that uses =
            
            def get_value(in_, row): return row.list[in_.idx]
            def wrap_row(row):
                return util.ListDict(util.list_as_length(row, col_names_ct),
                    col_names, col_idxs) # handle CSV rows of different lengths
            
            return map_rows(get_value, util.WrapIter(wrap_row, rows), **kw_args)
        
        if in_is_db:
            def on_error(e): ex_tracker.track(e)
            
            if by_col: in_db = out_db
            else: in_db = connect_db(in_db_config)
            
            # Get table and schema name
            schema = in_schema # modified, so can't have same name as outer var
            table = in_table # modified, so can't have same name as outer var
            if table == None:
                assert in_is_xpaths
                schema, sep, table = in_root.partition('.')
                if sep == '': # only the table name was specified
                    table = schema
                    schema = None
            table = sql_gen.Table(table, schema)
            
            # Fetch rows
            if by_col: limit = 0 # only fetch column names
            else: limit = n
            cur = sql.select(in_db, table, limit=limit, start=start,
                recover=True, cacheable=False)
            col_names = list(sql.col_names(cur))
            rows = sql.rows(cur)
            
            # inline metadata value columns
            col_default_values = {}
            for col_name in col_names:
                col = sql_gen.Col(col_name, table)
                if sql.col_is_constant(in_db, col):
                    col_default_values[col_name] = (metadata_prefix +
                        sql.col_default_value(in_db, col))
            for i, mapping in enumerate(mappings):
                in_, out = mapping
                mappings[i] = (col_default_values.get(in_, in_), out)
            
            if by_col:
                map_table(col_names, []) # just create the template
                
                # NOTE(review): table was replaced by a sql_gen.Table above,
                # so `table != None` is always true here
                if table != None and start == 0 and n == None: # full re-import
                    log('Clearing errors table')
                    errors_table_ = sql_io.errors_table(in_db, table)
                    if errors_table_ != None:
                        sql.drop_table(in_db, errors_table_)
                
                # Strip XML functions not in the DB
                xml_func.process(root, is_rel_func=is_rel_func)
                if debug: log_debug('Putting stripped:\n'+strings.ustr(root))
                    # only calc if debug
                
                # Import rows
                in_row_ct_ref = [0]
                db_xml.put_table(in_db, root.firstChild, table, in_row_ct_ref,
                    row_ins_ct_ref, n, start, on_error, col_defaults)
                row_ct = in_row_ct_ref[0]
            else:
                # Use normal by-row method
                row_ct = map_table(col_names, rows, rows_start=start)
                    # rows_start: pre-start rows have been skipped
                
                in_db.db.close()
        elif in_is_xml:
            stdin = streams.LineCountStream(sys.stdin)
            def on_error(e):
                exc.add_msg(e, term.emph('input line #:')+' '
                    +str(stdin.line_num))
                ex_tracker.track(e)
            
            def get_rows(doc2rows):
                return iters.flatten(itertools.imap(doc2rows,
                    xml_parse.docs_iter(stdin, on_error)))
            
            if map_path == None:
                # No map: pass input elements through to the output tree
                def doc2rows(in_xml_root):
                    iter_ = xml_dom.NodeElemIter(in_xml_root)
                    util.skip(iter_, xml_dom.is_text) # skip metadata
                    return iter_
                
                row_ct = process_rows(lambda row, i: root.appendChild(row),
                    get_rows(doc2rows))
            else:
                def doc2rows(in_xml_root):
                    rows = xpath.get(in_xml_root, in_root, limit=end)
                    if rows == []: warnings.warn(UserWarning('Map warning: '
                        'Root "'+in_root+'" not found in input'))
                    return rows
                
                def get_value(in_, row):
                    nodes = xpath.get(row, in_, allow_rooted=False)
                    if nodes != []: return xml_dom.value(nodes[0])
                    else: return None
                
                row_ct = map_rows(get_value, get_rows(doc2rows))
        else: # input is CSV
            reader, col_names = csvs.reader_and_header(sys.stdin)
            row_ct = map_table(col_names, reader)
        
        return row_ct
    
    def process_inputs(root, row_ready):
        # Runs every map over the input; returns the total # of rows processed
        row_ct = 0
        for map_path in map_paths:
            row_ct += process_input(root, row_ready, map_path)
        return row_ct
    
    pool.share_vars(locals())
    if out_is_db:
        try:
            if redo: sql.empty_db(out_db)
            pool.share_vars(locals())
            
            def row_ready(row_num, input_row):
                # Lazily-built row description, used only for debug/error output
                row_str_ = [None]
                def row_str():
                    if row_str_[0] == None:
                        # Row # is interally 0-based, but 1-based to the user
                        row_str_[0] = (term.emph('row #:')+' '+str(row_num+1)
                            +'\n'+term.emph('input row:')+'\n'
                            +strings.ustr(input_row))
                        if verbose_errors: row_str_[0] += ('\n'
                            +term.emph('output row:')+'\n'+strings.ustr(root))
                    return row_str_[0]
                
                if debug: log_debug(row_str()) # only calc if debug
                
                def on_error(e):
                    exc.add_msg(e, row_str())
                    ex_tracker.track(e, row_num, detail=verbose_errors)
                pool.share_vars(locals())
                
                row_root = root.cloneNode(True) # deep copy so don't modify root
                xml_func.process(row_root, on_error, is_rel_func, out_db)
                if debug: log_debug('Putting processed:\n'
                    +strings.ustr(row_root)) # only calc if debug
                if not xml_dom.is_empty(row_root):
                    assert xml_dom.has_one_child(row_root)
                    try:
                        # Savepoint so a failed row doesn't abort the whole
                        # transaction
                        sql.with_savepoint(out_db,
                            lambda: db_xml.put(out_db, row_root.firstChild,
                                row_ins_ct_ref, on_error, col_defaults))
                    except sql.DatabaseErrors, e: on_error(e)
            
            row_ct = process_inputs(root, row_ready)
            sys.stdout.write('Inserted '+str(row_ins_ct_ref[0])+
                ' new rows into database\n')
            sys.stdout.flush()
            
            # Consume asynchronous tasks
            pool.main_loop()
        finally: out_db.close()
    else:
        def on_error(e): ex_tracker.track(e)
        def row_ready(row_num, input_row): pass
        row_ct = process_inputs(root, row_ready)
        xml_func.process(root, on_error)
        if out_is_xml_ref[0]:
            doc.writexml(sys.stdout, **xml_dom.prettyxml_config)
        else: # output is CSV
            raise NotImplementedError('CSV output not supported yet')
    
    # Consume any asynchronous tasks not already consumed above
    pool.main_loop()
    
    profiler.stop(row_ct)
    if not by_col: ex_tracker.add_iters(row_ct) # only if errors are done by row
    log('Processed '+str(row_ct)+' input rows')
    log(profiler.msg())
    log(ex_tracker.msg())
    ex_tracker.exit()
def main():
514
    try: main_()
515
    except Parser.SyntaxError, e: raise SystemExit(strings.ustr(e))
516

    
517
if __name__ == '__main__':
    # Read profile_to here (outside main_()) so profiling wraps the whole run
    profile_to = opts.get_env_var('profile_to', None)
    if profile_to != None:
        import cProfile
        sys.stderr.write('Profiling to '+profile_to+'\n')
        # cProfile.run() exec()s its first argument, so a code object is
        # accepted as well as a source string; stats are saved to profile_to
        cProfile.run(main.func_code, profile_to)
    else: main()