Project

General

Profile

1
#!/usr/bin/env python
2
# Maps one datasource to another, using a map spreadsheet if needed
3
# Exit status is the # of errors in the import, up to the maximum exit status
4
# For outputting an XML file to a PostgreSQL database, use the general format of
5
# http://vegbank.org/vegdocs/xml/vegbank_example_ver1.0.2.xml
6

    
7
import csv
8
import itertools
9
import os.path
10
import sys
11
import xml.dom.minidom as minidom
12

    
13
sys.path.append(os.path.dirname(__file__)+"/../lib")
14

    
15
import csvs
16
import db_xml
17
import exc
18
import iters
19
import maps
20
import opts
21
import parallelproc
22
import Parser
23
import profiling
24
import sql
25
import streams
26
import strings
27
import term
28
import util
29
import xpath
30
import xml_dom
31
import xml_func
32
import xml_parse
33

    
34
def get_with_prefix(map_, prefixes, key):
    '''Gets all entries for the given key with any of the given prefixes.
    
    @param map_ the mapping to look keys up in
    @param prefixes list of prefix strings to try (no-prefix is always tried)
    @param key the unprefixed key
    @return list of tuple(found_key, found_value), in prefix order
    @raise KeyError if no prefixed key was found (the last miss is re-raised)
    '''
    values = []
    last_miss = None # last KeyError; re-raised if nothing matched
    for key_ in strings.with_prefixes(['']+prefixes, key): # also with no prefix
        try: value = map_[key_]
        except KeyError as e: # keep going, but remember the miss
            last_miss = e
            continue
        values.append((key_, value))
    
    if values != []: return values
    else: raise last_miss # re-raise last KeyError
46

    
47
def metadata_value(name):
    '''Stub for the removed metadata-lookup feature.
    
    Always reports that no metadata exists for name by returning None, so
    callers fall through to their normal (non-metadata) value lookup.
    '''
    return None
48

    
49
def cleanup(val):
    '''Normalizes a raw input value for import.
    
    Converts val to a unicode string, applies strings.cleanup() to it, and
    maps the empty string and u'\\N' (presumably PostgreSQL COPY's NULL
    marker — confirm) to None.
    
    @param val the raw value, or None
    @return the cleaned-up unicode string, or None if the value is empty/NULL
    '''
    if val is None: return val # identity test; avoids val.__eq__ surprises
    return util.none_if(strings.cleanup(strings.ustr(val)), u'', u'\\N')
52

    
53
def main_():
    '''Drives the import: reads configuration from environment variables, then
    maps each input datasource (input DB table, XML stream, or CSV on stdin)
    through the map spreadsheets given on the command line into the output
    (a DB, or an XML document written to stdout).
    '''
    env_names = [] # env vars consulted so far; collected for the usage message
    def usage_err():
        # Abort with a usage message listing all recognized env vars
        raise SystemExit('Usage: '+opts.env_usage(env_names, True)+' '
            +sys.argv[0]+' [map_path...] [<input] [>output]\n'
            'Note: Row #s start with 1')
    
    ## Get config from env vars
    
    # Modes
    test = opts.env_flag('test', False, env_names)
    commit = opts.env_flag('commit', False, env_names) and not test
        # never commit in test mode
    redo = opts.env_flag('redo', test, env_names) and not commit
        # never redo in commit mode (manually run `make empty_db` instead)
    
    # Ranges
    start = util.cast(int, opts.get_env_var('start', 1, env_names)) # 1-based
    # Make start internally 0-based.
    # It's 1-based to the user to match up with the staging table row #s.
    start -= 1
    if test: n_default = 1
    else: n_default = None
    # n: max # of rows to process (empty string means no limit)
    n = util.cast(int, util.none_if(opts.get_env_var('n', n_default, env_names),
        u''))
    end = n
    if end != None: end += start # end is one past the last row # (0-based)
    
    # Debugging
    debug = opts.env_flag('debug', False, env_names)
    sql.run_raw_query.debug = debug
    verbose = debug or opts.env_flag('verbose', not test, env_names)
    verbose_errors = opts.env_flag('verbose_errors', test or debug, env_names)
    opts.get_env_var('profile_to', None, env_names) # add to env_names
    
    # DB
    def get_db_config(prefix):
        # Reads the prefixed DB connection settings (e.g. in_engine, out_engine)
        return opts.get_env_vars(sql.db_config_names, prefix, env_names)
    in_db_config = get_db_config('in')
    out_db_config = get_db_config('out')
    in_is_db = 'engine' in in_db_config
    out_is_db = 'engine' in out_db_config
    in_schema = opts.get_env_var('in_schema', None, env_names)
    in_table = opts.get_env_var('in_table', None, env_names)
    
    # Optimization
    cache_sql = opts.env_flag('cache_sql', True, env_names)
    by_col = in_db_config == out_db_config and opts.env_flag('by_col', False,
        env_names) # by-column optimization only applies if mapping to same DB
    if test: cpus_default = 0
    else: cpus_default = 0 # or None to use parallel processing by default
    cpus = util.cast(int, util.none_if(opts.get_env_var('cpus', cpus_default,
        env_names), u''))
    
    ##
    
    # Logging
    def log(msg, on=verbose):
        # Writes msg to stderr when on (default: the verbose flag)
        if on: sys.stderr.write(msg+'\n')
    if debug: log_debug = lambda msg: log(msg, debug)
    else: log_debug = sql.log_debug_none
    
    # Parse args
    map_paths = sys.argv[1:]
    if map_paths == []:
        # With no maps, only XML-in -> DB-out passthrough makes sense
        if in_is_db or not out_is_db: usage_err()
        else: map_paths = [None]
    
    def connect_db(db_config):
        log('Connecting to '+sql.db_config_str(db_config))
        return sql.connect(db_config, log_debug=log_debug, caching=cache_sql)
    
    if end != None: end_str = str(end-1) # end is one past the last #
    else: end_str = 'end'
    log('Processing input rows '+str(start)+'-'+end_str)
    
    ex_tracker = exc.ExPercentTracker(iter_text='row')
    profiler = profiling.ItersProfiler(start_now=True, iter_text='row')
    
    # Parallel processing
    pool = parallelproc.MultiProducerPool(cpus)
    log('Using '+str(pool.process_ct)+' parallel CPUs')
    
    doc = xml_dom.create_doc()
    root = doc.documentElement
    out_is_xml_ref = [False] # list wrapper so nested defs can rebind it
    in_label_ref = [None] # ditto: current input's label, for error context
    def update_in_label():
        # Records the input label in the XML tree (under /_ignore)
        if in_label_ref[0] != None:
            xpath.get(root, '/_ignore/inLabel="'+in_label_ref[0]+'"', True)
    def prep_root():
        # Resets the output tree between inputs, keeping the input label
        root.clear()
        update_in_label()
    prep_root()
    
    # Define before the out_is_db section because it's used by by_col
    row_ins_ct_ref = [0]
    
    def process_input(root, row_ready, map_path):
        '''Inputs datasource to XML tree, mapping if needed.
        @param row_ready callback(row_num, input_row) run after each row
        @param map_path path of the map spreadsheet, or None for passthrough
        @return the # of input rows processed
        '''
        # Load map header
        in_is_xpaths = True
        out_is_xpaths = True
        out_label = None
        if map_path != None:
            # NOTE(review): metadata is never used below — possibly a leftover
            # from the removed metadata feature; confirm before deleting
            metadata = []
            mappings = []
            stream = open(map_path, 'rb')
            reader = csv.reader(stream)
            # Header row: input column spec, output column spec
            in_label, out_label = reader.next()[:2]
            
            # NOTE(review): split_col_name appears unused in this function —
            # maps.col_info() is used instead; confirm before deleting
            def split_col_name(name):
                label, sep, root = name.partition(':')
                label, sep2, prefixes_str = label.partition('[')
                prefixes_str = strings.remove_suffix(']', prefixes_str)
                prefixes = strings.split(',', prefixes_str)
                return label, sep != '', root, prefixes
                    # extract datasrc from "datasrc[data_format]"
            
            in_label, in_root, prefixes = maps.col_info(in_label)
            in_is_xpaths = in_root != None
            in_label_ref[0] = in_label
            update_in_label()
            out_label, out_root = maps.col_info(out_label)[:2]
            out_is_xpaths = out_root != None
            # NOTE(review): has_types is only bound when out_is_xpaths;
            # map_rows() assumes that holds whenever it reads has_types
            if out_is_xpaths: has_types = out_root.find('/*s/') >= 0
                # outer elements are types
            
            # Body rows: each maps an input column to an output XPath
            for row in reader:
                in_, out = row[:2]
                if out != '': mappings.append([in_, out_root+out])
            
            stream.close()
            
            root.ownerDocument.documentElement.tagName = out_label
        in_is_xml = in_is_xpaths and not in_is_db
        out_is_xml_ref[0] = out_is_xpaths and not out_is_db
        
        def process_rows(process_row, rows, rows_start=0):
            '''Processes input rows.
            @param process_row callback(in_row, i) run on each row
            @param rows_start The (0-based) row # of the first row in rows. Set
                this only if the pre-start rows have already been skipped.
            @return the # of rows processed (within the start..end window)
            '''
            rows = iter(rows)
            
            if end != None: row_nums = xrange(rows_start, end)
            else: row_nums = itertools.count(rows_start)
            i = -1
            for i in row_nums:
                try: row = rows.next()
                except StopIteration:
                    i -= 1 # last row # didn't count
                    break # no more rows
                if i < start: continue # not at start row yet
                
                process_row(row, i)
                row_ready(i, row)
            row_ct = i-start+1
            return row_ct
        
        def map_rows(get_value, rows, **kw_args):
            '''Maps input rows through mappings into the XML tree.
            @param get_value callback(in_, row):str that fetches a column value
            @param kw_args passed through to process_rows()
            '''
            # Prevent collisions if multiple inputs mapping to same output
            outputs_idxs = dict()
            for i, mapping in enumerate(mappings):
                in_, out = mapping
                default = util.NamedTuple(count=1, first=i)
                idxs = outputs_idxs.setdefault(out, default)
                if idxs is not default: # key existed, so there was a collision
                    if idxs.count == 1: # first key does not yet have /_alt/#
                        mappings[idxs.first][1] += '/_alt/0'
                    mappings[i][1] += '/_alt/'+str(idxs.count)
                    idxs.count += 1
            
            id_node = None
            if out_is_db:
                # Pre-build one template tree; rows then just fill in values
                for i, mapping in enumerate(mappings):
                    in_, out = mapping
                    # All put_obj()s should return the same id_node
                    nodes, id_node = xpath.put_obj(root, out, '-1', has_types,
                        '$'+str(in_)) # value is placeholder that documents name
                    mappings[i] = [in_, nodes]
                assert id_node != None
                
                if debug: # only calc if debug
                    log_debug('Put template:\n'+str(root))
            
            def process_row(row, i):
                row_id = str(i)
                if id_node != None: xml_dom.set_value(id_node, row_id)
                for in_, out in mappings:
                    log_debug('Getting '+str(in_))
                    value = metadata_value(in_)
                    if value == None: value = cleanup(get_value(in_, row))
                    log_debug('Putting '+repr(value)+' to '+str(out))
                    if out_is_db: # out is list of XML nodes
                        for node in out: xml_dom.set_value(node, value)
                    elif value != None: # out is XPath
                        xpath.put_obj(root, out, row_id, has_types, value)
            return process_rows(process_row, rows, **kw_args)
        
        def map_table(col_names, rows, **kw_args):
            '''Maps tabular input (DB cursor or CSV), resolving mapping input
            column names to column indexes first.'''
            col_names_ct = len(col_names)
            col_idxs = util.list_flip(col_names)
            
            # Resolve prefixes
            mappings_orig = mappings[:] # save a copy
            mappings[:] = [] # empty existing elements
            for in_, out in mappings_orig:
                if metadata_value(in_) == None:
                    try: cols = get_with_prefix(col_idxs, prefixes, in_)
                    except KeyError: pass # unmatched column: drop the mapping
                    else: mappings[len(mappings):] = [[db_xml.ColRef(*col), out]
                        for col in cols] # can't use += because that uses =
            
            def get_value(in_, row): return row.list[in_.idx]
            def wrap_row(row):
                return util.ListDict(util.list_as_length(row, col_names_ct),
                    col_names, col_idxs) # handle CSV rows of different lengths
            
            return map_rows(get_value, util.WrapIter(wrap_row, rows), **kw_args)
        
        stdin = streams.LineCountStream(sys.stdin)
        def on_error(e):
            # Tags the error with the current stdin line # before tracking it
            exc.add_msg(e, term.emph('input line #:')+' '+str(stdin.line_num))
            ex_tracker.track(e)
        
        if in_is_db:
            in_db = connect_db(in_db_config)
            
            # Get table and schema name
            schema = in_schema # modified, so can't have same name as outer var
            table = in_table # modified, so can't have same name as outer var
            if table == None:
                assert in_is_xpaths
                schema, sep, table = in_root.partition('.')
                if sep == '': # only the table name was specified
                    table = schema
                    schema = None
            
            # Fetch rows
            if by_col: limit = 0 # only fetch column names
            else: limit = n
            cur = sql.select(in_db, sql.qual_name(in_db, schema, table),
                limit=limit, start=start, cacheable=False, table_is_esc=True)
            col_names = list(sql.col_names(cur))
            
            if by_col:
                map_table(col_names, []) # just create the template
                xml_func.strip(root)
                if debug: log_debug('Putting stripped:\n'+str(root))
                    # only calc if debug
                # NOTE(review): this branch never assigns row_ct, so the
                # `return row_ct` below would raise NameError in by_col mode —
                # confirm whether put_table() should report a count
                db_xml.put_table(in_db, root.firstChild, table, schema, commit,
                    row_ins_ct_ref)
            else:
                # Use normal by-row method
                row_ct = map_table(col_names, sql.rows(cur), rows_start=start)
                    # rows_start: pre-start rows have been skipped
            
            in_db.db.close()
        elif in_is_xml:
            def get_rows(doc2rows):
                # Streams XML documents from stdin, flattening each into rows
                return iters.flatten(itertools.imap(doc2rows,
                    xml_parse.docs_iter(stdin, on_error)))
            
            if map_path == None:
                # Passthrough: each child element of the input root is a row
                def doc2rows(in_xml_root):
                    iter_ = xml_dom.NodeElemIter(in_xml_root)
                    util.skip(iter_, xml_dom.is_text) # skip metadata
                    return iter_
                
                row_ct = process_rows(lambda row, i: root.appendChild(row),
                    get_rows(doc2rows))
            else:
                # Mapped: rows are the nodes matching the map's input root
                def doc2rows(in_xml_root):
                    rows = xpath.get(in_xml_root, in_root, limit=end)
                    if rows == []: raise SystemExit('Map error: Root "'
                        +in_root+'" not found in input')
                    return rows
                
                def get_value(in_, row):
                    in_ = './{'+(','.join(strings.with_prefixes(
                        ['']+prefixes, in_)))+'}' # also with no prefix
                    nodes = xpath.get(row, in_, allow_rooted=False)
                    if nodes != []: return xml_dom.value(nodes[0])
                    else: return None
                
                row_ct = map_rows(get_value, get_rows(doc2rows))
        else: # input is CSV
            # NOTE(review): map_ is never used below — confirm before deleting
            map_ = dict(mappings)
            reader, col_names = csvs.reader_and_header(sys.stdin)
            row_ct = map_table(col_names, reader)
        
        return row_ct
    
    def process_inputs(root, row_ready):
        '''Runs process_input() for every map path; returns the total row #.'''
        row_ct = 0
        for map_path in map_paths:
            row_ct += process_input(root, row_ready, map_path)
        return row_ct
    
    pool.share_vars(locals())
    if out_is_db:
        out_db = connect_db(out_db_config)
        try:
            if redo: sql.empty_db(out_db)
            pool.share_vars(locals())
            
            def row_ready(row_num, input_row):
                '''Per-row output hook: processes the row's XML tree and puts
                it into the output DB inside a savepoint.'''
                row_str_ = [None] # lazily-built row description cache
                def row_str():
                    if row_str_[0] == None:
                        # Row # is internally 0-based, but 1-based to the user
                        row_str_[0] = (term.emph('row #:')+' '+str(row_num+1)
                            +'\n'+term.emph('input row:')+'\n'+str(input_row))
                        if verbose_errors: row_str_[0] += ('\n'
                            +term.emph('output row:')+'\n'+str(root))
                    return row_str_[0]
                
                if debug: log_debug(row_str()) # only calc if debug
                
                def on_error(e):
                    exc.add_msg(e, row_str())
                    ex_tracker.track(e, row_num, detail=verbose_errors)
                pool.share_vars(locals())
                
                row_root = root.cloneNode(True) # deep copy so don't modify root
                xml_func.process(row_root, on_error, out_db)
                if not xml_dom.is_empty(row_root):
                    assert xml_dom.has_one_child(row_root)
                    try:
                        # Savepoint so one bad row doesn't abort the whole txn
                        sql.with_savepoint(out_db,
                            lambda: db_xml.put(out_db, row_root.firstChild,
                                row_ins_ct_ref, on_error))
                        if commit: out_db.db.commit()
                    except sql.DatabaseErrors, e: on_error(e)
            
            row_ct = process_inputs(root, row_ready)
            sys.stdout.write('Inserted '+str(row_ins_ct_ref[0])+
                ' new rows into database\n')
            
            # Consume asynchronous tasks
            pool.main_loop()
        finally:
            if out_db.connected():
                out_db.db.rollback() # anything uncommitted is discarded
                out_db.db.close()
    else:
        def on_error(e): ex_tracker.track(e)
        def row_ready(row_num, input_row): pass # no per-row output work
        row_ct = process_inputs(root, row_ready)
        xml_func.process(root, on_error)
        if out_is_xml_ref[0]:
            doc.writexml(sys.stdout, **xml_dom.prettyxml_config)
        else: # output is CSV
            raise NotImplementedError('CSV output not supported yet')
    
    # Consume any asynchronous tasks not already consumed above
    pool.main_loop()
    
    # Final stats; exit status reflects the # of errors (see file header)
    profiler.stop(row_ct)
    ex_tracker.add_iters(row_ct)
    if verbose:
        sys.stderr.write('Processed '+str(row_ct)+' input rows\n')
        sys.stderr.write(profiler.msg()+'\n')
        sys.stderr.write(ex_tracker.msg()+'\n')
    ex_tracker.exit()
423

    
424
def main():
    '''Runs main_(), converting Parser.SyntaxError into a clean SystemExit
    carrying the error message (no traceback for the user).'''
    try: main_()
    except Parser.SyntaxError as e: raise SystemExit(str(e))
427

    
428
if __name__ == '__main__':
    # Optional profiling: if the profile_to env var is set, profile the whole
    # run and save the stats to that path (readable with the pstats module)
    profile_to = opts.get_env_var('profile_to', None)
    if profile_to != None:
        import cProfile
        sys.stderr.write('Profiling to '+profile_to+'\n')
        # NOTE(review): passes main's code object (Python 2 func_code) instead
        # of the usual 'main()' command string; cProfile.run() exec()s its
        # argument in __main__'s namespace, so a code object also works —
        # confirm this is intentional vs. cProfile.run('main()', profile_to)
        cProfile.run(main.func_code, profile_to)
    else: main()
(25-25/48)