Project

General

Profile

1
#!/usr/bin/env python
2
# Maps one datasource to another, using a map spreadsheet if needed
3
# Exit status is the # of errors in the import, up to the maximum exit status
4
# For outputting an XML file to a PostgreSQL database, use the general format of
5
# http://vegbank.org/vegdocs/xml/vegbank_example_ver1.0.2.xml
6

    
7
import csv
8
import itertools
9
import os.path
10
import sys
11
import xml.dom.minidom as minidom
12

    
13
sys.path.append(os.path.dirname(__file__)+"/../lib")
14

    
15
import csvs
16
import exc
17
import iters
18
import maps
19
import opts
20
import parallelproc
21
import Parser
22
import profiling
23
import sql
24
import streams
25
import strings
26
import term
27
import util
28
import xpath
29
import xml_dom
30
import xml_func
31
import xml_parse
32

    
33
def get_with_prefix(map_, prefixes, key):
34
    '''Gets all entries for the given key with any of the given prefixes'''
35
    values = []
36
    for key_ in strings.with_prefixes(['']+prefixes, key): # also with no prefix
37
        try: value = map_[key_]
38
        except KeyError, e: continue # keep going
39
        values.append(value)
40
    
41
    if values != []: return values
42
    else: raise e # re-raise last KeyError
43

    
44
def metadata_value(name): return None # this feature has been removed
45

    
46
def cleanup(val):
    '''Normalizes an input value for mapping.

    None passes through unchanged; otherwise the value is converted to a
    unicode string, cleaned up, and collapsed to None if it equals the
    empty string or the SQL NULL escape (\\N).
    '''
    if val == None:
        return val
    cleaned = strings.cleanup(strings.ustr(val))
    return util.none_if(cleaned, u'', u'\\N')
49

    
50
def main_():
    '''Runs the mapping: reads configuration from env vars, loads the map
    spreadsheets given as args, maps each input row (from a DB table, XML
    stream, or CSV on stdin) into an XML tree, and writes the result either
    to the output DB or as XML on stdout. Progress/stats go to stderr.
    '''
    # Every opts.* call below appends the env var's name to env_names so
    # usage_err() can list all recognized env vars
    env_names = []
    def usage_err():
        raise SystemExit('Usage: '+opts.env_usage(env_names, True)+' '
            +sys.argv[0]+' [map_path...] [<input] [>output]\n'
            'Note: Row #s start with 1')
    
    ## Get config from env vars
    
    # Modes
    test = opts.env_flag('test', False, env_names)
    commit = opts.env_flag('commit', False, env_names) and not test
        # never commit in test mode
    redo = opts.env_flag('redo', test, env_names) and not commit
        # never redo in commit mode (manually run `make empty_db` instead)
    
    # Ranges
    start = util.cast(int, opts.get_env_var('start', 1, env_names)) # 1-based
    # Make start internally 0-based.
    # It's 1-based to the user to match up with the staging table row #s.
    start -= 1
    if test: n_default = 1
    else: n_default = None
    # n (row count) may be given as the empty string to mean "no limit"
    n = util.cast(int, util.none_if(opts.get_env_var('n', n_default, env_names),
        u''))
    end = n
    if end != None: end += start # end is exclusive (one past the last row #)
    
    # Debugging
    debug = opts.env_flag('debug', False, env_names)
    sql.run_raw_query.debug = debug
    verbose = debug or opts.env_flag('verbose', not test, env_names)
    opts.get_env_var('profile_to', None, env_names) # add to env_names
        # (the value itself is read in the __main__ guard, not here)
    
    # DB
    def get_db_config(prefix):
        return opts.get_env_vars(sql.db_config_names, prefix, env_names)
    in_db_config = get_db_config('in')
    out_db_config = get_db_config('out')
    # Presence of the 'engine' key is what marks a side as "is a database"
    in_is_db = 'engine' in in_db_config
    out_is_db = 'engine' in out_db_config
    in_schema = opts.get_env_var('in_schema', None, env_names)
    in_table = opts.get_env_var('in_table', None, env_names)
    
    # Optimization
    by_col = in_db_config == out_db_config and opts.env_flag('by_col', False,
        env_names) # by-column optimization only applies if mapping to same DB
    if test: cpus_default = 0
    else: cpus_default = None
    cpus = util.cast(int, util.none_if(opts.get_env_var('cpus', cpus_default,
        env_names), u''))
    
    ##
    
    # Logging
    def log(msg, on=verbose):
        if on: sys.stderr.write(msg+'\n')
    if debug: log_debug = lambda msg: log(msg, debug)
    else: log_debug = sql.log_debug_none
    
    # Parse args
    map_paths = sys.argv[1:]
    if map_paths == []:
        # With no maps, only XML-in -> DB-out passthrough makes sense
        if in_is_db or not out_is_db: usage_err()
        else: map_paths = [None]
    
    def connect_db(db_config):
        log('Connecting to '+sql.db_config_str(db_config))
        return sql.connect(db_config, log_debug=log_debug)
    
    if end != None: end_str = str(end-1) # end is one past the last #
    else: end_str = 'end'
    log('Processing input rows '+str(start)+'-'+end_str)
    
    ex_tracker = exc.ExPercentTracker(iter_text='row')
    profiler = profiling.ItersProfiler(start_now=True, iter_text='row')
    
    # Parallel processing
    pool = parallelproc.MultiProducerPool(cpus)
    log('Using '+str(pool.process_ct)+' parallel CPUs')
    
    doc = xml_dom.create_doc()
    root = doc.documentElement
    # Single-element lists act as mutable cells writable from the nested
    # closures below (Python 2 has no `nonlocal`)
    out_is_xml_ref = [False]
    in_label_ref = [None]
    def update_in_label():
        if in_label_ref[0] != None:
            xpath.get(root, '/_ignore/inLabel="'+in_label_ref[0]+'"', True)
    def prep_root():
        root.clear()
        update_in_label()
    prep_root()
    
    # Define before the out_is_db section because it's used by by_col
    row_ins_ct_ref = [0]
    
    def process_input(root, row_ready, map_path):
        '''Inputs datasource to XML tree, mapping if needed

        @param root XML element to build the output rows under
        @param row_ready(row_num, input_row) called after each row is mapped
        @param map_path path to a map spreadsheet, or None for no mapping
        @return the # of rows processed
        '''
        # Load map header
        in_is_xpaths = True
        out_is_xpaths = True
        out_label = None
        if map_path != None:
            metadata = []
            mappings = []
            stream = open(map_path, 'rb')
            reader = csv.reader(stream)
            # First spreadsheet row is the header: input col, output col
            in_label, out_label = reader.next()[:2]
            
            def split_col_name(name):
                label, sep, root = name.partition(':')
                label, sep2, prefixes_str = label.partition('[')
                prefixes_str = strings.remove_suffix(']', prefixes_str)
                prefixes = strings.split(',', prefixes_str)
                return label, sep != '', root, prefixes
                    # extract datasrc from "datasrc[data_format]"
            
            in_label, in_root, prefixes = maps.col_info(in_label)
            in_is_xpaths = in_root != None # a root means the side is XPaths
            in_label_ref[0] = in_label
            update_in_label()
            out_label, out_root = maps.col_info(out_label)[:2]
            out_is_xpaths = out_root != None
            if out_is_xpaths: has_types = out_root.find('/*s/') >= 0
                # outer elements are types
                # NOTE(review): has_types is only bound on this branch but is
                # read in map_rows() when out_is_db -- presumably out_is_db
                # implies out_is_xpaths here; confirm
            
            for row in reader:
                in_, out = row[:2]
                if out != '': mappings.append([in_, out_root+out])
                    # rows with an empty output column are unmapped; skip them
            
            stream.close()
            
            root.ownerDocument.documentElement.tagName = out_label
        in_is_xml = in_is_xpaths and not in_is_db
        out_is_xml_ref[0] = out_is_xpaths and not out_is_db
        
        def process_rows(process_row, rows, rows_start=0):
            '''Processes input rows      
            @param process_row(in_row, i)
            @rows_start The (0-based) row # of the first row in rows. Set this
                only if the pre-start rows have already been skipped.
            '''
            rows = iter(rows)
            
            if end != None: row_nums = xrange(rows_start, end)
            else: row_nums = itertools.count(rows_start)
            i = -1
            for i in row_nums:
                try: row = rows.next()
                except StopIteration:
                    i -= 1 # last row # didn't count
                    break # no more rows
                if i < start: continue # not at start row yet
                
                process_row(row, i)
                row_ready(i, row)
            row_ct = i-start+1
            return row_ct
        
        def map_rows(get_value, rows, **kw_args):
            '''Maps input rows
            @param get_value(in_, row):str
            '''
            # Prevent collisions if multiple inputs mapping to same output
            outputs_idxs = dict()
            for i, mapping in enumerate(mappings):
                in_, out = mapping
                default = util.NamedTuple(count=1, first=i)
                idxs = outputs_idxs.setdefault(out, default)
                if idxs is not default: # key existed, so there was a collision
                    if idxs.count == 1: # first key does not yet have /_alt/#
                        mappings[idxs.first][1] += '/_alt/0'
                    mappings[i][1] += '/_alt/'+str(idxs.count)
                    idxs.count += 1
            
            id_node = None
            if out_is_db:
                # Pre-build one template XML tree; each row then just fills in
                # the per-mapping value nodes instead of re-creating the tree
                for i, mapping in enumerate(mappings):
                    in_, out = mapping
                    # All put_obj()s should return the same id_node
                    nodes, id_node = xpath.put_obj(root, out, '-1', has_types,
                        '$'+str(in_)) # value is placeholder that documents name
                    mappings[i] = [in_, nodes]
                assert id_node != None
                
                if debug: # only str() if debug
                    log_debug('Put template:\n'+str(root))
            
            def process_row(row, i):
                row_id = str(i)
                if id_node != None: xml_dom.set_value(id_node, row_id)
                for in_, out in mappings:
                    log_debug('Getting '+str(in_))
                    value = metadata_value(in_)
                    if value == None: value = cleanup(get_value(in_, row))
                    log_debug('Putting '+repr(value)+' to '+str(out))
                    if out_is_db: # out is list of XML nodes
                        for node in out: xml_dom.set_value(node, value)
                    elif value != None: # out is XPath
                        xpath.put_obj(root, out, row_id, has_types, value)
                if debug: log_debug('Putting:\n'+str(root))# only str() if debug
            return process_rows(process_row, rows, **kw_args)
        
        def map_table(col_names, rows, **kw_args):
            '''Maps rows of a named-column table (DB cursor or CSV)'''
            col_names_ct = len(col_names)
            col_idxs = util.list_flip(col_names)
            
            # Resolve prefixes: rewrite each mapping's input name to the
            # actual column index(es) present in this table
            mappings_orig = mappings[:] # save a copy
            mappings[:] = [] # empty existing elements
            for in_, out in mappings_orig:
                if metadata_value(in_) == None:
                    try: names = get_with_prefix(col_idxs, prefixes, in_)
                    except KeyError: pass # column not in table; drop mapping
                    else: mappings[len(mappings):] = [[v, out] for v in names]
                        # can't use += because that uses assignment
            
            def get_value(in_, row): return row.list[in_]
            def wrap_row(row):
                return util.ListDict(util.list_as_length(row, col_names_ct),
                    col_names, col_idxs) # handle CSV rows of different lengths
            
            return map_rows(get_value, util.WrapIter(wrap_row, rows), **kw_args)
        
        stdin = streams.LineCountStream(sys.stdin)
        def on_error(e):
            exc.add_msg(e, term.emph('input line #:')+' '+str(stdin.line_num))
            ex_tracker.track(e)
        
        if in_is_db:
            in_db = connect_db(in_db_config)
            
            # Get table and schema name
            schema = in_schema # modified, so can't have same name as outer var
            table = in_table # modified, so can't have same name as outer var
            if table == None:
                assert in_is_xpaths
                # Fall back to parsing "schema.table" out of the map's in root
                schema, sep, table = in_root.partition('.')
                if sep == '': # only the table name was specified
                    table = schema
                    schema = None
            table_is_esc = False
            if schema != None:
                table = sql.qual_name(in_db, schema, table)
                table_is_esc = True
            
            # Fetch rows
            if by_col: limit = 0 # only fetch column names
            else: limit = n
            cur = sql.select(in_db, table, limit=limit, start=start,
                table_is_esc=table_is_esc)
            col_names = list(sql.col_names(cur))
            
            if by_col:
                # By-column: map a single sample row whose values name their
                # columns, then let the DB copy whole columns at once
                row_ready = lambda row_num, input_row: None# disable row_ready()
                row = ['$'+v for v in col_names] # values are the column names
                map_table(col_names, [row]) # map just the sample row
                xml_func.strip(root)
                db_xml.put_table(in_db, root.firstChild, table, commit,
                    row_ins_ct_ref, table_is_esc)
                # NOTE(review): db_xml is imported in the out_is_db branch
                # below; by_col requires in/out configs to be equal, so
                # out_is_db should hold whenever this runs -- confirm
            else:
                # Use normal by-row method
                row_ct = map_table(col_names, sql.rows(cur), rows_start=start)
                    # rows_start: pre-start rows have been skipped
            
            in_db.db.close()
        elif in_is_xml:
            def get_rows(doc2rows):
                return iters.flatten(itertools.imap(doc2rows,
                    xml_parse.docs_iter(stdin, on_error)))
            
            if map_path == None:
                # No map: copy the input XML's row elements straight through
                def doc2rows(in_xml_root):
                    iter_ = xml_dom.NodeElemIter(in_xml_root)
                    util.skip(iter_, xml_dom.is_text) # skip metadata
                    return iter_
                
                row_ct = process_rows(lambda row, i: root.appendChild(row),
                    get_rows(doc2rows))
            else:
                def doc2rows(in_xml_root):
                    rows = xpath.get(in_xml_root, in_root, limit=end)
                    if rows == []: raise SystemExit('Map error: Root "'
                        +in_root+'" not found in input')
                    return rows
                
                def get_value(in_, row):
                    # Build an XPath alternation over all prefixed variants
                    in_ = './{'+(','.join(strings.with_prefixes(
                        ['']+prefixes, in_)))+'}' # also with no prefix
                    nodes = xpath.get(row, in_, allow_rooted=False)
                    if nodes != []: return xml_dom.value(nodes[0])
                    else: return None
                
                row_ct = map_rows(get_value, get_rows(doc2rows))
        else: # input is CSV
            map_ = dict(mappings)
            reader, col_names = csvs.reader_and_header(sys.stdin)
            row_ct = map_table(col_names, reader)
        
        return row_ct
    
    def process_inputs(root, row_ready):
        '''Runs process_input() for every map path; returns total row count'''
        row_ct = 0
        for map_path in map_paths:
            row_ct += process_input(root, row_ready, map_path)
        return row_ct
    
    # presumably shares these locals with the pool's worker processes --
    # TODO confirm against parallelproc
    pool.share_vars(locals())
    if out_is_db:
        import db_xml # deferred: only needed (and importable?) for DB output
        
        out_db = connect_db(out_db_config)
        try:
            if redo: sql.empty_db(out_db)
            pool.share_vars(locals())
            
            def row_ready(row_num, input_row):
                def on_error(e):
                    exc.add_msg(e, term.emph('row #:')+' '+str(row_num+1))
                        # row # is internally 0-based, but 1-based to the user
                    exc.add_msg(e, term.emph('input row:')+'\n'+str(input_row))
                    exc.add_msg(e, term.emph('output row:')+'\n'+str(root))
                    ex_tracker.track(e, row_num)
                pool.share_vars(locals())
                
                row_root = root.cloneNode(True) # deep copy so don't modify root
                xml_func.process(row_root, on_error)
                if not xml_dom.is_empty(row_root):
                    assert xml_dom.has_one_child(row_root)
                    try:
                        # Savepoint so one bad row doesn't abort the whole
                        # transaction
                        sql.with_savepoint(out_db,
                            lambda: db_xml.put(out_db, row_root.firstChild,
                                row_ins_ct_ref, on_error))
                        if commit: out_db.db.commit()
                    except sql.DatabaseErrors, e: on_error(e)
            
            row_ct = process_inputs(root, row_ready)
            sys.stdout.write('Inserted '+str(row_ins_ct_ref[0])+
                ' new rows into database\n')
            
            # Consume asynchronous tasks
            pool.main_loop()
        finally:
            # Rollback discards any uncommitted work (no-op after commit)
            out_db.db.rollback()
            out_db.db.close()
    else:
        def on_error(e): ex_tracker.track(e)
        def row_ready(row_num, input_row): pass
        row_ct = process_inputs(root, row_ready)
        xml_func.process(root, on_error)
        if out_is_xml_ref[0]:
            doc.writexml(sys.stdout, **xml_dom.prettyxml_config)
        else: # output is CSV
            raise NotImplementedError('CSV output not supported yet')
    
    # Consume any asynchronous tasks not already consumed above
    pool.main_loop()
    
    # Final stats; ex_tracker.exit() sets the exit status to the error count
    profiler.stop(row_ct)
    ex_tracker.add_iters(row_ct)
    if verbose:
        sys.stderr.write('Processed '+str(row_ct)+' input rows\n')
        sys.stderr.write(profiler.msg()+'\n')
        sys.stderr.write(ex_tracker.msg()+'\n')
    ex_tracker.exit()
415

    
416
def main():
    '''Entry point: runs main_(), turning map-spreadsheet syntax errors into
    a clean exit message instead of a traceback.'''
    try: main_()
    except Parser.SyntaxError, e: raise SystemExit(str(e))
419

    
420
if __name__ == '__main__':
    # profile_to is read again (and registered for the usage message) inside
    # main_(); here it selects whether to run under the profiler
    profile_to = opts.get_env_var('profile_to', None)
    if profile_to != None:
        import cProfile
        sys.stderr.write('Profiling to '+profile_to+'\n')
        # NOTE(review): passes main's code object instead of the conventional
        # 'main()' command string; cProfile.run exec()s it in __main__'s
        # namespace, which should execute main's body -- confirm intended
        cProfile.run(main.func_code, profile_to)
    else: main()
(25-25/47)