Project

General

Profile

1
#!/usr/bin/env python
2
# Maps one datasource to another, using a map spreadsheet if needed
3
# Exit status is the # of errors in the import, up to the maximum exit status
4
# For outputting an XML file to a PostgreSQL database, use the general format of
5
# http://vegbank.org/vegdocs/xml/vegbank_example_ver1.0.2.xml
6

    
7
import csv
8
import itertools
9
import os.path
10
import sys
11
import xml.dom.minidom as minidom
12

    
13
sys.path.append(os.path.dirname(__file__)+"/../lib")
14

    
15
import csvs
16
import db_xml
17
import exc
18
import iters
19
import maps
20
import opts
21
import parallelproc
22
import Parser
23
import profiling
24
import sql
25
import streams
26
import strings
27
import term
28
import util
29
import xpath
30
import xml_dom
31
import xml_func
32
import xml_parse
33

    
34
def get_with_prefix(map_, prefixes, key):
    '''Gets all entries for the given key with any of the given prefixes

    Tries the key unprefixed first, then with each prefix in order.
    @param map_ mapping to look up in (any object supporting [] and KeyError)
    @param prefixes list of prefix strings to try in addition to no prefix
    @param key the base key to look up
    @return list of tuple(found_key, found_value), one per candidate key found
    @raise KeyError if no candidate key was found in map_
    '''
    values = []
    e = None # last KeyError seen, re-raised if nothing was found
    for key_ in strings.with_prefixes(['']+prefixes, key): # also with no prefix
        try: value = map_[key_]
        except KeyError as e: continue # keep going
        values.append((key_, value))
    
    if values: return values
    if e is None: raise KeyError(key) # no candidate keys were even generated
    raise e # re-raise last KeyError
46

    
47
def metadata_value(name):
    '''Stub kept so callers still work: the metadata-value feature has been
    removed, so every lookup now answers None.'''
    return None
48

    
49
def cleanup(val):
    '''Normalizes an input value for import.

    None passes through unchanged. Any other value is converted to unicode
    via strings.ustr(), cleaned up via strings.cleanup(), and then mapped to
    None if the result equals u'' or u'\\N' (util.none_if with both sentinels).
    '''
    if val is None: return val # identity test for the None singleton
    return util.none_if(strings.cleanup(strings.ustr(val)), u'', u'\\N')
52

    
53
def main_():
    '''Maps one datasource to another, driven entirely by environment
    variables and the map-spreadsheet paths given on the command line.

    Input comes from a DB table (in_schema/in_table or the map's input root),
    from XML documents on stdin, or from CSV on stdin; output goes to a DB,
    to XML on stdout, or (not yet implemented) to CSV. Errors are tracked in
    an ExPercentTracker, whose exit() sets the process exit status.
    '''
    env_names = [] # accumulates every env var name read, for the usage message
    def usage_err():
        # Aborts with a usage message listing all recognized env vars
        raise SystemExit('Usage: '+opts.env_usage(env_names, True)+' '
            +sys.argv[0]+' [map_path...] [<input] [>output]\n'
            'Note: Row #s start with 1')
    
    ## Get config from env vars
    
    # Modes
    test = opts.env_flag('test', False, env_names)
    commit = opts.env_flag('commit', False, env_names) and not test
        # never commit in test mode
    redo = opts.env_flag('redo', test, env_names) and not commit
        # never redo in commit mode (manually run `make empty_db` instead)
    
    # Ranges
    start = util.cast(int, opts.get_env_var('start', 1, env_names)) # 1-based
    # Make start interally 0-based.
    # It's 1-based to the user to match up with the staging table row #s.
    start -= 1
    if test: n_default = 1 # test mode processes a single row by default
    else: n_default = None
    n = util.cast(int, util.none_if(opts.get_env_var('n', n_default, env_names),
        u''))
    end = n # end is one past the last 0-based row #, or None for no limit
    if end != None: end += start
    
    # Debugging
    debug = opts.env_flag('debug', False, env_names)
    sql.run_raw_query.debug = debug
    verbose = debug or opts.env_flag('verbose', not test, env_names)
    verbose_errors = opts.env_flag('verbose_errors', test or debug, env_names)
    opts.get_env_var('profile_to', None, env_names) # add to env_names
    
    # DB
    def get_db_config(prefix):
        # Reads the '<prefix>'-prefixed set of DB connection env vars
        return opts.get_env_vars(sql.db_config_names, prefix, env_names)
    in_db_config = get_db_config('in')
    out_db_config = get_db_config('out')
    in_is_db = 'engine' in in_db_config
    out_is_db = 'engine' in out_db_config
    in_schema = opts.get_env_var('in_schema', None, env_names)
    in_table = opts.get_env_var('in_table', None, env_names)
    
    # Optimization
    cache_sql = opts.env_flag('cache_sql', True, env_names)
    by_col = in_db_config == out_db_config and opts.env_flag('by_col', False,
        env_names) # by-column optimization only applies if mapping to same DB
    if test: cpus_default = 0
    else: cpus_default = 0 # or None to use parallel processing by default
    cpus = util.cast(int, util.none_if(opts.get_env_var('cpus', cpus_default,
        env_names), u''))
    
    ##
    
    # Logging
    def log(msg, on=verbose):
        # Writes msg to stderr when on; default gate is the verbose flag
        if on: sys.stderr.write(msg+'\n')
    if debug: log_debug = lambda msg: log(msg, debug)
    else: log_debug = sql.log_debug_none
    
    # Parse args
    map_paths = sys.argv[1:]
    if map_paths == []:
        # With no maps, only direct XML-stdin -> DB is possible
        if in_is_db or not out_is_db: usage_err()
        else: map_paths = [None] # None = pass input through without a map
    
    def connect_db(db_config):
        # Opens a connection; autocommit only when debugging a commit run
        log('Connecting to '+sql.db_config_str(db_config))
        return sql.connect(db_config, caching=cache_sql,
            autocommit=debug and commit, log_debug=log_debug)
    
    if end != None: end_str = str(end-1) # end is one past the last #
    else: end_str = 'end'
    log('Processing input rows '+str(start)+'-'+end_str)
    
    ex_tracker = exc.ExPercentTracker(iter_text='row')
    profiler = profiling.ItersProfiler(start_now=True, iter_text='row')
    
    # Parallel processing
    pool = parallelproc.MultiProducerPool(cpus)
    log('Using '+str(pool.process_ct)+' parallel CPUs')
    
    # Output XML tree; root is reused (cleared) across inputs
    doc = xml_dom.create_doc()
    root = doc.documentElement
    # One-element lists act as mutable cells writable from nested functions
    # (Python 2 has no `nonlocal`)
    out_is_xml_ref = [False]
    in_label_ref = [None]
    def update_in_label():
        # Records the current input's label in the tree's /_ignore section
        if in_label_ref[0] != None:
            xpath.get(root, '/_ignore/inLabel="'+in_label_ref[0]+'"', True)
    def prep_root():
        # Resets the output tree, keeping the input-label annotation
        root.clear()
        update_in_label()
    prep_root()
    
    # Define before the out_is_db section because it's used by by_col
    row_ins_ct_ref = [0] # running count of rows inserted into the output DB
    
    def process_input(root, row_ready, map_path):
        '''Inputs datasource to XML tree, mapping if needed

        @param root output tree root to build rows under
        @param row_ready(row_num, input_row) called after each row is mapped
        @param map_path path to a map CSV, or None for unmapped XML input
        @return the # of input rows processed
        '''
        # Load map header
        in_is_xpaths = True
        out_is_xpaths = True
        out_label = None
        if map_path != None:
            metadata = []
            mappings = [] # list of [in_col_or_xpath, out_xpath] pairs
            stream = open(map_path, 'rb')
            reader = csv.reader(stream)
            # First row's first two cells are the input and output labels
            in_label, out_label = reader.next()[:2]
            
            def split_col_name(name):
                # Parses 'label[p1,p2]:root' into its parts
                # NOTE(review): appears unused here; maps.col_info is what's
                # actually called below
                label, sep, root = name.partition(':')
                label, sep2, prefixes_str = label.partition('[')
                prefixes_str = strings.remove_suffix(']', prefixes_str)
                prefixes = strings.split(',', prefixes_str)
                return label, sep != '', root, prefixes
                    # extract datasrc from "datasrc[data_format]"
            
            in_label, in_root, prefixes = maps.col_info(in_label)
            in_is_xpaths = in_root != None
            in_label_ref[0] = in_label
            update_in_label()
            out_label, out_root = maps.col_info(out_label)[:2]
            out_is_xpaths = out_root != None
            if out_is_xpaths: has_types = out_root.find('/*s/') >= 0
                # outer elements are types
            
            # Remaining rows: col 0 = input ref, col 1 = output xpath suffix
            for row in reader:
                in_, out = row[:2]
                if out != '': mappings.append([in_, out_root+out])
            
            stream.close()
            
            root.ownerDocument.documentElement.tagName = out_label
        in_is_xml = in_is_xpaths and not in_is_db
        out_is_xml_ref[0] = out_is_xpaths and not out_is_db
        
        def process_rows(process_row, rows, rows_start=0):
            '''Processes input rows within the configured start/end window
            @param process_row(in_row, i) called for each in-range row
            @param rows iterable of input rows
            @param rows_start The (0-based) row # of the first row in rows.
                Set this only if the pre-start rows have already been skipped.
            @return the # of rows processed (0 if none reached start)
            '''
            rows = iter(rows)
            
            if end != None: row_nums = xrange(rows_start, end)
            else: row_nums = itertools.count(rows_start)
            i = -1
            for i in row_nums:
                try: row = rows.next()
                except StopIteration:
                    i -= 1 # last row # didn't count
                    break # no more rows
                if i < start: continue # not at start row yet
                
                process_row(row, i)
                row_ready(i, row)
            row_ct = i-start+1
            return row_ct
        
        def map_rows(get_value, rows, **kw_args):
            '''Maps input rows through `mappings` into the output tree
            @param get_value(in_, row):str extracts one input value from a row
            @param kw_args passed through to process_rows()
            @return the # of rows processed
            '''
            # Prevent collisions if multiple inputs mapping to same output
            outputs_idxs = dict()
            for i, mapping in enumerate(mappings):
                in_, out = mapping
                default = util.NamedTuple(count=1, first=i)
                idxs = outputs_idxs.setdefault(out, default)
                if idxs is not default: # key existed, so there was a collision
                    if idxs.count == 1: # first key does not yet have /_alt/#
                        mappings[idxs.first][1] += '/_alt/0'
                    mappings[i][1] += '/_alt/'+str(idxs.count)
                    idxs.count += 1
            
            id_node = None
            if out_is_db:
                # Build the put template once; per row only values change
                for i, mapping in enumerate(mappings):
                    in_, out = mapping
                    # All put_obj()s should return the same id_node
                    nodes, id_node = xpath.put_obj(root, out, '-1', has_types,
                        '$'+str(in_)) # value is placeholder that documents name
                    mappings[i] = [in_, nodes] # out is now the XML nodes
                assert id_node != None
                
                if debug: # only calc if debug
                    log_debug('Put template:\n'+str(root))
            
            def process_row(row, i):
                row_id = str(i)
                if id_node != None: xml_dom.set_value(id_node, row_id)
                for in_, out in mappings:
                    log_debug('Getting '+str(in_))
                    value = metadata_value(in_)
                    if value == None: value = cleanup(get_value(in_, row))
                    log_debug('Putting '+repr(value)+' to '+str(out))
                    if out_is_db: # out is list of XML nodes
                        for node in out: xml_dom.set_value(node, value)
                    elif value != None: # out is XPath
                        xpath.put_obj(root, out, row_id, has_types, value)
            return process_rows(process_row, rows, **kw_args)
        
        def map_table(col_names, rows, **kw_args):
            # Maps tabular rows (DB or CSV): resolves mapping inputs to
            # column indexes, then defers to map_rows()
            col_names_ct = len(col_names)
            col_idxs = util.list_flip(col_names)
            
            # Resolve prefixes
            mappings_orig = mappings[:] # save a copy
            mappings[:] = [] # empty existing elements
            for in_, out in mappings_orig:
                if metadata_value(in_) == None:
                    try: cols = get_with_prefix(col_idxs, prefixes, in_)
                    except KeyError: pass # unmatched column: drop the mapping
                    else: mappings[len(mappings):] = [[db_xml.ColRef(*col), out]
                        for col in cols] # can't use += because that uses =
            
            def get_value(in_, row): return row.list[in_.idx]
            def wrap_row(row):
                return util.ListDict(util.list_as_length(row, col_names_ct),
                    col_names, col_idxs) # handle CSV rows of different lengths
            
            return map_rows(get_value, util.WrapIter(wrap_row, rows), **kw_args)
        
        stdin = streams.LineCountStream(sys.stdin) # counts lines for errors
        def on_error(e):
            # Annotates input-parse errors with the current stdin line #
            exc.add_msg(e, term.emph('input line #:')+' '+str(stdin.line_num))
            ex_tracker.track(e)
        
        if in_is_db:
            in_db = connect_db(in_db_config)
            
            # Get table and schema name
            schema = in_schema # modified, so can't have same name as outer var
            table = in_table # modified, so can't have same name as outer var
            if table == None:
                assert in_is_xpaths
                schema, sep, table = in_root.partition('.')
                if sep == '': # only the table name was specified
                    table = schema
                    schema = None
            
            # Fetch rows
            if by_col: limit = 0 # only fetch column names
            else: limit = n
            cur = sql.select(in_db, sql.qual_name(in_db, schema, table),
                limit=limit, start=start, cacheable=False, table_is_esc=True)
            col_names = list(sql.col_names(cur))
            
            if by_col:
                # By-column: build the template, then let the DB do the work
                map_table(col_names, []) # just create the template
                xml_func.strip(root)
                if debug: log_debug('Putting stripped:\n'+str(root))
                    # only calc if debug
                db_xml.put_table(in_db, root.firstChild, table, schema, n,
                    start, commit, row_ins_ct_ref)
                row_ct = 0 # unknown for now
            else:
                # Use normal by-row method
                row_ct = map_table(col_names, sql.rows(cur), rows_start=start)
                    # rows_start: pre-start rows have been skipped
            
            in_db.db.close()
        elif in_is_xml:
            def get_rows(doc2rows):
                # Flattens rows from each XML document on stdin
                return iters.flatten(itertools.imap(doc2rows,
                    xml_parse.docs_iter(stdin, on_error)))
            
            if map_path == None:
                # No map: append each input element directly under root
                def doc2rows(in_xml_root):
                    iter_ = xml_dom.NodeElemIter(in_xml_root)
                    util.skip(iter_, xml_dom.is_text) # skip metadata
                    return iter_
                
                row_ct = process_rows(lambda row, i: root.appendChild(row),
                    get_rows(doc2rows))
            else:
                def doc2rows(in_xml_root):
                    rows = xpath.get(in_xml_root, in_root, limit=end)
                    if rows == []: raise SystemExit('Map error: Root "'
                        +in_root+'" not found in input')
                    return rows
                
                def get_value(in_, row):
                    # Branched path tries the input name with each prefix
                    in_ = './{'+(','.join(strings.with_prefixes(
                        ['']+prefixes, in_)))+'}' # also with no prefix
                    nodes = xpath.get(row, in_, allow_rooted=False)
                    if nodes != []: return xml_dom.value(nodes[0])
                    else: return None
                
                row_ct = map_rows(get_value, get_rows(doc2rows))
        else: # input is CSV
            map_ = dict(mappings)
            reader, col_names = csvs.reader_and_header(sys.stdin)
            row_ct = map_table(col_names, reader)
        
        return row_ct
    
    def process_inputs(root, row_ready):
        # Runs process_input() for every map path; returns total row count
        row_ct = 0
        for map_path in map_paths:
            row_ct += process_input(root, row_ready, map_path)
        return row_ct
    
    pool.share_vars(locals())
    if out_is_db:
        out_db = connect_db(out_db_config)
        try:
            if redo: sql.empty_db(out_db)
            pool.share_vars(locals())
            
            def row_ready(row_ready_num, input_row):
                row_str_ = [None] # lazily-built row description cache
                def row_str():
                    if row_str_[0] == None:
                        # Row # is interally 0-based, but 1-based to the user
                        row_str_[0] = (term.emph('row #:')+' '+str(row_ready_num+1)
                            +'\n'+term.emph('input row:')+'\n'+str(input_row))
                        if verbose_errors: row_str_[0] += ('\n'
                            +term.emph('output row:')+'\n'+str(root))
                    return row_str_[0]
                
                if debug: log_debug(row_str()) # only calc if debug
                
                def on_error(e):
                    exc.add_msg(e, row_str())
                    ex_tracker.track(e, row_ready_num, detail=verbose_errors)
                pool.share_vars(locals())
                
                row_root = root.cloneNode(True) # deep copy so don't modify root
                xml_func.process(row_root, on_error, out_db)
                if not xml_dom.is_empty(row_root):
                    assert xml_dom.has_one_child(row_root)
                    try:
                        # Savepoint so one bad row doesn't poison the txn
                        sql.with_savepoint(out_db,
                            lambda: db_xml.put(out_db, row_root.firstChild,
                                row_ins_ct_ref, on_error))
                        if commit: out_db.db.commit()
                    except sql.DatabaseErrors, e: on_error(e)
            
            row_ct = process_inputs(root, row_ready)
            sys.stdout.write('Inserted '+str(row_ins_ct_ref[0])+
                ' new rows into database\n')
            
            # Consume asynchronous tasks
            pool.main_loop()
        finally:
            if out_db.connected():
                out_db.db.rollback() # anything committed above is untouched
                out_db.db.close()
    else:
        def on_error(e): ex_tracker.track(e)
        def row_ready(row_num, input_row): pass
        row_ct = process_inputs(root, row_ready)
        xml_func.process(root, on_error)
        if out_is_xml_ref[0]:
            doc.writexml(sys.stdout, **xml_dom.prettyxml_config)
        else: # output is CSV
            raise NotImplementedError('CSV output not supported yet')
    
    # Consume any asynchronous tasks not already consumed above
    pool.main_loop()
    
    profiler.stop(row_ct)
    ex_tracker.add_iters(row_ct)
    if verbose:
        sys.stderr.write('Processed '+str(row_ct)+' input rows\n')
        sys.stderr.write(profiler.msg()+'\n')
        sys.stderr.write(ex_tracker.msg()+'\n')
    ex_tracker.exit() # exit status is the error count, capped at the max
425

    
426
def main():
    '''Entry point: runs main_(), converting map-syntax errors into a clean
    SystemExit carrying the error message instead of a traceback.'''
    # `except X as e` is valid on Python 2.6+ and required on Python 3,
    # unlike the removed `except X, e` form
    try: main_()
    except Parser.SyntaxError as e: raise SystemExit(str(e))
429

    
430
if __name__ == '__main__':
    # Optionally profile the whole run, writing stats to $profile_to
    profile_to = opts.get_env_var('profile_to', None)
    if profile_to is not None:
        import cProfile
        sys.stderr.write('Profiling to '+profile_to+'\n')
        # cProfile.run() documents a statement string; passing main.func_code
        # relied on exec() accepting a raw code object and is fragile
        cProfile.run('main()', profile_to)
    else: main()
(25-25/48)