Project

General

Profile

1
#!/usr/bin/env python
2
# Maps one datasource to another, using a map spreadsheet if needed
3
# Exit status is the # of errors in the import, up to the maximum exit status
4
# For outputting an XML file to a PostgreSQL database, use the general format of
5
# http://vegbank.org/vegdocs/xml/vegbank_example_ver1.0.2.xml
6

    
7
import csv
8
import itertools
9
import os.path
10
import sys
11
import xml.dom.minidom as minidom
12

    
13
sys.path.append(os.path.dirname(__file__)+"/../lib")
14

    
15
import csvs
16
import exc
17
import iters
18
import maps
19
import opts
20
import parallel
21
import Parser
22
import profiling
23
import sql
24
import streams
25
import strings
26
import term
27
import util
28
import xpath
29
import xml_dom
30
import xml_func
31
import xml_parse
32

    
33
def get_with_prefix(map_, prefixes, key):
34
    '''Gets all entries for the given key with any of the given prefixes'''
35
    values = []
36
    for key_ in strings.with_prefixes(['']+prefixes, key): # also with no prefix
37
        try: value = map_[key_]
38
        except KeyError, e: continue # keep going
39
        values.append(value)
40
    
41
    if values != []: return values
42
    else: raise e # re-raise last KeyError
43

    
def metadata_value(name):
    '''Looks up a metadata value by name.
    
    Metadata support has been removed from this program, so the lookup
    always reports "not found" by returning None.
    '''
    return None
45

    
46
def cleanup(val):
    '''Normalizes an input value for import.
    
    None passes straight through. Any other value is converted to unicode
    and cleaned up, and the empty string and the PostgreSQL NULL marker
    (backslash-N) are mapped to None.
    '''
    if val == None: return val
    cleaned = strings.cleanup(strings.ustr(val))
    return util.none_if(cleaned, u'', u'\\N')
49

    
50
def main_():
    '''Maps one datasource (DB table, XML stream, or CSV on stdin) to another,
    using map spreadsheets if given on the command line.
    
    All configuration comes from environment variables (collected into
    env_names for the usage message). Finishes via ex_tracker.exit(), whose
    exit status is the # of errors in the import (see file header).
    '''
    env_names = []
    def usage_err():
        # SystemExit with a message prints it and exits non-zero
        raise SystemExit('Usage: '+opts.env_usage(env_names, True)+' '
            +sys.argv[0]+' [map_path...] [<input] [>output]\n'
            'Note: Row #s start with 1')
    
    ## Get config from env vars
    
    # Modes
    test = opts.env_flag('test', False, env_names)
    commit = opts.env_flag('commit', False, env_names) and not test
        # never commit in test mode
    redo = opts.env_flag('redo', test, env_names) and not commit
        # never redo in commit mode (manually run `make empty_db` instead)
    
    # Ranges
    start = util.cast(int, opts.get_env_var('start', 1, env_names)) # 1-based
    # Make start internally 0-based.
    # It's 1-based to the user to match up with the staging table row #s.
    start -= 1
    if test: n_default = 1
    else: n_default = None
    n = util.cast(int, util.none_if(opts.get_env_var('n', n_default, env_names),
        u''))
    end = n
    if end != None: end += start
    
    # Debugging
    debug = opts.env_flag('debug', False, env_names)
    sql.run_raw_query.debug = debug
    verbose = debug or opts.env_flag('verbose', not test, env_names)
    opts.get_env_var('profile_to', None, env_names) # add to env_names
    
    # DB
    def get_db_config(prefix):
        # e.g. prefix 'in' reads in_engine, in_user, ... per sql.db_config_names
        return opts.get_env_vars(sql.db_config_names, prefix, env_names)
    in_db_config = get_db_config('in')
    out_db_config = get_db_config('out')
    in_is_db = 'engine' in in_db_config
    out_is_db = 'engine' in out_db_config
    in_schema = opts.get_env_var('in_schema', None, env_names)
    in_table = opts.get_env_var('in_table', None, env_names)
    
    # Optimization
    by_col = in_db_config == out_db_config and opts.env_flag('by_col', False,
        env_names) # by-column optimization only applies if mapping to same DB
    if test: cpus_default = 0
    else: cpus_default = None
    cpus = util.cast(int, util.none_if(opts.get_env_var('cpus', cpus_default,
        env_names), u''))
    
    ##
    
    # Logging
    def log(msg, on=verbose):
        if on: sys.stderr.write(msg+'\n')
    if debug: log_debug = lambda msg: log(msg, debug)
    else: log_debug = sql.log_debug_none
    
    # Parse args
    map_paths = sys.argv[1:]
    if map_paths == []:
        # With no maps, only XML-in -> DB-out passthrough is supported
        if in_is_db or not out_is_db: usage_err()
        else: map_paths = [None]
    
    def connect_db(db_config):
        log('Connecting to '+sql.db_config_str(db_config))
        return sql.connect(db_config, log_debug=log_debug)
    
    if end != None: end_str = str(end-1) # end is one past the last #
    else: end_str = 'end'
    log('Processing input rows '+str(start)+'-'+end_str)
    
    ex_tracker = exc.ExPercentTracker(iter_text='row')
    profiler = profiling.ItersProfiler(start_now=True, iter_text='row')
    
    # Parallel processing
    pool = parallel.MultiProducerPool(cpus)
    log('Using '+str(pool.process_ct)+' parallel CPUs')
    
    doc = xml_dom.create_doc()
    root = doc.documentElement
    # Single-element lists are used as mutable refs shared with the closures
    out_is_xml_ref = [False]
    in_label_ref = [None]
    def update_in_label():
        if in_label_ref[0] != None:
            xpath.get(root, '/_ignore/inLabel="'+in_label_ref[0]+'"', True)
    def prep_root():
        # Reset the output tree between rows, re-attaching the input label
        root.clear()
        update_in_label()
    prep_root()
    
    # Define before the out_is_db section because it's used by by_col
    row_ins_ct_ref = [0]
    
    def process_input(root, row_ready, map_path):
        '''Inputs datasource to XML tree, mapping if needed'''
        # Load map header
        in_is_xpaths = True
        out_is_xpaths = True
        out_label = None
        if map_path != None:
            metadata = []
            mappings = []
            stream = open(map_path, 'rb')
            reader = csv.reader(stream)
            # First spreadsheet row holds the input and output column labels
            in_label, out_label = reader.next()[:2]
            
            def split_col_name(name):
                # NOTE(review): appears unused here; maps.col_info below does
                # the equivalent parsing -- confirm before removing
                label, sep, root = name.partition(':')
                label, sep2, prefixes_str = label.partition('[')
                prefixes_str = strings.remove_suffix(']', prefixes_str)
                prefixes = strings.split(',', prefixes_str)
                return label, sep != '', root, prefixes
                    # extract datasrc from "datasrc[data_format]"
            
            in_label, in_root, prefixes = maps.col_info(in_label)
            in_is_xpaths = in_root != None
            in_label_ref[0] = in_label
            update_in_label()
            out_label, out_root = maps.col_info(out_label)[:2]
            out_is_xpaths = out_root != None
            if out_is_xpaths: has_types = out_root.find('/*s/') >= 0
                # outer elements are types
            
            for row in reader:
                in_, out = row[:2]
                if out != '': mappings.append([in_, out_root+out])
            
            stream.close()
            
            root.ownerDocument.documentElement.tagName = out_label
        in_is_xml = in_is_xpaths and not in_is_db
        out_is_xml_ref[0] = out_is_xpaths and not out_is_db
        
        # Prevent collisions if multiple inputs mapping to same output
        outputs_idxs = dict()
        for i, mapping in enumerate(mappings):
            in_, out = mapping
            default = util.NamedTuple(count=1, first=i)
            idxs = outputs_idxs.setdefault(out, default)
            if idxs is not default: # key existed, so there was a collision
                if idxs.count == 1: # first key does not yet have /_alt/#
                    mappings[idxs.first][1] += '/_alt/0'
                mappings[i][1] += '/_alt/'+str(idxs.count)
                idxs.count += 1
        
        def process_rows(process_row, rows, rows_start=0):
            '''Processes input rows
            @param process_row(in_row, i)
            @rows_start The (0-based) row # of the first row in rows. Set this
                only if the pre-start rows have already been skipped.
            '''
            rows = iter(rows)
            
            if end != None: row_nums = xrange(rows_start, end)
            else: row_nums = itertools.count(rows_start)
            for i in row_nums:
                try: row = rows.next()
                except StopIteration: break # no more rows
                if i < start: continue # not at start row yet
                
                process_row(row, i)
                row_ready(i, row)
            # NOTE(review): if the loop exhausts row_nums (rather than
            # breaking on StopIteration), the last processed row # is i, so
            # this undercounts by 1; and if the loop never runs, i is unbound
            # (NameError) -- confirm intended semantics
            row_ct = i-start
            return row_ct
        
        def map_rows(get_value, rows, **kw_args):
            '''Maps input rows
            @param get_value(in_, row):str
            '''
            id_node = None
            if out_is_db:
                for i, mapping in enumerate(mappings):
                    in_, out = mapping
                    # All put_obj()s should return the same id_node
                    nodes, id_node = xpath.put_obj(root, out, '-1', has_types,
                        '$'+str(in_)) # value is placeholder that documents name
                #assert id_node != None
                
                if debug: # only str() if debug
                    log_debug('Put template:\n'+str(root))
                prep_root()
            
            def process_row(row, i):
                row_id = str(i)
                for in_, out in mappings:
                    log_debug('Getting '+str(in_))
                    value = metadata_value(in_)
                    if value == None: value = cleanup(get_value(in_, row))
                    log_debug('Putting '+repr(value)+' to '+str(out))
                    if out_is_db or value != None:
                        xpath.put_obj(root, out, row_id, has_types, value)
                if debug: log_debug('Putting:\n'+str(root))# only str() if debug
            return process_rows(process_row, rows, **kw_args)
        
        def map_table(col_names, rows, **kw_args):
            col_names_ct = len(col_names)
            col_idxs = util.list_flip(col_names)
            
            # Resolve prefixes: replace each map input name with the list of
            # matching column indexes, dropping mappings with no match
            mappings_orig = mappings[:] # save a copy
            mappings[:] = [] # empty existing elements
            for in_, out in mappings_orig:
                if metadata_value(in_) == None:
                    try: names = get_with_prefix(col_idxs, prefixes, in_)
                    except KeyError: pass
                    else: mappings.append([names, out])
            
            def get_value(in_, row):
                # First non-None value among the columns mapped to this input
                return util.coalesce(*util.list_subset(row.list, in_))
            def wrap_row(row):
                return util.ListDict(util.list_as_length(row, col_names_ct),
                    col_names, col_idxs) # handle CSV rows of different lengths
            
            return map_rows(get_value, util.WrapIter(wrap_row, rows), **kw_args)
        
        stdin = streams.LineCountStream(sys.stdin)
        def on_error(e):
            exc.add_msg(e, term.emph('input line #:')+' '+str(stdin.line_num))
            ex_tracker.track(e)
        
        if in_is_db:
            in_db = connect_db(in_db_config)
            
            # Get table and schema name
            schema = in_schema # modified, so can't have same name as outer var
            table = in_table # modified, so can't have same name as outer var
            if table == None:
                assert in_is_xpaths
                # in_root may be "schema.table" or just "table"
                schema, sep, table = in_root.partition('.')
                if sep == '': # only the table name was specified
                    table = schema
                    schema = None
            table_is_esc = False
            if schema != None:
                table = sql.qual_name(in_db, schema, table)
                table_is_esc = True
            
            # Fetch rows
            if by_col: limit = 0 # only fetch column names
            else: limit = n
            cur = sql.select(in_db, table, limit=limit, start=start,
                table_is_esc=table_is_esc)
            col_names = list(sql.col_names(cur))
            
            if by_col:
                row_ready = lambda row_num, input_row: None# disable row_ready()
                row = ['$'+v for v in col_names] # values are the column names
                map_table(col_names, [row]) # map just the sample row
                xml_func.strip(root)
                db_xml.put_table(in_db, root.firstChild, table, commit,
                    row_ins_ct_ref, table_is_esc)
                # NOTE(review): row_ct is never assigned in this branch, so
                # the `return row_ct` below would raise NameError -- confirm
            else:
                # Use normal by-row method
                row_ct = map_table(col_names, sql.rows(cur), rows_start=start)
                    # rows_start: pre-start rows have been skipped
            
            in_db.db.close()
        elif in_is_xml:
            def get_rows(doc2rows):
                return iters.flatten(itertools.imap(doc2rows,
                    xml_parse.docs_iter(stdin, on_error)))
            
            if map_path == None:
                # No map: copy input elements straight into the output tree
                def doc2rows(in_xml_root):
                    iter_ = xml_dom.NodeElemIter(in_xml_root)
                    util.skip(iter_, xml_dom.is_text) # skip metadata
                    return iter_
                
                row_ct = process_rows(lambda row, i: root.appendChild(row),
                    get_rows(doc2rows))
            else:
                def doc2rows(in_xml_root):
                    rows = xpath.get(in_xml_root, in_root, limit=end)
                    if rows == []: raise SystemExit('Map error: Root "'
                        +in_root+'" not found in input')
                    return rows
                
                def get_value(in_, row):
                    in_ = './{'+(','.join(strings.with_prefixes(
                        ['']+prefixes, in_)))+'}' # also with no prefix
                    nodes = xpath.get(row, in_, allow_rooted=False)
                    if nodes != []: return xml_dom.value(nodes[0])
                    else: return None
                
                row_ct = map_rows(get_value, get_rows(doc2rows))
        else: # input is CSV
            map_ = dict(mappings)
            reader, col_names = csvs.reader_and_header(sys.stdin)
            row_ct = map_table(col_names, reader)
        
        return row_ct
    
    def process_inputs(root, row_ready):
        # Runs every map in sequence against the (re-read) input
        row_ct = 0
        for map_path in map_paths:
            row_ct += process_input(root, row_ready, map_path)
        return row_ct
    
    pool.share_vars(locals())
    if out_is_db:
        import db_xml
        
        out_db = connect_db(out_db_config)
        try:
            if redo: sql.empty_db(out_db)
            pool.share_vars(locals())
            
            def row_ready(row_num, input_row):
                def on_error(e):
                    exc.add_msg(e, term.emph('row #:')+' '+str(row_num+1))
                        # row # is internally 0-based, but 1-based to the user
                    exc.add_msg(e, term.emph('input row:')+'\n'+str(input_row))
                    exc.add_msg(e, term.emph('output row:')+'\n'+str(root))
                    ex_tracker.track(e, row_num)
                pool.share_vars(locals())
                
                xml_func.process(root, on_error)
                if not xml_dom.is_empty(root):
                    assert xml_dom.has_one_child(root)
                    try:
                        # Savepoint so one bad row doesn't abort the whole
                        # transaction
                        sql.with_savepoint(out_db,
                            lambda: db_xml.put(out_db, root.firstChild,
                                row_ins_ct_ref, on_error))
                        if commit: out_db.db.commit()
                    except sql.DatabaseErrors, e: on_error(e)
                prep_root()
            
            row_ct = process_inputs(root, row_ready)
            sys.stdout.write('Inserted '+str(row_ins_ct_ref[0])+
                ' new rows into database\n')
            
            # Consume asynchronous tasks
            pool.main_loop()
        finally:
            # rollback() is a no-op after commit(); ensures nothing
            # uncommitted leaks through when commit mode is off
            out_db.db.rollback()
            out_db.db.close()
    else:
        def on_error(e): ex_tracker.track(e)
        def row_ready(row_num, input_row): pass
        row_ct = process_inputs(root, row_ready)
        xml_func.process(root, on_error)
        if out_is_xml_ref[0]:
            doc.writexml(sys.stdout, **xml_dom.prettyxml_config)
        else: # output is CSV
            raise NotImplementedError('CSV output not supported yet')
    
    # Consume any asynchronous tasks not already consumed above
    pool.main_loop()
    
    profiler.stop(row_ct)
    ex_tracker.add_iters(row_ct)
    if verbose:
        sys.stderr.write('Processed '+str(row_ct)+' input rows\n')
        sys.stderr.write(profiler.msg()+'\n')
        sys.stderr.write(ex_tracker.msg()+'\n')
    ex_tracker.exit()
409

    
410
def main():
    '''Entry point: runs main_(), converting map-spreadsheet parse errors
    into a clean exit with the error message instead of a traceback'''
    try: main_()
    except Parser.SyntaxError, e: raise SystemExit(str(e))
413

    
414
if __name__ == '__main__':
    # profile_to is read again (registered) inside main_() so it appears in
    # the usage message; here it just selects profiled vs. normal execution
    profile_to = opts.get_env_var('profile_to', None)
    if profile_to != None:
        import cProfile
        sys.stderr.write('Profiling to '+profile_to+'\n')
        # NOTE(review): passes main's code object (Python 2 func_code) to
        # cProfile.run, which exec()s it in __main__'s namespace -- works
        # because main takes no args; cProfile.run('main()', profile_to)
        # would be the conventional form
        cProfile.run(main.func_code, profile_to)
    else: main()
(25-25/47)