Project

General

Profile

1
#!/usr/bin/env python
2
# Maps one datasource to another, using a map spreadsheet if needed
3
# Exit status is the # of errors in the import, up to the maximum exit status
4
# For outputting an XML file to a PostgreSQL database, use the general format of
5
# http://vegbank.org/vegdocs/xml/vegbank_example_ver1.0.2.xml
6

    
7
import csv
8
import itertools
9
import os.path
10
import sys
11
import xml.dom.minidom as minidom
12

    
13
sys.path.append(os.path.dirname(__file__)+"/../lib")
14

    
15
import csvs
16
import db_xml
17
import exc
18
import iters
19
import maps
20
import opts
21
import parallelproc
22
import Parser
23
import profiling
24
import sql
25
import streams
26
import strings
27
import term
28
import util
29
import xpath
30
import xml_dom
31
import xml_func
32
import xml_parse
33

    
34
def get_with_prefix(map_, prefixes, key):
    '''Gets all entries for the given key with any of the given prefixes.
    Tries the un-prefixed key as well as each prefixed form.
    @param map_ Mapping to look the keys up in
    @param prefixes Prefix strings to try in addition to no prefix
    @param key The base key
    @return list of (found_key, found_value) tuples, one per prefixed form of
        key that exists in map_
    @raise KeyError if no prefixed form of key is in map_
    '''
    values = []
    for key_ in strings.with_prefixes(['']+prefixes, key): # also with no prefix
        try: values.append((key_, map_[key_]))
        except KeyError: pass # keep going; only an error if nothing matches
    
    if values == []: raise KeyError(key) # no prefixed form of key was found
    return values
46

    
47
def metadata_value(name):
    '''Looks up a metadata value by name. Always returns None because the
    metadata-value feature has been removed.'''
    return None
48

    
49
def cleanup(val):
    '''Cleans up an input value for import.
    None passes through unchanged. Otherwise the value is converted to
    unicode, run through strings.cleanup(), and the empty string and the
    PostgreSQL NULL escape (\\N) are both mapped to None.'''
    if val == None: return val
    cleaned = strings.cleanup(strings.ustr(val))
    return util.none_if(cleaned, u'', u'\\N')
52

    
53
def main_():
    '''Maps the input datasource to the output, optionally via map
    spreadsheets given on the command line.
    
    Input may be a DB table ($in_* env vars), an XML document stream on stdin,
    or CSV on stdin; output may be a DB ($out_* env vars) or XML on stdout.
    All configuration comes from env vars, whose names are accumulated in
    env_names for the usage message.
    '''
    env_names = [] # names of the env vars used, for the usage message
    def usage_err():
        # Aborts the program with a usage message
        raise SystemExit('Usage: '+opts.env_usage(env_names, True)+' '
            +sys.argv[0]+' [map_path...] [<input] [>output]\n'
            'Note: Row #s start with 1')
    
    ## Get config from env vars
    
    # Modes
    test = opts.env_flag('test', False, env_names)
    commit = opts.env_flag('commit', False, env_names) and not test
        # never commit in test mode
    redo = opts.env_flag('redo', test, env_names) and not commit
        # never redo in commit mode (manually run `make empty_db` instead)
    
    # Ranges
    start = util.cast(int, opts.get_env_var('start', 1, env_names)) # 1-based
    # Make start internally 0-based.
    # It's 1-based to the user to match up with the staging table row #s.
    start -= 1
    if test: n_default = 1
    else: n_default = None
    n = util.cast(int, util.none_if(opts.get_env_var('n', n_default, env_names),
        u''))
    end = n
    if end != None: end += start # end is one past the last row # (0-based)
    
    # Debugging
    debug = opts.env_flag('debug', False, env_names)
    sql.run_raw_query.debug = debug
    verbose = debug or opts.env_flag('verbose', not test, env_names)
    verbose_errors = opts.env_flag('verbose_errors', test, env_names)
    opts.get_env_var('profile_to', None, env_names) # add to env_names
    
    # DB
    def get_db_config(prefix):
        # Collects the $<prefix>* DB connection settings
        return opts.get_env_vars(sql.db_config_names, prefix, env_names)
    in_db_config = get_db_config('in')
    out_db_config = get_db_config('out')
    in_is_db = 'engine' in in_db_config
    out_is_db = 'engine' in out_db_config
    in_schema = opts.get_env_var('in_schema', None, env_names)
    in_table = opts.get_env_var('in_table', None, env_names)
    
    # Optimization
    cache_sql = opts.env_flag('cache_sql', True, env_names)
    by_col = in_db_config == out_db_config and opts.env_flag('by_col', False,
        env_names) # by-column optimization only applies if mapping to same DB
    if test: cpus_default = 0
    else: cpus_default = 0 # or None to use parallel processing by default
    cpus = util.cast(int, util.none_if(opts.get_env_var('cpus', cpus_default,
        env_names), u''))
    
    ##
    
    # Logging
    def log(msg, on=verbose):
        if on: sys.stderr.write(msg+'\n')
    if debug: log_debug = lambda msg: log(msg, debug)
    else: log_debug = sql.log_debug_none
    
    # Parse args
    map_paths = sys.argv[1:]
    if map_paths == []:
        if in_is_db or not out_is_db: usage_err()
        else: map_paths = [None] # no mapping needed; pass input through
    
    def connect_db(db_config):
        log('Connecting to '+sql.db_config_str(db_config))
        return sql.connect(db_config, log_debug=log_debug, caching=cache_sql)
    
    if end != None: end_str = str(end-1) # end is one past the last #
    else: end_str = 'end'
    log('Processing input rows '+str(start)+'-'+end_str)
    
    ex_tracker = exc.ExPercentTracker(iter_text='row')
    profiler = profiling.ItersProfiler(start_now=True, iter_text='row')
    
    # Parallel processing
    pool = parallelproc.MultiProducerPool(cpus)
    log('Using '+str(pool.process_ct)+' parallel CPUs')
    
    # Output XML tree that each input row is mapped into
    doc = xml_dom.create_doc()
    root = doc.documentElement
    out_is_xml_ref = [False] # 1-elem list so nested functions can rebind it
    in_label_ref = [None] # 1-elem list so nested functions can rebind it
    def update_in_label():
        # Records the input datasource label in the output tree
        if in_label_ref[0] != None:
            xpath.get(root, '/_ignore/inLabel="'+in_label_ref[0]+'"', True)
    def prep_root():
        # Resets the output tree, keeping the input label
        root.clear()
        update_in_label()
    prep_root()
    
    # Define before the out_is_db section because it's used by by_col
    row_ins_ct_ref = [0] # running count of rows inserted into the output DB
    
    def process_input(root, row_ready, map_path):
        '''Inputs datasource to XML tree, mapping if needed.
        @param root The output XML tree's root element
        @param row_ready Callback(row_num, input_row) run after each row has
            been mapped into root
        @param map_path Path of the map spreadsheet, or None for no mapping
        @return The # of input rows processed
        '''
        # Load map header
        in_is_xpaths = True
        out_is_xpaths = True
        out_label = None
        if map_path != None:
            metadata = [] # NOTE(review): appears unused in this function
            mappings = []
            stream = open(map_path, 'rb')
            reader = csv.reader(stream)
            in_label, out_label = reader.next()[:2] # header row: in, out cols
            
            def split_col_name(name):
                # NOTE(review): appears unused; maps.col_info() is used instead
                label, sep, root = name.partition(':')
                label, sep2, prefixes_str = label.partition('[')
                prefixes_str = strings.remove_suffix(']', prefixes_str)
                prefixes = strings.split(',', prefixes_str)
                return label, sep != '', root, prefixes
                    # extract datasrc from "datasrc[data_format]"
            
            in_label, in_root, prefixes = maps.col_info(in_label)
            in_is_xpaths = in_root != None
            in_label_ref[0] = in_label
            update_in_label()
            out_label, out_root = maps.col_info(out_label)[:2]
            out_is_xpaths = out_root != None
            if out_is_xpaths: has_types = out_root.find('/*s/') >= 0
                # outer elements are types
            
            # Body rows: each maps an input column/XPath to an output XPath
            for row in reader:
                in_, out = row[:2]
                if out != '': mappings.append([in_, out_root+out])
            
            stream.close()
            
            root.ownerDocument.documentElement.tagName = out_label
        in_is_xml = in_is_xpaths and not in_is_db
        out_is_xml_ref[0] = out_is_xpaths and not out_is_db
        
        def process_rows(process_row, rows, rows_start=0):
            '''Processes input rows.
            @param process_row(in_row, i) Callback to process one row
            @param rows Iterable of input rows
            @param rows_start The (0-based) row # of the first row in rows.
                Set this only if the pre-start rows have already been skipped.
            @return The # of rows processed, not counting skipped pre-start
                rows
            '''
            rows = iter(rows)
            
            if end != None: row_nums = xrange(rows_start, end)
            else: row_nums = itertools.count(rows_start)
            i = -1
            for i in row_nums:
                try: row = rows.next()
                except StopIteration:
                    i -= 1 # last row # didn't count
                    break # no more rows
                if i < start: continue # not at start row yet
                
                process_row(row, i)
                row_ready(i, row)
            row_ct = i-start+1
            return row_ct
        
        def map_rows(get_value, rows, **kw_args):
            '''Maps input rows into the output tree using mappings.
            @param get_value(in_, row):str Extracts the value of input
                column/XPath in_ from row
            @param kw_args Passed through to process_rows()
            '''
            # Prevent collisions if multiple inputs mapping to same output,
            # by appending /_alt/# to each colliding output XPath
            outputs_idxs = dict()
            for i, mapping in enumerate(mappings):
                in_, out = mapping
                default = util.NamedTuple(count=1, first=i)
                idxs = outputs_idxs.setdefault(out, default)
                if idxs is not default: # key existed, so there was a collision
                    if idxs.count == 1: # first key does not yet have /_alt/#
                        mappings[idxs.first][1] += '/_alt/0'
                    mappings[i][1] += '/_alt/'+str(idxs.count)
                    idxs.count += 1
            
            id_node = None
            if out_is_db:
                # Build the template tree once; each row then just fills in
                # the leaf values
                for i, mapping in enumerate(mappings):
                    in_, out = mapping
                    # All put_obj()s should return the same id_node
                    nodes, id_node = xpath.put_obj(root, out, '-1', has_types,
                        '$'+str(in_)) # value is placeholder that documents name
                    mappings[i] = [in_, nodes]
                assert id_node != None
                
                if debug: # only str() if debug
                    log_debug('Put template:\n'+str(root))
            
            def process_row(row, i):
                row_id = str(i)
                if id_node != None: xml_dom.set_value(id_node, row_id)
                for in_, out in mappings:
                    log_debug('Getting '+str(in_))
                    value = metadata_value(in_)
                    if value == None: value = cleanup(get_value(in_, row))
                    log_debug('Putting '+repr(value)+' to '+str(out))
                    if out_is_db: # out is list of XML nodes
                        for node in out: xml_dom.set_value(node, value)
                    elif value != None: # out is XPath
                        xpath.put_obj(root, out, row_id, has_types, value)
                if debug: log_debug('Putting:\n'+str(root))# only str() if debug
            return process_rows(process_row, rows, **kw_args)
        
        def map_table(col_names, rows, **kw_args):
            '''Maps a table (column names + rows) into the output tree.
            @param col_names The table's column names
            @param rows Iterable of rows, each a list of column values
            '''
            col_names_ct = len(col_names)
            col_idxs = util.list_flip(col_names)
            
            # Resolve prefixes: replace each input name with refs to the
            # matching (possibly prefixed) columns; unmatched names are dropped
            mappings_orig = mappings[:] # save a copy
            mappings[:] = [] # empty existing elements
            for in_, out in mappings_orig:
                if metadata_value(in_) == None:
                    try: cols = get_with_prefix(col_idxs, prefixes, in_)
                    except KeyError: pass
                    else: mappings[len(mappings):] = [[db_xml.ColRef(*col), out]
                        for col in cols] # can't use += because that uses =
            
            def get_value(in_, row): return row.list[in_.idx]
            def wrap_row(row):
                return util.ListDict(util.list_as_length(row, col_names_ct),
                    col_names, col_idxs) # handle CSV rows of different lengths
            
            return map_rows(get_value, util.WrapIter(wrap_row, rows), **kw_args)
        
        stdin = streams.LineCountStream(sys.stdin)
        def on_error(e):
            # Annotates the error with the current input line # and logs it
            exc.add_msg(e, term.emph('input line #:')+' '+str(stdin.line_num))
            ex_tracker.track(e)
        
        if in_is_db:
            in_db = connect_db(in_db_config)
            
            # Get table and schema name
            schema = in_schema # modified, so can't have same name as outer var
            table = in_table # modified, so can't have same name as outer var
            if table == None:
                assert in_is_xpaths
                # Parse "[schema.]table" out of the map's input root
                schema, sep, table = in_root.partition('.')
                if sep == '': # only the table name was specified
                    table = schema
                    schema = None
            table_is_esc = False
            if schema != None:
                table = sql.qual_name(in_db, schema, table)
                table_is_esc = True
            
            # Fetch rows
            if by_col: limit = 0 # only fetch column names
            else: limit = n
            cur = sql.select(in_db, table, limit=limit, start=start,
                cacheable=False, table_is_esc=table_is_esc)
            col_names = list(sql.col_names(cur))
            
            if by_col:
                map_table(col_names, []) # just create the template
                xml_func.strip(root)
                db_xml.put_table(in_db, root.firstChild, table, commit,
                    row_ins_ct_ref, table_is_esc)
                # NOTE(review): row_ct is never assigned on this path, so the
                # `return row_ct` below will raise UnboundLocalError when
                # by_col is enabled -- verify/fix
            else:
                # Use normal by-row method
                row_ct = map_table(col_names, sql.rows(cur), rows_start=start)
                    # rows_start: pre-start rows have been skipped
            
            in_db.db.close()
        elif in_is_xml:
            def get_rows(doc2rows):
                # Flattens the rows of each XML document on stdin into one
                # row stream
                return iters.flatten(itertools.imap(doc2rows,
                    xml_parse.docs_iter(stdin, on_error)))
            
            if map_path == None:
                # No mapping: copy each input row element directly to output
                def doc2rows(in_xml_root):
                    iter_ = xml_dom.NodeElemIter(in_xml_root)
                    util.skip(iter_, xml_dom.is_text) # skip metadata
                    return iter_
                
                row_ct = process_rows(lambda row, i: root.appendChild(row),
                    get_rows(doc2rows))
            else:
                def doc2rows(in_xml_root):
                    rows = xpath.get(in_xml_root, in_root, limit=end)
                    if rows == []: raise SystemExit('Map error: Root "'
                        +in_root+'" not found in input')
                    return rows
                
                def get_value(in_, row):
                    # Look up the input XPath under each allowed prefix
                    in_ = './{'+(','.join(strings.with_prefixes(
                        ['']+prefixes, in_)))+'}' # also with no prefix
                    nodes = xpath.get(row, in_, allow_rooted=False)
                    if nodes != []: return xml_dom.value(nodes[0])
                    else: return None
                
                row_ct = map_rows(get_value, get_rows(doc2rows))
        else: # input is CSV
            map_ = dict(mappings) # NOTE(review): appears unused -- verify
            reader, col_names = csvs.reader_and_header(sys.stdin)
            row_ct = map_table(col_names, reader)
        
        return row_ct
    
    def process_inputs(root, row_ready):
        # Runs process_input() for every map on the command line and totals
        # the row counts
        row_ct = 0
        for map_path in map_paths:
            row_ct += process_input(root, row_ready, map_path)
        return row_ct
    
    pool.share_vars(locals()) # make these locals available to pool tasks
    if out_is_db:
        out_db = connect_db(out_db_config)
        try:
            if redo: sql.empty_db(out_db)
            pool.share_vars(locals())
            
            def row_ready(row_num, input_row):
                '''Inserts one mapped row (now in the output tree) into the
                output DB, annotating and tracking any errors'''
                def on_error(e):
                    exc.add_msg(e, term.emph('row #:')+' '+str(row_num+1))
                        # row # is internally 0-based, but 1-based to the user
                    exc.add_msg(e, term.emph('input row:')+'\n'+str(input_row))
                    if verbose_errors:
                        exc.add_msg(e, term.emph('output row:')+'\n'+str(root))
                    ex_tracker.track(e, row_num, detail=verbose_errors)
                pool.share_vars(locals())
                
                row_root = root.cloneNode(True) # deep copy so don't modify root
                xml_func.process(row_root, on_error)
                if not xml_dom.is_empty(row_root):
                    assert xml_dom.has_one_child(row_root)
                    try:
                        # Savepoint so one bad row doesn't abort the whole
                        # transaction
                        sql.with_savepoint(out_db,
                            lambda: db_xml.put(out_db, row_root.firstChild,
                                row_ins_ct_ref, on_error))
                        if commit: out_db.db.commit()
                    except sql.DatabaseErrors, e: on_error(e)
            
            row_ct = process_inputs(root, row_ready)
            sys.stdout.write('Inserted '+str(row_ins_ct_ref[0])+
                ' new rows into database\n')
            
            # Consume asynchronous tasks
            pool.main_loop()
        finally:
            out_db.db.rollback() # discard any uncommitted work
            out_db.db.close()
    else:
        def on_error(e): ex_tracker.track(e)
        def row_ready(row_num, input_row): pass # output is written all at once
        row_ct = process_inputs(root, row_ready)
        xml_func.process(root, on_error)
        if out_is_xml_ref[0]:
            doc.writexml(sys.stdout, **xml_dom.prettyxml_config)
        else: # output is CSV
            raise NotImplementedError('CSV output not supported yet')
    
    # Consume any asynchronous tasks not already consumed above
    pool.main_loop()
    
    # Report stats; exit status is the # of errors (see file header)
    profiler.stop(row_ct)
    ex_tracker.add_iters(row_ct)
    if verbose:
        sys.stderr.write('Processed '+str(row_ct)+' input rows\n')
        sys.stderr.write(profiler.msg()+'\n')
        sys.stderr.write(ex_tracker.msg()+'\n')
    ex_tracker.exit()
417

    
418
def main():
419
    try: main_()
420
    except Parser.SyntaxError, e: raise SystemExit(str(e))
421

    
422
if __name__ == '__main__':
    # Profile to the file named by $profile_to if it's set; otherwise just run
    profile_to = opts.get_env_var('profile_to', None)
    if profile_to == None: main()
    else:
        import cProfile
        sys.stderr.write('Profiling to '+profile_to+'\n')
        cProfile.run(main.func_code, profile_to)
            # main.func_code is exec()able, so cProfile.run() accepts it
(25-25/47)