#!/usr/bin/env python
2
# Maps one datasource to another, using a map spreadsheet if needed
3
# Exit status is the # of errors in the import, up to the maximum exit status
4
# For outputting an XML file to a PostgreSQL database, use the general format of
5
# http://vegbank.org/vegdocs/xml/vegbank_example_ver1.0.2.xml
6

    
7
import csv
8
import itertools
9
import os.path
10
import sys
11
import xml.dom.minidom as minidom
12

    
13
sys.path.append(os.path.dirname(__file__)+"/../lib")
14

    
15
import csvs
16
import exc
17
import iters
18
import maps
19
import opts
20
import parallel
21
import Parser
22
import profiling
23
import sql
24
import streams
25
import strings
26
import term
27
import util
28
import xpath
29
import xml_dom
30
import xml_func
31
import xml_parse
32

    
33
def get_with_prefix(map_, prefixes, key):
34
    '''Gets all entries for the given key with any of the given prefixes'''
35
    values = []
36
    for key_ in strings.with_prefixes(['']+prefixes, key): # also with no prefix
37
        try: value = map_[key_]
38
        except KeyError, e: continue # keep going
39
        values.append(value)
40
    
41
    if values != []: return values
42
    else: raise e # re-raise last KeyError
43

    
44
def metadata_value(name):
    '''Always returns None: the metadata-value feature has been removed.
    Kept so callers can still probe whether an input is a metadata entry.'''
    return None

def cleanup(val):
    '''Normalizes a raw input value for mapping.
    None passes through unchanged; any other value is converted to unicode,
    whitespace-cleaned, and then mapped to None if it equals u'' or u'\\N'
    (the PostgreSQL COPY NULL marker).'''
    if val == None:
        return val
    cleaned = strings.cleanup(strings.ustr(val))
    return util.none_if(cleaned, u'', u'\\N')

def main_():
    '''Runs the mapping tool.
    
    Reads all configuration from environment variables, then maps each input
    (a DB table, a stream of XML documents on stdin, or CSV on stdin) through
    the map spreadsheets given on the command line into an XML tree, and
    either inserts the result into an output database or writes XML to stdout.
    Raises SystemExit for usage/map errors.
    '''
    env_names = [] # collects the names of env vars read, for the usage message
    def usage_err():
        raise SystemExit('Usage: '+opts.env_usage(env_names, True)+' '
            +sys.argv[0]+' [map_path...] [<input] [>output]')
    
    ## Get config from env vars
    
    # Modes
    test = opts.env_flag('test', False, env_names)
    commit = opts.env_flag('commit', False, env_names) and not test
        # never commit in test mode
    redo = opts.env_flag('redo', test, env_names) and not commit
        # never redo in commit mode (manually run `make empty_db` instead)
    
    # Ranges
    start = util.cast(int, opts.get_env_var('start', '0', env_names))
    if test: end_default = 1 # in test mode default to processing a single row
    else: end_default = None
    end = util.cast(int, util.none_if(
        opts.get_env_var('n', end_default, env_names), u''))
    if end != None: end += start # `n` is a count, so the end bound is relative
    
    # Optimization
    if test: cpus_default = 0 # no parallelism in test mode
    else: cpus_default = None
    cpus = util.cast(int, util.none_if(opts.get_env_var('cpus', cpus_default,
        env_names), u''))
    
    # Debugging
    debug = opts.env_flag('debug', False, env_names)
    sql.run_raw_query.debug = debug
    verbose = debug or opts.env_flag('verbose', not test, env_names)
    opts.get_env_var('profile_to', None, env_names) # add to env_names
    
    # DB: each side's connection info comes from prefixed env vars,
    # e.g. in_engine/in_host/... and out_engine/out_host/...
    db_config_names = ['engine', 'host', 'user', 'password', 'database']
    def get_db_config(prefix):
        return opts.get_env_vars(db_config_names, prefix, env_names)
    in_db_config = get_db_config('in')
    out_db_config = get_db_config('out')
    in_is_db = 'engine' in in_db_config # an `engine` var marks a DB endpoint
    out_is_db = 'engine' in out_db_config
    
    ##
    
    # Logging: everything goes to stderr so stdout stays clean for XML output
    def log(msg, on=verbose):
        if on: sys.stderr.write(msg+'\n')
    if debug: log_debug = lambda msg: log(msg, debug)
    else: log_debug = sql.log_debug_none
    
    # Parse args
    map_paths = sys.argv[1:]
    if map_paths == []:
        if in_is_db or not out_is_db: usage_err()
            # a DB input, or non-DB output, requires an explicit map
        else: map_paths = [None] # XML input -> DB output can run unmapped
    
    def connect_db(db_config):
        log('Connecting to '+sql.db_config_str(db_config))
        return sql.connect(db_config, log_debug=log_debug)
    
    if end != None: end_str = str(end-1) # display the last row #, inclusive
    else: end_str = 'end'
    log('Processing input rows '+str(start)+'-'+end_str)
    
    ex_tracker = exc.ExPercentTracker(iter_text='row')
    profiler = profiling.ItersProfiler(start_now=True, iter_text='row')
    
    # Parallel processing
    pool = parallel.MultiProducerPool(cpus)
    log('Using '+str(pool.process_ct)+' parallel CPUs')
    
    doc = xml_dom.create_doc()
    root = doc.documentElement
    out_is_xml_ref = [False] # one-element list so nested defs can rebind it
    in_label_ref = [None] # likewise; set from the map header, used below
    def update_in_label():
        # Record the input datasource label in the output tree (ignored node)
        if in_label_ref[0] != None:
            xpath.get(root, '/_ignore/inLabel="'+in_label_ref[0]+'"', True)
    def prep_root():
        # Reset the output tree between rows, re-adding the input label
        root.clear()
        update_in_label()
    prep_root()
    
    def process_input(root, row_ready, map_path):
        '''Inputs datasource to XML tree, mapping if needed
        @param row_ready(row_num, input_row) called after each row is mapped
        @param map_path path of the map spreadsheet, or None for no mapping
        @return the # of rows processed
        '''
        # Load map header
        in_is_xpaths = True
        out_is_xpaths = True
        out_label = None
        if map_path != None:
            metadata = []
            mappings = []
            stream = open(map_path, 'rb')
            reader = csv.reader(stream)
            in_label, out_label = reader.next()[:2] # header row: in, out cols
            
            def split_col_name(name):
                # Parses "label[p0,p1]:root" into its parts.
                # NOTE(review): appears unused here in favor of maps.col_info
                label, sep, root = name.partition(':')
                label, sep2, prefixes_str = label.partition('[')
                prefixes_str = strings.remove_suffix(']', prefixes_str)
                prefixes = strings.split(',', prefixes_str)
                return label, sep != '', root, prefixes
                    # extract datasrc from "datasrc[data_format]"
            
            in_label, in_root, prefixes = maps.col_info(in_label)
            in_is_xpaths = in_root != None # a root means XPath-style columns
            in_label_ref[0] = in_label
            update_in_label()
            out_label, out_root = maps.col_info(out_label)[:2]
            out_is_xpaths = out_root != None
            if out_is_xpaths: has_types = out_root.find('/*s/') >= 0
                # outer elements are types
            
            # Load map body: (input column, parsed output path) pairs
            for row in reader:
                in_, out = row[:2]
                if out != '': # skip unmapped columns
                    if out_is_xpaths: out = xpath.parse(out_root+out)
                    mappings.append((in_, out))
            
            stream.close()
            
            root.ownerDocument.documentElement.tagName = out_label
        in_is_xml = in_is_xpaths and not in_is_db
        out_is_xml_ref[0] = out_is_xpaths and not out_is_db
        
        def process_rows(process_row, rows, rows_start=0):
            '''Processes input rows
            @param process_row(in_row, i)
            @rows_start The row # of the first row in rows. Set this only if the
                pre-start rows have already been skipped.
            '''
            rows = iter(rows)
            
            if end != None: row_nums = xrange(rows_start, end)
            else: row_nums = itertools.count(rows_start)
            for i in row_nums:
                try: row = rows.next()
                except StopIteration: break # no more rows
                if i < start: continue # not at start row yet
                
                process_row(row, i)
                row_ready(i, row)
            row_ct = i-start
            return row_ct
        
        def map_rows(get_value, rows, **kw_args):
            '''Maps input rows
            @param get_value(in_, row):str
            '''
            def process_row(row, i):
                row_id = str(i)
                for in_, out in mappings:
                    value = metadata_value(in_) # always None (feature removed)
                    if value == None:
                        log_debug('Getting '+str(in_))
                        value = cleanup(get_value(in_, row))
                    if value != None: # empty/NULL values are not output
                        log_debug('Putting '+str(out))
                        xpath.put_obj(root, out, row_id, has_types, value)
            return process_rows(process_row, rows, **kw_args)
        
        def map_table(col_names, rows, **kw_args):
            # Maps rows of a named-column table (DB cursor or CSV), resolving
            # each mapping's input column name to column indexes up front
            col_names_ct = len(col_names)
            col_idxs = util.list_flip(col_names) # name -> index lookup
            
            i = 0
            while i < len(mappings): # mappings len changes in loop
                in_, out = mappings[i]
                if metadata_value(in_) == None:
                    try: mappings[i] = (
                        get_with_prefix(col_idxs, prefixes, in_), out)
                    except KeyError:
                        del mappings[i] # input column not present in table
                        continue # keep i the same
                i += 1
            
            def get_value(in_, row):
                # in_ is now a list of column indexes; first non-None wins
                return util.coalesce(*util.list_subset(row.list, in_))
            def wrap_row(row):
                return util.ListDict(util.list_as_length(row, col_names_ct),
                    col_names, col_idxs) # handle CSV rows of different lengths
            
            return map_rows(get_value, util.WrapIter(wrap_row, rows), **kw_args)
        
        stdin = streams.LineCountStream(sys.stdin) # tracks line # for errors
        def on_error(e):
            exc.add_msg(e, term.emph('input line #:')+' '+str(stdin.line_num))
            ex_tracker.track(e)
        
        if in_is_db:
            assert in_is_xpaths # DB input requires an XPath-style map
            
            in_db = connect_db(in_db_config)
            cur = sql.select(in_db, table=in_root, limit=end, start=start)
            row_ct = map_table(list(sql.col_names(cur)), sql.rows(cur),
                rows_start=start) # rows_start: pre-start rows have been skipped
            
            in_db.db.close()
        elif in_is_xml:
            def get_rows(doc2rows):
                # Streams rows out of each XML document arriving on stdin
                return iters.flatten(itertools.imap(doc2rows,
                    xml_parse.docs_iter(stdin, on_error)))
            
            if map_path == None:
                # Unmapped: copy each input element straight into the output
                def doc2rows(in_xml_root):
                    iter_ = xml_dom.NodeElemIter(in_xml_root)
                    util.skip(iter_, xml_dom.is_text) # skip metadata
                    return iter_
                
                row_ct = process_rows(lambda row, i: root.appendChild(row),
                    get_rows(doc2rows))
            else:
                def doc2rows(in_xml_root):
                    rows = xpath.get(in_xml_root, in_root, limit=end)
                    if rows == []: raise SystemExit('Map error: Root "'
                        +in_root+'" not found in input')
                    return rows
                
                def get_value(in_, row):
                    # {a,b,...} XPath alternation over all allowed prefixes
                    in_ = './{'+(','.join(strings.with_prefixes(
                        ['']+prefixes, in_)))+'}' # also with no prefix
                    nodes = xpath.get(row, in_, allow_rooted=False)
                    if nodes != []: return xml_dom.value(nodes[0])
                    else: return None
                
                row_ct = map_rows(get_value, get_rows(doc2rows))
        else: # input is CSV
            map_ = dict(mappings)
            reader, col_names = csvs.reader_and_header(sys.stdin)
            row_ct = map_table(col_names, reader)
        
        return row_ct
    
    def process_inputs(root, row_ready):
        # Runs every map given on the command line; returns total row count
        row_ct = 0
        for map_path in map_paths:
            row_ct += process_input(root, row_ready, map_path)
        return row_ct
    
    pool.share_vars(locals())
        # presumably exposes these locals to pool workers -- see parallel module
    if out_is_db:
        import db_xml # deferred: only needed for DB output
        
        out_db = connect_db(out_db_config)
        try:
            if redo: sql.empty_db(out_db)
            row_ins_ct_ref = [0] # mutable insert counter shared with db_xml.put
            pool.share_vars(locals())
            
            def row_ready(row_num, input_row):
                # Called for each mapped row: post-processes the XML tree and
                # inserts it into the output DB inside a savepoint
                def on_error(e):
                    exc.add_msg(e, term.emph('row #:')+' '+str(row_num))
                    exc.add_msg(e, term.emph('input row:')+'\n'+str(input_row))
                    exc.add_msg(e, term.emph('output row:')+'\n'+str(root))
                    ex_tracker.track(e, row_num)
                pool.share_vars(locals())
                
                xml_func.process(root, on_error)
                if not xml_dom.is_empty(root):
                    assert xml_dom.has_one_child(root)
                    try:
                        sql.with_savepoint(out_db,
                            lambda: db_xml.put(out_db, root.firstChild,
                                row_ins_ct_ref, on_error))
                        if commit: out_db.db.commit()
                    except sql.DatabaseErrors, e: on_error(e)
                prep_root() # reset the tree for the next row
            
            row_ct = process_inputs(root, row_ready)
            sys.stdout.write('Inserted '+str(row_ins_ct_ref[0])+
                ' new rows into database\n')
            
            # Consume asynchronous tasks
            pool.main_loop()
        finally:
            out_db.db.rollback() # no-op after commit; undoes uncommitted work
            out_db.db.close()
    else:
        def on_error(e): ex_tracker.track(e)
        def row_ready(row_num, input_row): pass # whole tree written at the end
        row_ct = process_inputs(root, row_ready)
        xml_func.process(root, on_error)
        if out_is_xml_ref[0]:
            doc.writexml(sys.stdout, **xml_dom.prettyxml_config)
        else: # output is CSV
            raise NotImplementedError('CSV output not supported yet')
    
    # Consume any asynchronous tasks not already consumed above
    pool.main_loop()
    
    profiler.stop(row_ct)
    ex_tracker.add_iters(row_ct)
    if verbose:
        sys.stderr.write('Processed '+str(row_ct)+' input rows\n')
        sys.stderr.write(profiler.msg()+'\n')
        sys.stderr.write(ex_tracker.msg()+'\n')
    ex_tracker.exit() # exit status = # of errors, capped at the max status

def main():
351
    try: main_()
352
    except Parser.SyntaxError, e: raise SystemExit(str(e))
353

    
354
if __name__ == '__main__':
    # profile_to selects whether to run under cProfile and where to write the
    # stats file (it was also registered into env_names inside main_())
    profile_to = opts.get_env_var('profile_to', None)
    if profile_to != None:
        import cProfile
        sys.stderr.write('Profiling to '+profile_to+'\n')
        cProfile.run(main.func_code, profile_to)
            # NOTE(review): passes main's code object (exec'd by cProfile.run)
            # rather than the usual 'main()' statement string -- confirm intent
    else: main()