#!/usr/bin/env python
# Maps one datasource to another, using a map spreadsheet if needed
# Exit status is the # of errors in the import, up to the maximum exit status
# For outputting an XML file to a PostgreSQL database, use the general format of
# http://vegbank.org/vegdocs/xml/vegbank_example_ver1.0.2.xml
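# Example invocation (hypothetical script and file names; the run is configured
# via env vars such as test/commit/n/start, read in main_() below):
#   test=1 ./map my_map.csv <input.csv >output.xml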

import csv
import itertools
import os.path
import sys
import xml.dom.minidom as minidom

sys.path.append(os.path.dirname(__file__)+"/../lib")

import csvs
import exc
import iters
import maps
import opts
import parallel
import Parser
import profiling
import sql
import streams
import strings
import term
import util
import xpath
import xml_dom
import xml_func
import xml_parse

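# e.g. (assuming strings.with_prefixes(prefixes, key) yields prefix+key for each
# prefix): get_with_prefix({'x': 1, 'a.x': 2}, ['a.'], 'x') would return [1, 2]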
def get_with_prefix(map_, prefixes, key):
    '''Gets all entries for the given key with any of the given prefixes'''
    values = []
    for key_ in strings.with_prefixes(['']+prefixes, key): # also with no prefix
        try: value = map_[key_]
        except KeyError, e: continue # keep going
        values.append(value)
    
    if values != []: return values
    else: raise e # re-raise last KeyError

def metadata_value(name): return None # this feature has been removed

def cleanup(val):
    if val == None: return val
    return util.none_if(strings.cleanup(strings.ustr(val)), u'', u'\\N')

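# Reads config from env vars, maps each input row (from a DB table, XML on
# stdin, or CSV on stdin) into an in-memory XML tree using the map spreadsheets
# given on the command line, and either inserts the result into the output
# database or writes it as XML to stdout.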
def main_():
    env_names = []
    def usage_err():
        raise SystemExit('Usage: '+opts.env_usage(env_names, True)+' '
            +sys.argv[0]+' [map_path...] [<input] [>output]\n'
            'Note: Row #s start with 0')
    
    ## Get config from env vars
    
    # Modes
    test = opts.env_flag('test', False, env_names)
    commit = opts.env_flag('commit', False, env_names) and not test
        # never commit in test mode
    redo = opts.env_flag('redo', test, env_names) and not commit
        # never redo in commit mode (manually run `make empty_db` instead)
    
    # Ranges
    start = util.cast(int, opts.get_env_var('start', 0, env_names)) # 0-based
    if test: n_default = 1
    else: n_default = None
    n = util.cast(int, util.none_if(opts.get_env_var('n', n_default, env_names),
        u''))
    end = n
    if end != None: end += start
    
    # Optimization
    if test: cpus_default = 0
    else: cpus_default = None
    cpus = util.cast(int, util.none_if(opts.get_env_var('cpus', cpus_default,
        env_names), u''))
    
    # Debugging
    debug = opts.env_flag('debug', False, env_names)
    sql.run_raw_query.debug = debug
    verbose = debug or opts.env_flag('verbose', not test, env_names)
    opts.get_env_var('profile_to', None, env_names) # add to env_names
    
    # DB
    def get_db_config(prefix):
        return opts.get_env_vars(sql.db_config_names, prefix, env_names)
    in_db_config = get_db_config('in')
    out_db_config = get_db_config('out')
    in_is_db = 'engine' in in_db_config
    out_is_db = 'engine' in out_db_config
    
    ##
    
    # Logging
    def log(msg, on=verbose):
        if on: sys.stderr.write(msg+'\n')
    if debug: log_debug = lambda msg: log(msg, debug)
    else: log_debug = sql.log_debug_none
    
    # Parse args
    map_paths = sys.argv[1:]
    if map_paths == []:
        if in_is_db or not out_is_db: usage_err()
        else: map_paths = [None]
    
    def connect_db(db_config):
        log('Connecting to '+sql.db_config_str(db_config))
        return sql.connect(db_config, log_debug=log_debug)
    
    if end != None: end_str = str(end-1) # end is one past the last #
    else: end_str = 'end'
    log('Processing input rows '+str(start)+'-'+end_str)
    
    ex_tracker = exc.ExPercentTracker(iter_text='row')
    profiler = profiling.ItersProfiler(start_now=True, iter_text='row')
    
    # Parallel processing
    pool = parallel.MultiProducerPool(cpus)
    log('Using '+str(pool.process_ct)+' parallel CPUs')
    
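    # Staging XML tree that input rows are mapped into; prep_root() clears it
    # and re-adds the input-label marker (under /_ignore) so it can be reused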
    doc = xml_dom.create_doc()
    root = doc.documentElement
    out_is_xml_ref = [False]
    in_label_ref = [None]
    def update_in_label():
        if in_label_ref[0] != None:
            xpath.get(root, '/_ignore/inLabel="'+in_label_ref[0]+'"', True)
    def prep_root():
        root.clear()
        update_in_label()
    prep_root()
    
    def process_input(root, row_ready, map_path):
        '''Reads the datasource into the XML tree, mapping it if needed'''
        # Load map header
        in_is_xpaths = True
        out_is_xpaths = True
        out_label = None
        if map_path != None:
            metadata = []
            mappings = []
            stream = open(map_path, 'rb')
            reader = csv.reader(stream)
            in_label, out_label = reader.next()[:2]
            
            def split_col_name(name):
                label, sep, root = name.partition(':')
                label, sep2, prefixes_str = label.partition('[')
                prefixes_str = strings.remove_suffix(']', prefixes_str)
                prefixes = strings.split(',', prefixes_str)
                return label, sep != '', root, prefixes
                    # extract datasrc from "datasrc[data_format]"
            
            in_label, in_root, prefixes = maps.col_info(in_label)
            in_is_xpaths = in_root != None
            in_label_ref[0] = in_label
            update_in_label()
            out_label, out_root = maps.col_info(out_label)[:2]
            out_is_xpaths = out_root != None
            if out_is_xpaths: has_types = out_root.find('/*s/') >= 0
                # outer elements are types
            
            for row in reader:
                in_, out = row[:2]
                if out != '':
                    if out_is_xpaths: out = xpath.parse(out_root+out)
                    mappings.append((in_, out))
            
            stream.close()
            
            root.ownerDocument.documentElement.tagName = out_label
        in_is_xml = in_is_xpaths and not in_is_db
        out_is_xml_ref[0] = out_is_xpaths and not out_is_db
        
        def process_rows(process_row, rows, rows_start=0):
            '''Processes input rows
            @param process_row(in_row, i)
            @param rows_start The (0-based) row # of the first row in rows. Set
                this only if the pre-start rows have already been skipped.
            '''
            rows = iter(rows)
            
            if end != None: row_nums = xrange(rows_start, end)
            else: row_nums = itertools.count(rows_start)
            for i in row_nums:
                try: row = rows.next()
                except StopIteration: break # no more rows
                if i < start: continue # not at start row yet
                
                process_row(row, i)
                row_ready(i, row)
            row_ct = i-start
            return row_ct
        
        def map_rows(get_value, rows, **kw_args):
            '''Maps input rows
            @param get_value(in_, row):str
            '''
            def process_row(row, i):
                row_id = str(i)
                for in_, out in mappings:
                    value = metadata_value(in_)
                    if value == None:
                        log_debug('Getting '+str(in_))
                        value = cleanup(get_value(in_, row))
                    if value != None:
                        log_debug('Putting '+str(out))
                        xpath.put_obj(root, out, row_id, has_types, value)
            return process_rows(process_row, rows, **kw_args)
        
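        # Resolves each input column name in mappings to its column index(es)
        # up front (via get_with_prefix); mappings whose columns are missing
        # from the input are dropped.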
        def map_table(col_names, rows, **kw_args):
            col_names_ct = len(col_names)
            col_idxs = util.list_flip(col_names)
            
            i = 0
            while i < len(mappings): # mappings len changes in loop
                in_, out = mappings[i]
                if metadata_value(in_) == None:
                    try: mappings[i] = (
                        get_with_prefix(col_idxs, prefixes, in_), out)
                    except KeyError:
                        del mappings[i]
                        continue # keep i the same
                i += 1
            
            def get_value(in_, row):
                return util.coalesce(*util.list_subset(row.list, in_))
            def wrap_row(row):
                return util.ListDict(util.list_as_length(row, col_names_ct),
                    col_names, col_idxs) # handle CSV rows of different lengths
            
            return map_rows(get_value, util.WrapIter(wrap_row, rows), **kw_args)
        
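        # Wrap stdin so on_error() can report the current input line #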
        stdin = streams.LineCountStream(sys.stdin)
        def on_error(e):
            exc.add_msg(e, term.emph('input line #:')+' '+str(stdin.line_num))
            ex_tracker.track(e)
        
        if in_is_db:
            assert in_is_xpaths
            
            in_db = connect_db(in_db_config)
            cur = sql.select(in_db, table=in_root, limit=n, start=start)
            row_ct = map_table(list(sql.col_names(cur)), sql.rows(cur),
                rows_start=start) # rows_start: pre-start rows have been skipped
            
            in_db.db.close()
        elif in_is_xml:
            def get_rows(doc2rows):
                return iters.flatten(itertools.imap(doc2rows,
                    xml_parse.docs_iter(stdin, on_error)))
            
            if map_path == None:
                def doc2rows(in_xml_root):
                    iter_ = xml_dom.NodeElemIter(in_xml_root)
                    util.skip(iter_, xml_dom.is_text) # skip metadata
                    return iter_
                
                row_ct = process_rows(lambda row, i: root.appendChild(row),
                    get_rows(doc2rows))
            else:
                def doc2rows(in_xml_root):
                    rows = xpath.get(in_xml_root, in_root, limit=end)
                    if rows == []: raise SystemExit('Map error: Root "'
                        +in_root+'" not found in input')
                    return rows
                
                def get_value(in_, row):
                    in_ = './{'+(','.join(strings.with_prefixes(
                        ['']+prefixes, in_)))+'}' # also with no prefix
                    nodes = xpath.get(row, in_, allow_rooted=False)
                    if nodes != []: return xml_dom.value(nodes[0])
                    else: return None
                
                row_ct = map_rows(get_value, get_rows(doc2rows))
        else: # input is CSV
            map_ = dict(mappings)
            reader, col_names = csvs.reader_and_header(sys.stdin)
            row_ct = map_table(col_names, reader)
        
        return row_ct
    
    def process_inputs(root, row_ready):
        row_ct = 0
        for map_path in map_paths:
            row_ct += process_input(root, row_ready, map_path)
        return row_ct
    
    pool.share_vars(locals())
    if out_is_db:
        import db_xml
        
        out_db = connect_db(out_db_config)
        try:
            if redo: sql.empty_db(out_db)
            row_ins_ct_ref = [0]
            pool.share_vars(locals())
            
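            # Called once per completed input row: runs XML functions on the
            # staging tree, inserts it inside a savepoint (presumably so a
            # failed row doesn't abort the whole transaction), then clears the
            # tree for the next row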
            def row_ready(row_num, input_row):
                def on_error(e):
                    exc.add_msg(e, term.emph('row #:')+' '+str(row_num))
                    exc.add_msg(e, term.emph('input row:')+'\n'+str(input_row))
                    exc.add_msg(e, term.emph('output row:')+'\n'+str(root))
                    ex_tracker.track(e, row_num)
                pool.share_vars(locals())
                
                xml_func.process(root, on_error)
                if not xml_dom.is_empty(root):
                    assert xml_dom.has_one_child(root)
                    try:
                        sql.with_savepoint(out_db,
                            lambda: db_xml.put(out_db, root.firstChild,
                                row_ins_ct_ref, on_error))
                        if commit: out_db.db.commit()
                    except sql.DatabaseErrors, e: on_error(e)
                prep_root()
            
            row_ct = process_inputs(root, row_ready)
            sys.stdout.write('Inserted '+str(row_ins_ct_ref[0])+
                ' new rows into database\n')
            
            # Consume asynchronous tasks
            pool.main_loop()
        finally:
            out_db.db.rollback()
            out_db.db.close()
    else:
        def on_error(e): ex_tracker.track(e)
        def row_ready(row_num, input_row): pass
        row_ct = process_inputs(root, row_ready)
        xml_func.process(root, on_error)
        if out_is_xml_ref[0]:
            doc.writexml(sys.stdout, **xml_dom.prettyxml_config)
        else: # output is CSV
            raise NotImplementedError('CSV output not supported yet')
    
    # Consume any asynchronous tasks not already consumed above
    pool.main_loop()
    
    profiler.stop(row_ct)
    ex_tracker.add_iters(row_ct)
    if verbose:
        sys.stderr.write('Processed '+str(row_ct)+' input rows\n')
        sys.stderr.write(profiler.msg()+'\n')
        sys.stderr.write(ex_tracker.msg()+'\n')
    ex_tracker.exit()

def main():
    try: main_()
    except Parser.SyntaxError, e: raise SystemExit(str(e))

if __name__ == '__main__':
    profile_to = opts.get_env_var('profile_to', None)
    if profile_to != None:
        import cProfile
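        # stats written to profile_to can be viewed afterwards with the
        # standard pstats module (e.g. `python -m pstats <profile_to>`)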
        sys.stderr.write('Profiling to '+profile_to+'\n')
        cProfile.run(main.func_code, profile_to)
    else: main()