#!/usr/bin/env python
# Maps one datasource to another, using a map spreadsheet if needed
# Exit status is the # of errors in the import, up to the maximum exit status
# For outputting an XML file to a PostgreSQL database, use the general format of
# http://vegbank.org/vegdocs/xml/vegbank_example_ver1.0.2.xml
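#
# Map spreadsheet format (illustrative sketch, inferred from the parsing code in
# process_input() below; the exact label syntax is defined by maps.col_info()):
#   Row 1 (header): <in_label>[<prefix>,...]:<in_root>, <out_label>:<out_root>
#   Rows 2+:        <input column name or XPath>, <output XPath under out_root>
# A header label with no ":<root>" part appears to mark that side as non-XPath
# (e.g. plain CSV column names on the input side).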

import csv
import itertools
import os.path
import sys
import xml.dom.minidom as minidom

sys.path.append(os.path.dirname(__file__)+"/../lib")

import csvs
import exc
import iters
import maps
import opts
import parallel
import Parser
import profiling
import sql
import streams
import strings
import term
import util
import xpath
import xml_dom
import xml_func
import xml_parse
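
# Illustrative examples for get_with_prefix() below (assuming
# strings.with_prefixes(prefixes, key) yields prefix+key for each given prefix):
#   get_with_prefix({'x': 1, 'acme.x': 2}, ['acme.'], 'x') -> [1, 2]
#   get_with_prefix({'y': 1}, ['acme.'], 'x') -> raises the last KeyError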
def get_with_prefix(map_, prefixes, key):
    '''Gets all entries for the given key with any of the given prefixes'''
    values = []
    for key_ in strings.with_prefixes(['']+prefixes, key): # also with no prefix
        try: value = map_[key_]
        except KeyError, e: continue # keep going
        values.append(value)
    
    if values != []: return values
    else: raise e # re-raise last KeyError

def metadata_value(name): return None # this feature has been removed
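
# cleanup() below normalizes a raw input value: it converts it to a unicode
# string, runs strings.cleanup() on it, and (assuming util.none_if() returns None
# when the value equals one of the given sentinels) maps the empty string and the
# PostgreSQL NULL escape '\N' to None so the mapping code skips them.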
def cleanup(val):
    if val == None: return val
    return util.none_if(strings.cleanup(strings.ustr(val)), u'', u'\\N')

def main_():
    env_names = []
    def usage_err():
        raise SystemExit('Usage: '+opts.env_usage(env_names, True)+' '
            +sys.argv[0]+' [map_path...] [<input] [>output]\n'
            'Note: Row #s start with 1')
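
    # Example invocation (illustrative only; src_to_dst.map is a hypothetical map
    # file, and the DB connection settings come from env vars named by
    # sql.db_config_names, looked up under the 'in'/'out' prefixes used below):
    #   test=1 n=10 <db env vars> <this script> src_to_dst.map <input.csv >output.xml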
    ## Get config from env vars
    
    # Modes
    test = opts.env_flag('test', False, env_names)
    commit = opts.env_flag('commit', False, env_names) and not test
        # never commit in test mode
    redo = opts.env_flag('redo', test, env_names) and not commit
        # never redo in commit mode (manually run `make empty_db` instead)
    
    # Ranges
    start = util.cast(int, opts.get_env_var('start', 1, env_names)) # 1-based
    # Make start internally 0-based.
    # It's 1-based to the user to match up with the staging table row #s.
    start -= 1
    if test: n_default = 1
    else: n_default = None
    n = util.cast(int, util.none_if(opts.get_env_var('n', n_default, env_names),
        u''))
    end = n
    if end != None: end += start
    
    # Optimization
    if test: cpus_default = 0
    else: cpus_default = None
    cpus = util.cast(int, util.none_if(opts.get_env_var('cpus', cpus_default,
        env_names), u''))
    
    # Debugging
    debug = opts.env_flag('debug', False, env_names)
    sql.run_raw_query.debug = debug
    verbose = debug or opts.env_flag('verbose', not test, env_names)
    opts.get_env_var('profile_to', None, env_names) # add to env_names
    
    # DB
    def get_db_config(prefix):
        return opts.get_env_vars(sql.db_config_names, prefix, env_names)
    in_db_config = get_db_config('in')
    out_db_config = get_db_config('out')
    in_is_db = 'engine' in in_db_config
    out_is_db = 'engine' in out_db_config
    
    ##
    
    # Logging
    def log(msg, on=verbose):
        if on: sys.stderr.write(msg+'\n')
    if debug: log_debug = lambda msg: log(msg, debug)
    else: log_debug = sql.log_debug_none
    
    # Parse args
    map_paths = sys.argv[1:]
    if map_paths == []:
        if in_is_db or not out_is_db: usage_err()
        else: map_paths = [None]
    
    def connect_db(db_config):
        log('Connecting to '+sql.db_config_str(db_config))
        return sql.connect(db_config, log_debug=log_debug)
    
    if end != None: end_str = str(end-1) # end is one past the last #
    else: end_str = 'end'
    log('Processing input rows '+str(start)+'-'+end_str)
    
    ex_tracker = exc.ExPercentTracker(iter_text='row')
    profiler = profiling.ItersProfiler(start_now=True, iter_text='row')
    
    # Parallel processing
    pool = parallel.MultiProducerPool(cpus)
    log('Using '+str(pool.process_ct)+' parallel CPUs')
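
    # The output XML tree is (re)built one row at a time: prep_root() clears it
    # and re-adds the input label under /_ignore, and row_ready() (defined below
    # for DB output) consumes the tree for each row and then resets it again.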
    doc = xml_dom.create_doc()
    root = doc.documentElement
    out_is_xml_ref = [False]
    in_label_ref = [None]
    def update_in_label():
        if in_label_ref[0] != None:
            xpath.get(root, '/_ignore/inLabel="'+in_label_ref[0]+'"', True)
    def prep_root():
        root.clear()
        update_in_label()
    prep_root()
    
    def process_input(root, row_ready, map_path):
        '''Imports the datasource into the XML tree, mapping it if needed'''
        # Load map header
        in_is_xpaths = True
        out_is_xpaths = True
        out_label = None
        if map_path != None:
            metadata = []
            mappings = []
            stream = open(map_path, 'rb')
            reader = csv.reader(stream)
            in_label, out_label = reader.next()[:2]
            
            def split_col_name(name):
                label, sep, root = name.partition(':')
                label, sep2, prefixes_str = label.partition('[')
                prefixes_str = strings.remove_suffix(']', prefixes_str)
                prefixes = strings.split(',', prefixes_str)
                return label, sep != '', root, prefixes
                    # extract datasrc from "datasrc[data_format]"
            
            in_label, in_root, prefixes = maps.col_info(in_label)
            in_is_xpaths = in_root != None
            in_label_ref[0] = in_label
            update_in_label()
            out_label, out_root = maps.col_info(out_label)[:2]
            out_is_xpaths = out_root != None
            if out_is_xpaths: has_types = out_root.find('/*s/') >= 0
                # outer elements are types
            
            for row in reader:
                in_, out = row[:2]
                if out != '':
                    if out_is_xpaths: out = xpath.parse(out_root+out)
                    mappings.append((in_, out))
            
            stream.close()
            
            root.ownerDocument.documentElement.tagName = out_label
        in_is_xml = in_is_xpaths and not in_is_db
        out_is_xml_ref[0] = out_is_xpaths and not out_is_db
        
        def process_rows(process_row, rows, rows_start=0):
            '''Processes input rows
            @param process_row(in_row, i)
            @rows_start The (0-based) row # of the first row in rows. Set this
                only if the pre-start rows have already been skipped.
            '''
            rows = iter(rows)
            
            if end != None: row_nums = xrange(rows_start, end)
            else: row_nums = itertools.count(rows_start)
            for i in row_nums:
                try: row = rows.next()
                except StopIteration: break # no more rows
                if i < start: continue # not at start row yet
                
                process_row(row, i)
                row_ready(i, row)
            row_ct = i-start
            return row_ct
        
        def map_rows(get_value, rows, **kw_args):
            '''Maps input rows
            @param get_value(in_, row):str
            '''
            def process_row(row, i):
                row_id = str(i)
                for in_, out in mappings:
                    value = metadata_value(in_)
                    if value == None:
                        log_debug('Getting '+str(in_))
                        value = cleanup(get_value(in_, row))
                    if value != None:
                        log_debug('Putting '+str(out))
                        xpath.put_obj(root, out, row_id, has_types, value)
            return process_rows(process_row, rows, **kw_args)
        
        def map_table(col_names, rows, **kw_args):
            col_names_ct = len(col_names)
            col_idxs = util.list_flip(col_names)
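
            # Pre-resolve each mapping's input column name to the matching column
            # index(es), trying the name with each datasource prefix and with no
            # prefix; mappings whose column is missing from the input are dropped.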
            i = 0
            while i < len(mappings): # mappings len changes in loop
                in_, out = mappings[i]
                if metadata_value(in_) == None:
                    try: mappings[i] = (
                        get_with_prefix(col_idxs, prefixes, in_), out)
                    except KeyError:
                        del mappings[i]
                        continue # keep i the same
                i += 1
            
            def get_value(in_, row):
                return util.coalesce(*util.list_subset(row.list, in_))
            def wrap_row(row):
                return util.ListDict(util.list_as_length(row, col_names_ct),
                    col_names, col_idxs) # handle CSV rows of different lengths
            
            return map_rows(get_value, util.WrapIter(wrap_row, rows), **kw_args)
        
        stdin = streams.LineCountStream(sys.stdin)
        def on_error(e):
            exc.add_msg(e, term.emph('input line #:')+' '+str(stdin.line_num))
            ex_tracker.track(e)
        
        if in_is_db:
            assert in_is_xpaths
            
            in_db = connect_db(in_db_config)
            cur = sql.select(in_db, table=in_root, limit=n, start=start)
            row_ct = map_table(list(sql.col_names(cur)), sql.rows(cur),
                rows_start=start) # rows_start: pre-start rows have been skipped
            
            in_db.db.close()
        elif in_is_xml:
            def get_rows(doc2rows):
                return iters.flatten(itertools.imap(doc2rows,
                    xml_parse.docs_iter(stdin, on_error)))
            
            if map_path == None:
                def doc2rows(in_xml_root):
                    iter_ = xml_dom.NodeElemIter(in_xml_root)
                    util.skip(iter_, xml_dom.is_text) # skip metadata
                    return iter_
                
                row_ct = process_rows(lambda row, i: root.appendChild(row),
                    get_rows(doc2rows))
            else:
                def doc2rows(in_xml_root):
                    rows = xpath.get(in_xml_root, in_root, limit=end)
                    if rows == []: raise SystemExit('Map error: Root "'
                        +in_root+'" not found in input')
                    return rows
                
                def get_value(in_, row):
276
                        ['']+prefixes, in_)))+'}' # also with no prefix
277
                    nodes = xpath.get(row, in_, allow_rooted=False)
278
                    if nodes != []: return xml_dom.value(nodes[0])
279
                    else: return None
280
                
281
                row_ct = map_rows(get_value, get_rows(doc2rows))
282
        else: # input is CSV
283
            map_ = dict(mappings)
284
            reader, col_names = csvs.reader_and_header(sys.stdin)
285
            row_ct = map_table(col_names, reader)
286
        
287
        return row_ct
288
    
289
    def process_inputs(root, row_ready):
290
        row_ct = 0
291
        for map_path in map_paths:
292
            row_ct += process_input(root, row_ready, map_path)
293
        return row_ct
294
    
295
    pool.share_vars(locals())
296
    if out_is_db:
297
        import db_xml
298
        
299
        out_db = connect_db(out_db_config)
300
        try:
301
            if redo: sql.empty_db(out_db)
302
            row_ins_ct_ref = [0]
303
            pool.share_vars(locals())
304
            
305
            def row_ready(row_num, input_row):
306
                def on_error(e):
307
                    exc.add_msg(e, term.emph('row #:')+' '+str(row_num+1))
308
                        # row # is interally 0-based, but 1-based to the user
309
                    exc.add_msg(e, term.emph('input row:')+'\n'+str(input_row))
310
                    exc.add_msg(e, term.emph('output row:')+'\n'+str(root))
311
                    ex_tracker.track(e, row_num)
312
                pool.share_vars(locals())
313
                
314
                xml_func.process(root, on_error)
315
                if not xml_dom.is_empty(root):
316
                    assert xml_dom.has_one_child(root)
317
                    try:
318
                        sql.with_savepoint(out_db,
319
                            lambda: db_xml.put(out_db, root.firstChild,
320
                                row_ins_ct_ref, on_error))
321
                        if commit: out_db.db.commit()
322
                    except sql.DatabaseErrors, e: on_error(e)
323
                prep_root()
324
            
325
            row_ct = process_inputs(root, row_ready)
326
            sys.stdout.write('Inserted '+str(row_ins_ct_ref[0])+
327
                ' new rows into database\n')
328
            
329
            # Consume asynchronous tasks
330
            pool.main_loop()
331
        finally:
332
            out_db.db.rollback()
333
            out_db.db.close()
334
    else:
335
        def on_error(e): ex_tracker.track(e)
336
        def row_ready(row_num, input_row): pass
337
        row_ct = process_inputs(root, row_ready)
338
        xml_func.process(root, on_error)
339
        if out_is_xml_ref[0]:
340
            doc.writexml(sys.stdout, **xml_dom.prettyxml_config)
341
        else: # output is CSV
342
            raise NotImplementedError('CSV output not supported yet')
343
    
344
    # Consume any asynchronous tasks not already consumed above
345
    pool.main_loop()
346
    
347
    profiler.stop(row_ct)
348
    ex_tracker.add_iters(row_ct)
349
    if verbose:
350
        sys.stderr.write('Processed '+str(row_ct)+' input rows\n')
351
        sys.stderr.write(profiler.msg()+'\n')
352
        sys.stderr.write(ex_tracker.msg()+'\n')
353
    ex_tracker.exit()
354

    
355
def main():
356
    try: main_()
357
    except Parser.SyntaxError, e: raise SystemExit(str(e))
358

    
359
if __name__ == '__main__':
360
    profile_to = opts.get_env_var('profile_to', None)
361
    if profile_to != None:
362
        import cProfile
363
        sys.stderr.write('Profiling to '+profile_to+'\n')
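        # main.func_code is exec'd by cProfile.run(); the stats written to
        # profile_to can be inspected afterwards, e.g. with `python -m pstats <file>`.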
        cProfile.run(main.func_code, profile_to)
    else: main()