#!/usr/bin/env python
# Maps one datasource to another, using a map spreadsheet if needed
# Exit status is the # of errors in the import, up to the maximum exit status
# For outputting an XML file to a PostgreSQL database, use the general format of
# http://vegbank.org/vegdocs/xml/vegbank_example_ver1.0.2.xml

import csv
import itertools
import os.path
import sys
import xml.dom.minidom as minidom

sys.path.append(os.path.dirname(__file__)+"/../lib")

import csvs
import exc
import iters
import maps
import opts
import parallel
import Parser
import profiling
import sql
import streams
import strings
import term
import util
import xpath
import xml_dom
import xml_func
import xml_parse

def get_with_prefix(map_, prefixes, key):
    '''Gets all entries for the given key with any of the given prefixes'''
    values = []
    for key_ in strings.with_prefixes(['']+prefixes, key): # also with no prefix
        try: value = map_[key_]
        except KeyError, e: continue # keep going
        values.append(value)

    if values != []: return values
    else: raise e # re-raise last KeyError
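
# Example (assumes strings.with_prefixes(prefixes, key) simply yields prefix+key
# for each prefix): get_with_prefix({'acct.id': 0, 'id': 1}, ['acct.'], 'id')
# returns the values stored under both the unprefixed and the prefixed key.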

def metadata_value(name): return None # this feature has been removed

def cleanup(val):
    if val == None: return val
    return util.none_if(strings.cleanup(strings.ustr(val)), u'', u'\\N')
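
# Note (assumption about the local util module): util.none_if(value, *nulls) is
# expected to return None when value equals any of the given null markers, so
# cleanup() treats empty strings and the PostgreSQL-style NULL marker '\N' as
# missing values.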

def main_():
    env_names = []
    def usage_err():
        raise SystemExit('Usage: '+opts.env_usage(env_names, True)+' '
            +sys.argv[0]+' [map_path...] [<input] [>output]')

    ## Get config from env vars

    # Modes
    test = opts.env_flag('test', False, env_names)
    commit = opts.env_flag('commit', False, env_names) and not test
        # never commit in test mode
    redo = opts.env_flag('redo', test, env_names) and not commit
        # never redo in commit mode (manually run `make empty_db` instead)

    # Ranges
    start = util.cast(int, opts.get_env_var('start', '0', env_names))
    if test: end_default = 1
    else: end_default = None
    end = util.cast(int, util.none_if(
        opts.get_env_var('n', end_default, env_names), u''))
    if end != None: end += start
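        # e.g. start=1000 n=50 selects input rows 1000 through 1049
        # (end is exclusive: end = start + n)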

    # Optimization
    if test: cpus_default = 0
    else: cpus_default = None
    cpus = util.cast(int, util.none_if(opts.get_env_var('cpus', cpus_default,
        env_names), u''))

    # Debugging
    debug = opts.env_flag('debug', False, env_names)
    sql.run_raw_query.debug = debug
    verbose = debug or opts.env_flag('verbose', not test, env_names)
    opts.get_env_var('profile_to', None, env_names) # add to env_names

    # DB
    db_config_names = ['engine', 'host', 'user', 'password', 'database']
    def get_db_config(prefix):
        return opts.get_env_vars(db_config_names, prefix, env_names)
    in_db_config = get_db_config('in')
    out_db_config = get_db_config('out')
    in_is_db = 'engine' in in_db_config
    out_is_db = 'engine' in out_db_config

    ##

    # Logging
    def log(msg, on=verbose):
        if on: sys.stderr.write(msg)
    def log_start(action, on=verbose): log(action+'...\n', on)

    # Parse args
    map_paths = sys.argv[1:]
    if map_paths == []:
        if in_is_db or not out_is_db: usage_err()
        else: map_paths = [None]

    def connect_db(db_config):
        log_start('Connecting to '+sql.db_config_str(db_config))
        return sql.connect(db_config, debug=debug)

    if end != None: end_str = str(end-1)
    else: end_str = 'end'
    log_start('Processing input rows '+str(start)+'-'+end_str)

    ex_tracker = exc.ExPercentTracker(iter_text='row')
    profiler = profiling.ItersProfiler(start_now=True, iter_text='row')

    # Parallel processing
    pool = parallel.MultiProducerPool(cpus)
    log_start('Using '+str(pool.process_ct)+' parallel CPUs')

    doc = xml_dom.create_doc()
    root = doc.documentElement
    out_is_xml_ref = [False]
    in_label_ref = [None]
    def update_in_label():
        if in_label_ref[0] != None:
            xpath.get(root, '/_ignore/inLabel="'+in_label_ref[0]+'"', True)
    def prep_root():
        root.clear()
        update_in_label()
    prep_root()

    def process_input(root, row_ready, map_path):
        '''Inputs datasource to XML tree, mapping if needed'''
        # Load map header
        in_is_xpaths = True
        out_is_xpaths = True
        out_label = None
        if map_path != None:
            metadata = []
            mappings = []
            stream = open(map_path, 'rb')
            reader = csv.reader(stream)
            in_label, out_label = reader.next()[:2]

            def split_col_name(name):
                label, sep, root = name.partition(':')
                label, sep2, prefixes_str = label.partition('[')
                prefixes_str = strings.remove_suffix(']', prefixes_str)
                prefixes = strings.split(',', prefixes_str)
                return label, sep != '', root, prefixes
                    # extract datasrc from "datasrc[data_format]"
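
            # Based on split_col_name() above, a map header column looks like
            # "label[prefix1,prefix2]:/xpath/root", where the prefix list and
            # the xpath root are both optional; maps.col_info() below
            # presumably parses the same format.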

            in_label, in_root, prefixes = maps.col_info(in_label)
            in_is_xpaths = in_root != None
            in_label_ref[0] = in_label
            update_in_label()
            out_label, out_root = maps.col_info(out_label)[:2]
            out_is_xpaths = out_root != None
            if out_is_xpaths: has_types = out_root.find('/*s/') >= 0
                # outer elements are types

            for row in reader:
                in_, out = row[:2]
                if out != '':
                    if out_is_xpaths: out = xpath.parse(out_root+out)
                    mappings.append((in_, out))

            stream.close()

            root.ownerDocument.documentElement.tagName = out_label
        in_is_xml = in_is_xpaths and not in_is_db
        out_is_xml_ref[0] = out_is_xpaths and not out_is_db

        def process_rows(process_row, rows, rows_start=0):
            '''Processes input rows
            @param process_row(in_row, i)
            @param rows_start The row # of the first row in rows. Set this only
                if the pre-start rows have already been skipped.
            '''
            rows = iter(rows)

            if end != None: row_nums = xrange(rows_start, end)
            else: row_nums = itertools.count(rows_start)
            row_ct = 0
            for i in row_nums:
                try: row = rows.next()
                except StopIteration: break # no more rows
                if i < start: continue # not at start row yet

                process_row(row, i)
                row_ready(i, row)
                row_ct += 1 # count only the rows actually processed
            return row_ct

        def map_rows(get_value, rows, **kw_args):
            '''Maps input rows
            @param get_value(in_, row):str
            '''
            def process_row(row, i):
                row_id = str(i)
                for in_, out in mappings:
                    value = metadata_value(in_)
                    if value == None:
                        log_start('Getting '+str(in_), debug)
                        value = cleanup(get_value(in_, row))
                    if value != None:
                        log_start('Putting '+str(out), debug)
                        xpath.put_obj(root, out, row_id, has_types, value)
            return process_rows(process_row, rows, **kw_args)

        def map_table(col_names, rows, **kw_args):
            col_names_ct = len(col_names)
            col_idxs = util.list_flip(col_names)

            i = 0
            while i < len(mappings): # mappings len changes in loop
                in_, out = mappings[i]
                if metadata_value(in_) == None:
                    try: mappings[i] = (
                        get_with_prefix(col_idxs, prefixes, in_), out)
                    except KeyError:
                        del mappings[i]
                        continue # keep i the same
                i += 1

            def get_value(in_, row):
                return util.coalesce(*util.list_subset(row.list, in_))
            def wrap_row(row):
                return util.ListDict(util.list_as_length(row, col_names_ct),
                    col_names, col_idxs) # handle CSV rows of different lengths

            return map_rows(get_value, util.WrapIter(wrap_row, rows), **kw_args)

        stdin = streams.LineCountStream(sys.stdin)
        def on_error(e):
            exc.add_msg(e, term.emph('input line #:')+' '+str(stdin.line_num))
            ex_tracker.track(e)
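
        # Read rows from one of three input types: a database table, an XML
        # document stream on stdin, or a CSV file on stdin.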

        if in_is_db:
            assert in_is_xpaths

            in_db = connect_db(in_db_config)
            cur = sql.select(in_db, table=in_root, limit=end, start=start)
            row_ct = map_table(list(sql.col_names(cur)), sql.rows(cur),
                rows_start=start) # rows_start: pre-start rows have been skipped

            in_db.db.close()
        elif in_is_xml:
            def get_rows(doc2rows):
                return iters.flatten(itertools.imap(doc2rows,
                    xml_parse.docs_iter(stdin, on_error)))

            if map_path == None:
                def doc2rows(in_xml_root):
                    iter_ = xml_dom.NodeElemIter(in_xml_root)
                    util.skip(iter_, xml_dom.is_text) # skip metadata
                    return iter_

                row_ct = process_rows(lambda row, i: root.appendChild(row),
                    get_rows(doc2rows))
            else:
                def doc2rows(in_xml_root):
                    rows = xpath.get(in_xml_root, in_root, limit=end)
                    if rows == []: raise SystemExit('Map error: Root "'
                        +in_root+'" not found in input')
                    return rows

                def get_value(in_, row):
                    in_ = './{'+(','.join(strings.with_prefixes(
                        ['']+prefixes, in_)))+'}' # also with no prefix
                    nodes = xpath.get(row, in_, allow_rooted=False)
                    if nodes != []: return xml_dom.value(nodes[0])
                    else: return None

                row_ct = map_rows(get_value, get_rows(doc2rows))
        else: # input is CSV
            map_ = dict(mappings)
            reader, col_names = csvs.reader_and_header(sys.stdin)
            row_ct = map_table(col_names, reader)

        return row_ct

    def process_inputs(root, row_ready):
        row_ct = 0
        for map_path in map_paths:
            row_ct += process_input(root, row_ready, map_path)
        return row_ct

    pool.share_vars(locals())
    if out_is_db:
        import db_xml

        out_db = connect_db(out_db_config)
        try:
            if redo: sql.empty_db(out_db)
            row_ins_ct_ref = [0]
            pool.share_vars(locals())

            def row_ready(row_num, input_row):
                def on_error(e):
                    exc.add_msg(e, term.emph('row #:')+' '+str(row_num))
                    exc.add_msg(e, term.emph('input row:')+'\n'+str(input_row))
                    exc.add_msg(e, term.emph('output row:')+'\n'+str(root))
                    ex_tracker.track(e, row_num)
                pool.share_vars(locals())

                xml_func.process(root, on_error)
                if not xml_dom.is_empty(root):
                    assert xml_dom.has_one_child(root)
                    try:
                        sql.with_savepoint(out_db,
                            lambda: db_xml.put(out_db, root.firstChild,
                                row_ins_ct_ref, on_error))
                        if commit: out_db.db.commit()
                    except sql.DatabaseErrors, e: on_error(e)
                prep_root()
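            # Each row is written inside its own savepoint; assuming
            # sql.with_savepoint() rolls the savepoint back on error, a failed
            # row is logged via on_error() without losing rows already
            # inserted, and work is only made permanent per row when commit
            # mode is on (the final rollback below discards anything
            # uncommitted).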

            row_ct = process_inputs(root, row_ready)
            sys.stdout.write('Inserted '+str(row_ins_ct_ref[0])+
                ' new rows into database\n')

            # Consume asynchronous tasks
            pool.main_loop()
        finally:
            out_db.db.rollback()
            out_db.db.close()
    else:
        def on_error(e): ex_tracker.track(e)
        def row_ready(row_num, input_row): pass
        row_ct = process_inputs(root, row_ready)
        xml_func.process(root, on_error)
        if out_is_xml_ref[0]:
            doc.writexml(sys.stdout, **xml_dom.prettyxml_config)
        else: # output is CSV
            raise NotImplementedError('CSV output not supported yet')

    # Consume any asynchronous tasks not already consumed above
    pool.main_loop()

    profiler.stop(row_ct)
    ex_tracker.add_iters(row_ct)
    if verbose:
        sys.stderr.write('Processed '+str(row_ct)+' input rows\n')
        sys.stderr.write(profiler.msg()+'\n')
        sys.stderr.write(ex_tracker.msg()+'\n')
    ex_tracker.exit()

def main():
    try: main_()
    except Parser.SyntaxError, e: raise SystemExit(str(e))

if __name__ == '__main__':
    profile_to = opts.get_env_var('profile_to', None)
    if profile_to != None:
        import cProfile
        sys.stderr.write('Profiling to '+profile_to+'\n')
        cProfile.run(main.func_code, profile_to)
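            # the saved stats can be read back with the standard pstats module,
            # e.g. pstats.Stats(profile_to).print_stats()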
    else: main()