#!/usr/bin/env python
# Maps one datasource to another, using a map spreadsheet if needed
# Exit status is the # of errors in the import, up to the maximum exit status
# For outputting an XML file to a PostgreSQL database, use the general format of
# http://vegbank.org/vegdocs/xml/vegbank_example_ver1.0.2.xml

import csv
8
import itertools
9
import os.path
10
import sys
11
import xml.dom.minidom as minidom
12

    
13
sys.path.append(os.path.dirname(__file__)+"/../lib")
14

    
15
import csvs
16
import exc
17
import iters
18
import maps
19
import opts
20
import parallel
21
import Parser
22
import profiling
23
import sql
24
import streams
25
import strings
26
import term
27
import util
28
import xpath
29
import xml_dom
30
import xml_func
31
import xml_parse
32

    
33
def get_with_prefix(map_, prefixes, key):
34
    '''Gets all entries for the given key with any of the given prefixes'''
35
    values = []
36
    for key_ in strings.with_prefixes(['']+prefixes, key): # also with no prefix
37
        try: value = map_[key_]
38
        except KeyError, e: continue # keep going
39
        values.append(value)
40
    
41
    if values != []: return values
42
    else: raise e # re-raise last KeyError
def metadata_value(name):
    '''Looks up a metadata value by name. This feature has been removed, so
    every lookup now reports "not found" by returning None.'''
    return None
def cleanup(val):
    '''Cleans up a raw input value for mapping.
    None passes through unchanged. Anything else is converted to unicode and
    cleaned up, with the empty string and the NULL escape u'\\N' both
    normalized to None.'''
    if val == None: return val
    cleaned = strings.cleanup(strings.ustr(val))
    return util.none_if(cleaned, u'', u'\\N')
def main_():
    '''Maps the input datasource to the output, optionally via map
    spreadsheets (see process_input() below).
    
    All settings come from env vars (each read is registered in env_names for
    the usage message); the command-line args are the map spreadsheet paths.
    Each input row is built into an XML tree, then either inserted into the
    output DB or written to stdout as XML.
    '''
    env_names = [] # env vars read so far, for usage_err()
    def usage_err():
        raise SystemExit('Usage: '+opts.env_usage(env_names, True)+' '
            +sys.argv[0]+' [map_path...] [<input] [>output]\n'
            'Note: Row #s start with 1')
    
    ## Get config from env vars
    
    # Modes
    test = opts.env_flag('test', False, env_names)
    commit = opts.env_flag('commit', False, env_names) and not test
        # never commit in test mode
    redo = opts.env_flag('redo', test, env_names) and not commit
        # never redo in commit mode (manually run `make empty_db` instead)
    
    # Ranges
    start = util.cast(int, opts.get_env_var('start', 1, env_names)) # 1-based
    # Make start interally 0-based.
    # It's 1-based to the user to match up with the staging table row #s.
    start -= 1
    if test: n_default = 1
    else: n_default = None
    n = util.cast(int, util.none_if(opts.get_env_var('n', n_default, env_names),
        u''))
    end = n
    if end != None: end += start # end is one past the last row # to process
    
    # Debugging
    debug = opts.env_flag('debug', False, env_names)
    sql.run_raw_query.debug = debug
    verbose = debug or opts.env_flag('verbose', not test, env_names)
    opts.get_env_var('profile_to', None, env_names) # add to env_names
    
    # DB
    def get_db_config(prefix):
        return opts.get_env_vars(sql.db_config_names, prefix, env_names)
    in_db_config = get_db_config('in')
    out_db_config = get_db_config('out')
    in_is_db = 'engine' in in_db_config
    out_is_db = 'engine' in out_db_config
    in_schema = opts.get_env_var('in_schema', None, env_names)
    in_table = opts.get_env_var('in_table', None, env_names)
    
    # Optimization
    by_col = in_db_config == out_db_config and opts.env_flag('by_col', False,
        env_names) # by-column optimization only applies if mapping to same DB
    if test: cpus_default = 0
    else: cpus_default = None
    cpus = util.cast(int, util.none_if(opts.get_env_var('cpus', cpus_default,
        env_names), u''))
    
    ##
    
    # Logging
    def log(msg, on=verbose):
        if on: sys.stderr.write(msg+'\n')
    if debug: log_debug = lambda msg: log(msg, debug)
    else: log_debug = sql.log_debug_none
    
    # Parse args
    map_paths = sys.argv[1:]
    if map_paths == []:
        if in_is_db or not out_is_db: usage_err()
        else: map_paths = [None] # XML input -> DB output needs no map
    
    def connect_db(db_config):
        log('Connecting to '+sql.db_config_str(db_config))
        return sql.connect(db_config, log_debug=log_debug)
    
    if end != None: end_str = str(end-1) # end is one past the last #
    else: end_str = 'end'
    log('Processing input rows '+str(start)+'-'+end_str)
    
    ex_tracker = exc.ExPercentTracker(iter_text='row')
    profiler = profiling.ItersProfiler(start_now=True, iter_text='row')
    
    # Parallel processing
    pool = parallel.MultiProducerPool(cpus)
    log('Using '+str(pool.process_ct)+' parallel CPUs')
    
    doc = xml_dom.create_doc()
    root = doc.documentElement
    out_is_xml_ref = [False] # 1-elem lists so closures can rebind the value
    in_label_ref = [None]
    def update_in_label():
        if in_label_ref[0] != None:
            xpath.get(root, '/_ignore/inLabel="'+in_label_ref[0]+'"', True)
    def prep_root():
        '''Resets the output tree to an empty (labeled) state for the next row'''
        root.clear()
        update_in_label()
    prep_root()
    
    def process_input(root, row_ready, map_path):
        '''Inputs datasource to XML tree, mapping if needed'''
        # Load map header
        in_is_xpaths = True
        out_is_xpaths = True
        out_label = None
        if map_path != None:
            metadata = [] # NOTE(review): appears unused in this function
            mappings = []
            stream = open(map_path, 'rb')
            reader = csv.reader(stream)
            in_label, out_label = reader.next()[:2]
            
            # NOTE(review): defined but never called; maps.col_info() below is
            # used for this parsing instead -- confirm before removing
            def split_col_name(name):
                label, sep, root = name.partition(':')
                label, sep2, prefixes_str = label.partition('[')
                prefixes_str = strings.remove_suffix(']', prefixes_str)
                prefixes = strings.split(',', prefixes_str)
                return label, sep != '', root, prefixes
                    # extract datasrc from "datasrc[data_format]"
            
            in_label, in_root, prefixes = maps.col_info(in_label)
            in_is_xpaths = in_root != None
            in_label_ref[0] = in_label
            update_in_label()
            out_label, out_root = maps.col_info(out_label)[:2]
            out_is_xpaths = out_root != None
            if out_is_xpaths: has_types = out_root.find('/*s/') >= 0
                # outer elements are types
            
            for row in reader:
                in_, out = row[:2]
                if out != '':
                    if out_is_xpaths: out = xpath.parse(out_root+out)
                    mappings.append((in_, out))
            
            stream.close()
            
            root.ownerDocument.documentElement.tagName = out_label
        in_is_xml = in_is_xpaths and not in_is_db
        out_is_xml_ref[0] = out_is_xpaths and not out_is_db
        
        def process_rows(process_row, rows, rows_start=0):
            '''Processes input rows
            @param process_row(in_row, i)
            @param rows_start The (0-based) row # of the first row in rows.
                Set this only if the pre-start rows have already been skipped.
            @return the # of rows processed
            '''
            rows = iter(rows)
            
            if end != None: row_nums = xrange(rows_start, end)
            else: row_nums = itertools.count(rows_start)
            for i in row_nums:
                try: row = rows.next()
                except StopIteration: break # no more rows
                if i < start: continue # not at start row yet
                
                process_row(row, i)
                row_ready(i, row)
            # NOTE(review): if the loop ends by exhausting row_nums (the n/end
            # limit) rather than by StopIteration, row # i WAS processed, so
            # this undercounts by 1; and if the loop body never runs at all,
            # i is unbound here (NameError) -- confirm intended behavior
            row_ct = i-start
            return row_ct
        
        def map_rows(get_value, rows, **kw_args):
            '''Maps input rows
            @param get_value(in_, row):str
            '''
            # NOTE(review): mappings/has_types are only bound when a map
            # spreadsheet was given (map_path != None); these helpers assume
            # that case -- confirm against the callers
            def process_row(row, i):
                row_id = str(i)
                for in_, out in mappings:
                    value = metadata_value(in_)
                    if value == None:
                        log_debug('Getting '+str(in_))
                        value = cleanup(get_value(in_, row))
                    if value != None:
                        log_debug('Putting '+str(out))
                        xpath.put_obj(root, out, row_id, has_types, value)
            return process_rows(process_row, rows, **kw_args)
        
        def map_table(col_names, rows, **kw_args):
            '''Maps rows of a table (CSV or DB cursor), first resolving each
            mapping's input column name to the matching column #(s)'''
            col_names_ct = len(col_names)
            col_idxs = util.list_flip(col_names)
            
            # Resolve input column names; drop mappings whose input column is
            # not present in this table
            i = 0
            while i < len(mappings): # mappings len changes in loop
                in_, out = mappings[i]
                if metadata_value(in_) == None:
                    try: mappings[i] = (
                        get_with_prefix(col_idxs, prefixes, in_), out)
                    except KeyError:
                        del mappings[i]
                        continue # keep i the same
                i += 1
            
            def get_value(in_, row):
                return util.coalesce(*util.list_subset(row.list, in_))
            def wrap_row(row):
                return util.ListDict(util.list_as_length(row, col_names_ct),
                    col_names, col_idxs) # handle CSV rows of different lengths
            
            return map_rows(get_value, util.WrapIter(wrap_row, rows), **kw_args)
        
        stdin = streams.LineCountStream(sys.stdin)
        def on_error(e):
            exc.add_msg(e, term.emph('input line #:')+' '+str(stdin.line_num))
            ex_tracker.track(e)
        
        if in_is_db:
            in_db = connect_db(in_db_config)
            
            # Get table and schema name
            schema = in_schema # modified, so can't have same name as outer var
            table = in_table # modified, so can't have same name as outer var
            if table == None:
                assert in_is_xpaths
                schema, sep, table = in_root.partition('.')
                if sep == '': # only the table name was specified
                    table = schema
                    schema = None
            table_is_esc = False
            if schema != None:
                table = sql.qual_name(in_db, schema, table)
                table_is_esc = True
            
            if by_col:
                # Mapping to same DB and by-column optimization enabled
                raise NotImplementedError(
                    'By-column optimization not available yet')
            else:
                # Use normal by-row method
                cur = sql.select(in_db, table, limit=n, start=start,
                    table_is_esc=table_is_esc)
                row_ct = map_table(list(sql.col_names(cur)), sql.rows(cur),
                    rows_start=start) # rows_start: pre-start rows were skipped
            
            in_db.db.close()
        elif in_is_xml:
            def get_rows(doc2rows):
                return iters.flatten(itertools.imap(doc2rows,
                    xml_parse.docs_iter(stdin, on_error)))
            
            if map_path == None:
                # No map: pass the input XML rows through unchanged
                def doc2rows(in_xml_root):
                    iter_ = xml_dom.NodeElemIter(in_xml_root)
                    util.skip(iter_, xml_dom.is_text) # skip metadata
                    return iter_
                
                row_ct = process_rows(lambda row, i: root.appendChild(row),
                    get_rows(doc2rows))
            else:
                def doc2rows(in_xml_root):
                    rows = xpath.get(in_xml_root, in_root, limit=end)
                    if rows == []: raise SystemExit('Map error: Root "'
                        +in_root+'" not found in input')
                    return rows
                
                def get_value(in_, row):
                    in_ = './{'+(','.join(strings.with_prefixes(
                        ['']+prefixes, in_)))+'}' # also with no prefix
                    nodes = xpath.get(row, in_, allow_rooted=False)
                    if nodes != []: return xml_dom.value(nodes[0])
                    else: return None
                
                row_ct = map_rows(get_value, get_rows(doc2rows))
        else: # input is CSV
            map_ = dict(mappings) # NOTE(review): appears unused
            reader, col_names = csvs.reader_and_header(sys.stdin)
            row_ct = map_table(col_names, reader)
        
        return row_ct
    
    def process_inputs(root, row_ready):
        '''Runs process_input() for each map path; returns the total row #'''
        row_ct = 0
        for map_path in map_paths:
            row_ct += process_input(root, row_ready, map_path)
        return row_ct
    
    pool.share_vars(locals()) # presumably exposes these to pool workers
    if out_is_db:
        import db_xml
        
        out_db = connect_db(out_db_config)
        try:
            if redo: sql.empty_db(out_db)
            row_ins_ct_ref = [0]
            pool.share_vars(locals())
            
            def row_ready(row_num, input_row):
                '''Called once per completed row tree: runs XML functions,
                inserts the row into the output DB, then resets the tree'''
                def on_error(e):
                    exc.add_msg(e, term.emph('row #:')+' '+str(row_num+1))
                        # row # is interally 0-based, but 1-based to the user
                    exc.add_msg(e, term.emph('input row:')+'\n'+str(input_row))
                    exc.add_msg(e, term.emph('output row:')+'\n'+str(root))
                    ex_tracker.track(e, row_num)
                pool.share_vars(locals())
                
                xml_func.process(root, on_error)
                if not xml_dom.is_empty(root):
                    assert xml_dom.has_one_child(root)
                    try:
                        sql.with_savepoint(out_db,
                            lambda: db_xml.put(out_db, root.firstChild,
                                row_ins_ct_ref, on_error))
                        if commit: out_db.db.commit()
                    except sql.DatabaseErrors, e: on_error(e)
                prep_root()
            
            row_ct = process_inputs(root, row_ready)
            sys.stdout.write('Inserted '+str(row_ins_ct_ref[0])+
                ' new rows into database\n')
            
            # Consume asynchronous tasks
            pool.main_loop()
        finally:
            out_db.db.rollback() # discard any uncommitted work
            out_db.db.close()
    else:
        def on_error(e): ex_tracker.track(e)
        def row_ready(row_num, input_row): pass
        row_ct = process_inputs(root, row_ready)
        xml_func.process(root, on_error)
        if out_is_xml_ref[0]:
            doc.writexml(sys.stdout, **xml_dom.prettyxml_config)
        else: # output is CSV
            raise NotImplementedError('CSV output not supported yet')
    
    # Consume any asynchronous tasks not already consumed above
    pool.main_loop()
    
    # Report totals
    profiler.stop(row_ct)
    ex_tracker.add_iters(row_ct)
    if verbose:
        sys.stderr.write('Processed '+str(row_ct)+' input rows\n')
        sys.stderr.write(profiler.msg()+'\n')
        sys.stderr.write(ex_tracker.msg()+'\n')
    ex_tracker.exit() # exit status = # of errors (see file header)
def main():
380
    try: main_()
381
    except Parser.SyntaxError, e: raise SystemExit(str(e))
if __name__ == '__main__':
    # If $profile_to is set, run under cProfile and save stats to that file;
    # otherwise run directly
    profile_to = opts.get_env_var('profile_to', None)
    if profile_to != None:
        import cProfile
        sys.stderr.write('Profiling to '+profile_to+'\n')
        # Pass a statement string, the documented cProfile.run() interface.
        # The previous main.func_code relied on exec-ing a raw code object,
        # which is fragile and CPython-2-specific.
        cProfile.run('main()', profile_to)
    else: main()