#!/usr/bin/env python
# Maps one datasource to another, using a map spreadsheet if needed
# For outputting an XML file to a PostgreSQL database, use the general format of
# http://vegbank.org/vegdocs/xml/vegbank_example_ver1.0.2.xml

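# Example invocation (shape only; the exact environment variable names are
# whatever opts.get_env_vars() derives from the 'in'/'out' prefixes used in
# main(), and the file names here are placeholders):
#   <in-db env vars> <out-db env vars> [n=<row limit>] [commit=1] \
#       ./map_script [map_path] [<input] [>output]
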
import os.path
import sys
import xml.dom.minidom

sys.path.append(os.path.dirname(__file__)+"/../lib")

import opts
from Parser import SyntaxException
import sql
import xml_dom
import xml_func

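# A map cell that starts with ':' holds a literal metadata value rather than
# an input column or path; metadata_value() returns that literal (without the
# ':'), or None for an ordinary cell.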
def metadata_value(name):
    if type(name) == str and name.startswith(':'): return name[1:]
    else: return None

def main():
    env_names = []
    def usage_err():
        raise SystemExit('Usage: '+opts.env_usage(env_names, True)
            +' [commit=1] '+sys.argv[0]+' [map_path] [<input] [>output]')
    limit = opts.get_env_var('n', None, env_names)
    if limit != None: limit = int(limit)
    commit = opts.env_flag('commit')

    # Get db config from env vars
    db_config_names = ['engine', 'host', 'user', 'password', 'database']
    def get_db_config(prefix):
        return opts.get_env_vars(db_config_names, prefix, env_names)
    in_db_config = get_db_config('in')
    out_db_config = get_db_config('out')
    in_is_db = 'engine' in in_db_config
    out_is_db = 'engine' in out_db_config
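    # A side is treated as a database when its *engine env var is set;
    # otherwise that side falls back to stdin (input) or stdout (output).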

    # Parse args
    map_path = None
    try: _prog_name, map_path = sys.argv
    except ValueError:
        if in_is_db: usage_err()

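    # The map file is a CSV.  Its header row gives the input and output column
    # labels, each optionally suffixed with ':<root path>'; the suffix switches
    # that side into XPath mode and supplies the root that data-row paths are
    # relative to.  Each later row pairs an input column/path with an output
    # path.  A made-up illustration of the shape (not a real map):
    #   SourceLabel:/source/row,OutputLabel:/*s/targetRoot
    #   someInputColumn,/some/output/path
    #   :literal metadata value,/some/other/output/path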
    # Load map header
    in_is_xpaths = True
    if map_path != None:
        import copy
        import csv

        import xpath

        metadata = []
        mappings = []
        stream = open(map_path, 'rb')
        reader = csv.reader(stream)
        in_label, out_label = reader.next()[:2]
        def split_col_name(name):
            name, sep, root = name.partition(':')
            return name, sep != '', root
        in_label, in_is_xpaths, in_root = split_col_name(in_label)
        out_label, out_is_xpaths, out_root = split_col_name(out_label)
        assert out_is_xpaths # CSV output not supported yet
        has_types = out_root.startswith('/*s/') # outer elements are types
        for row in reader:
            in_, out = row[:2]
            if out != '':
                if out_is_xpaths: out = out_root+out
                mappings.append((in_, out))
        stream.close()
    in_is_xml = in_is_xpaths and not in_is_db

    # Input datasource to XML tree, mapping if needed
    if in_is_xml:
        doc0 = xml.dom.minidom.parse(sys.stdin)
    if map_path != None:
        doc1 = xml_dom.create_doc(out_label)
        root = doc1.documentElement
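        # Three input modes follow: a relational database (queried through
        # db_xml), an XML document on stdin, or a CSV file on stdin.  In every
        # mode, each mapped value is placed into the output tree at its output
        # XPath under a per-row id, so rows stay distinct.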
        if in_is_db:
            assert in_is_xpaths

            import db_xml

            in_root_xml = xpath.path2xml(in_root)
            for i, mapping in enumerate(mappings):
                in_, out = mapping
                if metadata_value(in_) == None:
                    mappings[i] = (xpath.path2xml(in_root+'/'+in_), out)

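            # For each row of the root table, inject the row's pkey into a
            # copy of the compiled input-path template and let db_xml.get()
            # fetch the value that path points to.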
            in_db = sql.connect(in_db_config)
            in_pkeys = {}
            for row_idx, row in enumerate(sql.rows(db_xml.get(in_db,
                in_root_xml, in_pkeys, limit))):
                row_id = str(row_idx)
                pkey, = row
                for in_, out in mappings:
                    value = metadata_value(in_)
                    if value == None:
                        in_ = in_.cloneNode(True) # don't modify orig value!
                        xml_dom.set_id(xpath.get(in_, in_root), pkey)
                        value = sql.value_or_none(db_xml.get(in_db, in_,
                            in_pkeys))
                    if value != None:
                        xpath.put_obj(root, out, row_id, has_types, str(value))
            in_db.close()
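        # XML input: rows are the sibling elements of the node matched by
        # in_root in the stdin document; values are pulled with paths relative
        # to each row element.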
        elif in_is_xml:
            row = xpath.get(doc0.documentElement, in_root)
            for row_idx, row in enumerate(xml_dom.NodeElemIter(row.parentNode)):
                if not (limit == None or row_idx < limit): break
                row_id = str(row_idx)
                for in_, out in mappings:
                    value = metadata_value(in_)
                    if value == None:
                        node = xpath.get(row, in_)
                        if node != None: value = xml_dom.value(node)
                    if value != None:
                        xpath.put_obj(root, out, row_id, has_types, value)
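        # CSV input: the header row maps column names to indexes, and each
        # non-metadata mapping is rewritten to carry its column index.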
        else: # input is CSV
            map_ = dict(mappings)
            reader = csv.reader(sys.stdin)
            cols = reader.next()
            col_idxs = dict([(value, idx) for idx, value in enumerate(cols)])
            for i, mapping in enumerate(mappings):
                in_, out = mapping
                if metadata_value(in_) == None:
                    try: mappings[i] = (col_idxs[in_], out)
                    except KeyError: pass

            for row_idx, row in enumerate(reader):
                if not (limit == None or row_idx < limit): break
                row_id = str(row_idx)
                for in_, out in mappings:
                    value = metadata_value(in_)
                    if value == None:
                        value = row[in_]
                        if value == '': value = None
                    if value != None:
                        xpath.put_obj(root, out, row_id, has_types, value)
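        # Post-process the assembled tree with lib/xml_func (its behavior is
        # defined there, not in this script).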
        xml_func.process(root)
    else: doc1 = doc0

    # Output XML tree
    if out_is_db:
        from psycopg2.extensions import ISOLATION_LEVEL_SERIALIZABLE
        import db_xml

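        # The whole load runs in a single serializable transaction.  Unless
        # commit=1 was given, the rollback in the finally block undoes it, so
        # an uncommitted run is effectively a dry run.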
        out_db = sql.connect(out_db_config)
        out_db.set_isolation_level(ISOLATION_LEVEL_SERIALIZABLE)
        try:
            row_ct_ref = [0]
            db_xml.xml2db(out_db, doc1.documentElement, row_ct_ref)
            print 'Inserted '+str(row_ct_ref[0])+' rows'
            if commit: out_db.commit()
        finally:
            out_db.rollback()
            out_db.close()
    else: xml_dom.writexml(sys.stdout, doc1) # output is XML

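# Report parse errors (Parser.SyntaxException) as a clean exit message rather
# than a traceback.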
try: main()
except SyntaxException, ex: raise SystemExit(str(ex))