# XML-database conversion

import copy
import re
from xml.dom import Node

import dicts
import exc
import Parser
import sql
import sql_io
import sql_gen
import strings
import util
import xml_dom
import xml_func
import xpath

def name_of(node): return re.sub(r'^.*\.', r'', node.tagName)

ptr_suffix = '_id'

def is_ptr(node_name): return node_name.lower().endswith(ptr_suffix)

def ptr_type_guess(node_name):
    assert is_ptr(node_name)
    return node_name[:-len(ptr_suffix)]

def ptr_target(node):
    assert is_ptr(name_of(node))
    return xml_dom.value_node(node)
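
# For example (the column name below is only an illustration, not a reference
# to a real schema): is_ptr('plantobservation_id') -> True and
# ptr_type_guess('plantobservation_id') -> 'plantobservation'.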

def find_by_name(node, name):
    for parent in xml_dom.NodeParentIter(node):
        if name_of(parent) == name: return parent
        else:
            for child in xml_dom.NodeElemIter(parent):
                child_name = name_of(child)
                if is_ptr(child_name):
                    target = ptr_target(child)
                    if target.tagName == name: return target
                elif child_name == name: return child
    return None

class ColRef:
    '''A reference to a table column'''
    def __init__(self, name, idx):
        self.name = name
        self.idx = idx
    
    def __str__(self): return self.name

# Prefix that marks a value as the name of an input column (see put_table())
input_col_prefix = xml_func.var_name_prefix

# Structural XML functions that receive special handling in put()
put_special_funcs = set(['_setDefault', '_simplifyPath'])

no_parent_ids_loc = object() # tells put() there is no parent_ids_loc

def put(db, node, row_ins_ct_ref=None, on_error=exc.reraise, col_defaults=None,
    in_table=None, parent_ids_loc=no_parent_ids_loc, next=None):
    '''
    @param node To use an entire XML document, pass root.firstChild.
    '''
    if node == None: return None # when no rows, root.firstChild == None
    elif xml_dom.is_text_node(node): return xml_dom.value(node)
    
    if col_defaults == None: col_defaults = {}
    
    def put_(node):
        if util.is_str(node): return node
        return put(db, node, row_ins_ct_ref, on_error, col_defaults, in_table,
            parent_ids_loc, next)
    
    def augment_error(e): exc.add_msg(e, 'node:\n'+strings.ustr(node))
    def on_error_(e):
        augment_error(e)
        on_error(e)
    
    def wrap_e(e):
        augment_error(e)
        raise xml_func.SyntaxError(e)
    
    is_func = xml_func.is_func(node)
    out_table = name_of(node)
    
    # Divide children into fields and children with fkeys to parent
    row = dicts.OnceOnlyDict()
    children = []
    try:
        for child in xml_dom.NodeElemIter(node):
            child_name = name_of(child)
            if xml_dom.is_empty(child): row[child_name] = None
            elif xml_dom.is_text(child):
                row[child_name] = strings.to_unicode(xml_dom.value(child))
            else:
                child_value = xml_dom.value_node(child)
                if ((is_func or is_ptr(child_name)
                    or xml_func.is_func(child_value))
                    and not xml_func.is_func(child)):
                    row[child_name] = child_value
                else: children.append(child)
    except dicts.KeyExistsError, e: wrap_e(e)
    
    # Special handling for structural XML functions
    if out_table == '_setDefault':
        # Parse args
        try: path = row.pop('path')
        except KeyError, e: wrap_e(e)
        
        col_defaults = dicts.MergeDict(dicts.WrapDict(put_, row), col_defaults)
        return put_(path)
    elif out_table == '_simplifyPath':
        # Parse args
        try:
            next = row['next'] # modifies outer next var used by put_()
            path = row['path']
        except KeyError, e: wrap_e(e)
        try: next = xpath.parse(next)
        except Parser.SyntaxError, e: wrap_e(e)
        try: next = next[0].name
        except IndexError, e: wrap_e(e)
        
        return put_(path)
    
    is_literals = in_table == None
    in_tables = []
    no_empty = set()
    if not is_literals:
        in_tables.append(in_table)
        no_empty.add(in_table)
    
    def pkey_name(table): return sql.pkey_name(db, table, True)
    
    # Add fkey to parent
    if parent_ids_loc is not no_parent_ids_loc:
        if sql_gen.is_table_col(parent_ids_loc):
            no_empty.add(parent_ids_loc.table)
        parent_ptr = node.getAttribute('fkey')
        if parent_ptr == '': parent_ptr = pkey_name(name_of(node.parentNode))
        row[parent_ptr] = parent_ids_loc
    
    # Parse input columns
    row = row.inner # now allow keys to be overwritten
    for out_col, value in row.iteritems():
        if (not is_literals and util.is_str(value)
            and value.startswith(input_col_prefix)): # value is input column
            row[out_col] = sql_gen.Col(strings.remove_prefix(input_col_prefix,
                value), in_table)
    
    # Optimizations for structural XML functions
    if out_table == '_alt': # return first arg if non-NULL
        args = row.items()
        args.sort()
        out_col, value = min(args) # first arg
        if xml_dom.is_node(value): row[out_col] = value = put_(value)
        if not sql_gen.is_nullable(db, value): return value
    
    # Process values
    parent_ids_loc = no_parent_ids_loc # applies to this section
    for out_col, value in row.iteritems():
        # Handle forward pointers
        if xml_dom.is_node(value): row[out_col] = value = put_(value)
        
        # Translate values
        if isinstance(value, sql_gen.Col): # value is table column
            assert sql_gen.is_table_col(value)
            if value.table is not in_table: in_tables.append(value.table)
        else: # value is literal value
            row[out_col] = sql_gen.NamedCol(out_col, value)
    
    # Insert node
    try: pkeys_loc = sql_io.put_table(db, out_table, in_tables, row,
        row_ins_ct_ref, next, col_defaults, on_error_)
    except Exception, e:
        augment_error(e)
        raise
    if sql_gen.is_table_col(pkeys_loc): no_empty.add(pkeys_loc.table)
    
    sql.empty_temp(db, set(in_tables) - no_empty)
    
    # Insert children with fkeys to parent
    parent_ids_loc = pkeys_loc # applies to this section
    for child in children: put_(child)
    
    return pkeys_loc
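
# Illustrative usage sketch (not part of the original module): `db` is assumed
# to be an already-open connection object accepted by the sql/sql_io modules,
# the element names are placeholders, and the parsed document is assumed to
# contain no whitespace-only text nodes (otherwise root.firstChild may be a
# text node rather than the first row element).
#
#     import xml.dom.minidom
#     doc = xml.dom.minidom.parseString('<root><sometable>...</sometable></root>')
#     pkeys_loc = put(db, doc.documentElement.firstChild)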

def get(db, node, limit=None, start=None):
    def pkey_name(table): return sql.pkey_name(db, table)
    
    node = node.firstChild
    table = name_of(node)
    pkey_ = pkey_name(table)
    
    fields = []
    conds = {}
    for child in xml_dom.NodeElemIter(node):
        child_name = name_of(child)
        if xml_dom.is_empty(child): fields.append(child_name)
        elif xml_dom.is_text(child): conds[child_name] = xml_dom.value(child)
        else: raise Exception('Joins not supported yet')
    id_ = xml_dom.get_id(node)
    if id_ != None: conds[pkey_name(table)] = id_ # replace any existing value
    if fields == []: fields.append(pkey_)
    
    return sql.select(db, table, fields, conds, limit, start)
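
# Illustrative usage sketch (not part of the original module): the table and
# column names are placeholders and `db` is an already-open connection. Empty
# child elements select fields and text children become equality conditions,
# so the call below is roughly SELECT plotname FROM plot WHERE sitecode = 'XYZ'.
#
#     doc = xml.dom.minidom.parseString(
#         '<plot><plotname/><sitecode>XYZ</sitecode></plot>')
#     cur = get(db, doc)  # get() descends to doc.firstChild, the <plot> element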

# Controls when and how put_table() will partition the input table
partition_size = 1000000 # rows; must be >= NCBI.nodes size
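
# For example (assuming the input table has enough rows): with limit=2500000
# and partition_size=1000000, put_table() below runs three passes covering rows
# 1-1000000, 1000001-2000000, and 2000001-2500000, because each pass uses
# this_limit = min(partition_size, limit - total).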

def put_table(db, node, in_table, in_row_ct_ref=None, row_ins_ct_ref=None,
    limit=None, start=0, on_error=exc.reraise, col_defaults={},
    partition_size=partition_size):
    '''
    @param node The XML tree that transforms the input to the output. Similar to
        put()'s node param, but with the input column name prefixed by
        input_col_prefix in place of the column value.
    @return sql_gen.Col Where the pkeys (from INSERT RETURNING) are made
        available
    '''
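    # Illustrative template sketch (not from the original module; the element
    # and column names are placeholders). The node tree has the same shape as
    # put()'s, except that leaf values name input columns and carry
    # input_col_prefix (whatever xml_func.var_name_prefix is) instead of
    # holding literal values, e.g.:
    #
    #     <plot>
    #         <plotname>{input_col_prefix}plot_name</plotname>
    #         <sitecode>{input_col_prefix}site_code</sitecode>
    #     </plot>
    #
    # put() then maps each such leaf to a sql_gen.Col on in_table (see
    # "Parse input columns" in put()) rather than to a literal.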
    if in_table == None:
        return put(db, node, row_ins_ct_ref, on_error, col_defaults)
    
    in_table = sql_gen.as_Table(in_table)
    sql_io.mk_errors_table(db, in_table)
    in_table.set_srcs([in_table], overwrite=False)
    db.src = strings.ustr(in_table)
    
    db.autoexplain = True # but don't do this in row-based import
    
    # Subset and partition in_table
    # OK to do even if table already the right size because it takes <1 sec.
    full_in_table = in_table
    pkeys_loc = None # used if loop is never executed
    total = 0
    while limit == None or total < limit:
        # Adjust partition size if last partition
        this_limit = partition_size
        if limit != None: this_limit = min(this_limit, limit - total)

        # Row # is internally 0-based, but 1-based to the user
        db.log_debug('********** Partition: rows '+str(start+1)+'-'
            +str(start+this_limit)+' **********', level=1.2)

        # Subset in_table
        in_table = sql_gen.Table(strings.ustr(full_in_table),
            srcs=full_in_table.srcs, is_temp=True) # prepend schema to name
        sql.copy_table_struct(db, full_in_table, in_table)
        try: sql.add_row_num(db, in_table, 'row_num')
        except sql.DatabaseErrors: pass # already has pkey
        cur = sql.insert_select(db, in_table, None, sql.mk_select(db,
            full_in_table, limit=this_limit, start=start))
        
        this_ct = cur.rowcount
        total += this_ct
        start += this_ct # advance start to fetch next set
        if this_ct == 0: break # in_table size is multiple of partition_size
        
        # Import data
        pkeys_loc = put(db, node, row_ins_ct_ref, on_error, col_defaults,
            in_table)
        if in_row_ct_ref != None: in_row_ct_ref[0] += this_ct
        
        sql.empty_temp(db, in_table)
        
        if this_ct < partition_size: break # partial partition = last
        
        # Work around PostgreSQL's temp table disk space leak
        db.reconnect()
    
    return pkeys_loc