# XML-database conversion

import copy
import re
from xml.dom import Node

import dicts
import exc
import Parser
import sql
import sql_io
import sql_gen
import strings
import util
import xml_dom
import xml_func
import xpath
def name_of(node): return re.sub(r'^.*\.', r'', node.tagName)

ptr_suffix = '_id'

def is_ptr(node_name): return node_name.lower().endswith(ptr_suffix)

def ptr_type_guess(node_name):
    assert is_ptr(node_name)
    return node_name[:-len(ptr_suffix)]

def ptr_target(node):
    assert is_ptr(name_of(node))
    return xml_dom.value_node(node)
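# Illustrative sketch (added for clarity; not part of the original module):
# exercises the "_id" pointer-naming convention that is_ptr() and
# ptr_type_guess() rely on. The element names used here are hypothetical.
def _ptr_naming_example():
    assert is_ptr('parent_id')
    assert not is_ptr('name')
    assert ptr_type_guess('parent_id') == 'parent'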
def find_by_name(node, name):
    for parent in xml_dom.NodeParentIter(node):
        if name_of(parent) == name: return parent
        else:
            for child in xml_dom.NodeElemIter(parent):
                child_name = name_of(child)
                if is_ptr(child_name):
                    target = ptr_target(child)
                    if target.tagName == name: return target
                elif child_name == name: return child
    return None
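# Note (added for clarity; not part of the original module): find_by_name()
# resolves a name relative to a node by walking xml_dom.NodeParentIter(node)
# (by its name, the node's parent chain); at each level it matches the parent
# itself, the targets of its "_id" pointer children, or its direct child
# elements. A hypothetical lookup might look like:
#
#     plot_node = find_by_name(some_leaf_node, 'plot')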
class ColRef:
    '''A reference to a table column'''
    def __init__(self, name, idx):
        self.name = name
        self.idx = idx
    
    def __str__(self): return self.name

input_col_prefix = xml_func.var_name_prefix

put_special_funcs = set(['_simplifyPath'])
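# Note (added for clarity; not part of the original module): in put(), a leaf
# value that starts with input_col_prefix is treated as a reference to a column
# of in_table rather than as a literal (see "Parse input columns" below). A
# hypothetical mapping element would therefore carry a value like
# input_col_prefix + 'plot_name' in place of an actual plot name.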
def put(db, node, row_ins_ct_ref=None, on_error=exc.reraise,
    col_defaults={}, in_table=None, parent_ids_loc=None, next=None):
    '''
    @param node To use an entire XML document, pass root.firstChild.
    '''
    if node == None: return None # when no rows, root.firstChild == None
    
    def put_(node):
        return put(db, node, row_ins_ct_ref, on_error, col_defaults, in_table,
            parent_ids_loc, next)
    
    def augment_error(e): exc.add_msg(e, 'node:\n'+strings.ustr(node))
    def on_error_(e):
        augment_error(e)
        on_error(e)
    
    def wrap_e(e):
        augment_error(e)
        raise xml_func.SyntaxError(e)
    
    is_func = xml_func.is_func(node)
    out_table = name_of(node)
    
    # Divide children into fields and children with fkeys to parent
    row = dicts.OnceOnlyDict()
    children = []
    try:
        for child in xml_dom.NodeElemIter(node):
            child_name = name_of(child)
            if xml_dom.is_empty(child): row[child_name] = None
            elif xml_dom.is_text(child):
                row[child_name] = strings.to_unicode(xml_dom.value(child))
            else:
                child_value = xml_dom.value_node(child)
                if ((is_func or is_ptr(child_name)
                    or xml_func.is_func(child_value))
                    and not xml_func.is_func(child)):
                    row[child_name] = child_value
                else: children.append(child)
    except dicts.KeyExistsError, e: wrap_e(e)
    
    # Special handling for structural XML functions
    if out_table == '_simplifyPath':
        # Parse args
        try:
            next = row['next'] # modifies outer next var used by put_()
            path = row['path']
        except KeyError, e: wrap_e(e)
        try: next = xpath.parse(next)
        except Parser.SyntaxError, e: wrap_e(e)
        try: next = next[0].name
        except IndexError, e: wrap_e(e)
        
        return put_(path)
    
    is_literals = in_table == None
    in_tables = []
    no_empty = set()
    if not is_literals:
        in_tables.append(in_table)
        no_empty.add(in_table)
    
    def pkey_name(table): return sql.pkey_name(db, table, True)
    
    # Add fkey to parent
    if parent_ids_loc != None:
        if sql_gen.is_table_col(parent_ids_loc):
            no_empty.add(parent_ids_loc.table)
        parent_ptr = node.getAttribute('fkey')
        if parent_ptr == '': parent_ptr = pkey_name(name_of(node.parentNode))
        row[parent_ptr] = parent_ids_loc
    
    # Parse input columns
    row = row.inner # now allow keys to be overwritten
    for out_col, value in row.iteritems():
        if (not is_literals and util.is_str(value)
            and value.startswith(input_col_prefix)): # value is input column
            row[out_col] = sql_gen.Col(strings.remove_prefix(input_col_prefix,
                value), in_table)
    
    # Optimizations for structural XML functions
    if out_table == '_alt': # return first arg if non-NULL
        args = row.items()
        args.sort()
        out_col, value = min(args) # first arg
        if xml_dom.is_node(value): row[out_col] = value = put_(value)
        if not sql_gen.is_nullable(db, value): return value
    
    # Process values
    parent_ids_loc = None # applies to this section
    for out_col, value in row.iteritems():
        # Handle forward pointers
        if xml_dom.is_node(value): row[out_col] = value = put_(value)
        
        # Translate values
        if isinstance(value, sql_gen.Col): # value is table column
            assert sql_gen.is_table_col(value)
            if value.table is not in_table: in_tables.append(value.table)
        else: # value is literal value
            row[out_col] = sql_gen.NamedCol(out_col, value)
    
    # Insert node
    try: pkeys_loc = sql_io.put_table(db, out_table, in_tables, row,
        row_ins_ct_ref, next, col_defaults, on_error_)
    except Exception, e:
        augment_error(e)
        raise
    if sql_gen.is_table_col(pkeys_loc): no_empty.add(pkeys_loc.table)
    
    sql.empty_temp(db, set(in_tables) - no_empty)
    
    # Insert children with fkeys to parent
    parent_ids_loc = pkeys_loc # applies to this section
    if parent_ids_loc != None: # not if this node had an error
        for child in children: put_(child)
    
    return pkeys_loc
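# Illustrative usage sketch (added for clarity; not part of the original
# module): per put()'s docstring, passing root.firstChild imports an entire XML
# document. db is assumed to be a connection object accepted by the sql/sql_io
# modules; following the *_ct_ref convention used in put_table() below,
# row_ins_ct_ref is assumed to be a 1-element list used as an output counter.
def _put_example(db, root):
    row_ins_ct = [0]
    pkey_loc = put(db, root.firstChild, row_ins_ct_ref=row_ins_ct)
    return pkey_loc, row_ins_ct[0]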
def get(db, node, limit=None, start=None):
    def pkey_name(table): return sql.pkey_name(db, table)
    
    node = node.firstChild
    table = name_of(node)
    pkey_ = pkey_name(table)
    
    fields = []
    conds = {}
    for child in xml_dom.NodeElemIter(node):
        child_name = name_of(child)
        if xml_dom.is_empty(child): fields.append(child_name)
        elif xml_dom.is_text(child): conds[child_name] = xml_dom.value(child)
        else: raise Exception('Joins not supported yet')
    id_ = xml_dom.get_id(node)
    if id_ != None: conds[pkey_name(table)] = id_ # replace any existing value
    if fields == []: fields.append(pkey_)
    
    return sql.select(db, table, fields, conds, limit, start)
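# Illustrative usage sketch (added for clarity; not part of the original
# module): builds a small query template with the standard library and runs
# get(). The table ("plot") and columns ("area", "name") are hypothetical;
# get() reads the template's first child as the table to query, its empty child
# elements as the fields to select, and its text children as equality
# conditions.
def _get_example(db):
    from xml.dom import minidom
    template = minidom.parseString('<plot><area/><name>plot1</name></plot>')
    return get(db, template, limit=10)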
# Controls when and how put_table() will partition the input table
partition_size = 1000000 # rows; must be >= NCBI.nodes size
def put_table(db, node, in_table, in_row_ct_ref=None, row_ins_ct_ref=None,
    limit=None, start=0, on_error=exc.reraise, col_defaults={},
    partition_size=partition_size):
    '''
    @param node The XML tree that transforms the input to the output. Similar to
        put()'s node param, but each column value is replaced by the
        corresponding input column name prefixed with input_col_prefix.
    @return sql_gen.Col where the pkeys (from INSERT RETURNING) are made
        available
    '''
    in_table = sql_gen.as_Table(in_table)
    sql_io.mk_errors_table(db, in_table)
    in_table.set_srcs([in_table], overwrite=False)
    db.src = strings.ustr(in_table)
    
    db.autoanalyze = True # but don't do this in row-based import
    db.autoexplain = True # but don't do this in row-based import
    
    # Subset and partition in_table
    # OK to do even if table already the right size because it takes <1 sec.
    full_in_table = in_table
    pkeys_loc = None # used if loop is never executed
    total = 0
    while limit == None or total < limit:
        # Adjust partition size if last partition
        this_limit = partition_size
        if limit != None: this_limit = min(this_limit, limit - total)
        
        # Row # is internally 0-based, but 1-based to the user
        db.log_debug('********** Partition: rows '+str(start+1)+'-'
            +str(start+this_limit)+' **********', level=1.2)
        
        # Subset in_table
        in_table = sql_gen.Table(strings.ustr(full_in_table),
            srcs=full_in_table.srcs, is_temp=True) # prepend schema to name
        sql.copy_table_struct(db, full_in_table, in_table)
        try: sql.add_row_num(db, in_table, 'row_num')
        except sql.DatabaseErrors: pass # already has pkey
        cur = sql.insert_select(db, in_table, None, sql.mk_select(db,
            full_in_table, limit=this_limit, start=start))
        
        this_ct = cur.rowcount
        total += this_ct
        start += this_ct # advance start to fetch next set
        if this_ct == 0: break # in_table size is multiple of partition_size
        
        # Import data
        pkeys_loc = put(db, node, row_ins_ct_ref, on_error, col_defaults,
            in_table)
        if in_row_ct_ref != None: in_row_ct_ref[0] += this_ct
        
        sql.empty_temp(db, in_table)
        
        if this_ct < partition_size: break # partial partition = last
        
        # Work around PostgreSQL's temp table disk space leak
        db.reconnect()
    
    return pkeys_loc
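# Illustrative usage sketch (added for clarity; not part of the original
# module): per put_table()'s docstring, the mapping tree looks like put()'s
# node param but with leaf values replaced by input column names prefixed with
# input_col_prefix. The staging table name below is hypothetical, and in_table
# is assumed to be acceptable to sql_gen.as_Table() as a plain table name; the
# *_ct_ref arguments are 1-element lists used as output counters, as in the
# in_row_ct_ref handling above.
def _put_table_example(db, mapping_root):
    in_row_ct = [0]
    row_ins_ct = [0]
    pkeys_loc = put_table(db, mapping_root.firstChild, 'plot_staging',
        in_row_ct_ref=in_row_ct, row_ins_ct_ref=row_ins_ct)
    return pkeys_loc, in_row_ct[0], row_ins_ct[0]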