# XML-database conversion

import copy
import re
from xml.dom import Node

import dicts
import exc
import Parser
import sql
import sql_io
import sql_gen
import strings
import util
import xml_dom
import xml_func
import xpath

def name_of(node): return re.sub(r'^.*\.', r'', node.tagName)

ptr_suffix = '_id'

def is_ptr(node_name): return node_name.lower().endswith(ptr_suffix)

def ptr_type_guess(node_name):
    assert is_ptr(node_name)
    return node_name[:-len(ptr_suffix)]

def ptr_target(node):
    assert is_ptr(name_of(node))
    return xml_dom.value_node(node)

def find_by_name(node, name):
    for parent in xml_dom.NodeParentIter(node):
        if name_of(parent) == name: return parent
        else:
            for child in xml_dom.NodeElemIter(parent):
                child_name = name_of(child)
                if is_ptr(child_name):
                    target = ptr_target(child)
                    if target.tagName == name: return target
                elif child_name == name: return child
    return None
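
# Illustrative sketch of the naming conventions above (not used elsewhere in
# this module; the tag names are hypothetical). name_of() strips any dotted
# prefix, so a node tagged 'path.plantname' maps to table 'plantname', and a
# child element whose name ends in ptr_suffix is treated as a pointer (fkey)
# to another table.
def _naming_convention_example():
    assert is_ptr('parent_id')
    assert not is_ptr('label')
    assert ptr_type_guess('parent_id') == 'parent'
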
class ColRef:
    '''A reference to a table column'''
    def __init__(self, name, idx):
        self.name = name
        self.idx = idx

    def __str__(self): return self.name

input_col_prefix = xml_func.var_name_prefix

put_special_funcs = set(['_simplifyPath'])

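# Note on the constants above: in column-based mode (in_table != None), put()
# treats a string value that starts with input_col_prefix as a reference to a
# column of the input table rather than as a literal (see the value-translation
# loop in put()). put_special_funcs lists the structural XML functions that
# put() handles itself (see the '_simplifyPath' branch) instead of treating
# them as ordinary output tables.
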
def put(db, node, row_ins_ct_ref=None, on_error=exc.reraise,
    col_defaults={}, in_table=None, parent_ids_loc=None, next=None):
    '''
    @param node To use an entire XML document, pass root.firstChild.
    '''
    if node == None: return None # when no rows, root.firstChild == None

    def put_(node):
        return put(db, node, row_ins_ct_ref, on_error, col_defaults, in_table,
            parent_ids_loc, next)

    def augment_error(e): exc.add_msg(e, 'node:\n'+strings.ustr(node))
    def on_error_(e):
        augment_error(e)
        on_error(e)

    def wrap_e(e):
        augment_error(e)
        raise xml_func.SyntaxError(e)

    is_func = xml_func.is_func(node)
    out_table = name_of(node)

    # Divide children into fields and children with fkeys to parent
    row = dicts.OnceOnlyDict()
    children = []
    try:
        for child in xml_dom.NodeElemIter(node):
            child_name = name_of(child)
            if xml_dom.is_empty(child): row[child_name] = None
            elif xml_dom.is_text(child):
                row[child_name] = strings.to_unicode(xml_dom.value(child))
            else:
                child_value = xml_dom.value_node(child)
                if (is_func or is_ptr(child_name)
                    or xml_func.is_func(child_value)):
                    row[child_name] = child_value
                else: children.append(child)
    except dicts.KeyExistsError, e: wrap_e(e)

    # Special handling for structural XML functions
    if out_table == '_simplifyPath':
        # Parse args
        try:
            next = row['next'] # modifies outer next var used by put_()
            path = row['path']
        except KeyError, e: wrap_e(e)
        try: next = xpath.parse(next)
        except Parser.SyntaxError, e: wrap_e(e)
        try: next = next[0].name
        except IndexError, e: wrap_e(e)

        return put_(path)

    is_literals = in_table == None
    in_tables = []
    no_empty = set()
    if not is_literals:
        in_tables.append(in_table)
        no_empty.add(in_table)

    def pkey(table): return sql.pkey(db, table, True)

    # Add fkey to parent
    if parent_ids_loc != None:
        if sql_gen.is_table_col(parent_ids_loc):
            no_empty.add(parent_ids_loc.table)
        parent_ptr = node.getAttribute('fkey')
        if parent_ptr == '': parent_ptr = pkey(name_of(node.parentNode))
        row[parent_ptr] = parent_ids_loc

    # Divide fields into input columns and literal values
    parent_ids_loc = None # applies to this section
    row = row.inner # now allow keys to be overwritten
    for out_col, value in row.iteritems():
        # Handle forward pointers
        if xml_dom.is_node(value): row[out_col] = value = put_(value)

        # Translate values
        if isinstance(value, sql_gen.Col): # value is temp table column
            assert sql_gen.is_table_col(value)
            in_tables.append(value.table)
        elif (not is_literals and util.is_str(value)
            and value.startswith(input_col_prefix)): # value is input column
            row[out_col] = sql_gen.Col(strings.remove_prefix(input_col_prefix,
                value), in_table)
        else: # value is literal value
            row[out_col] = sql_gen.NamedCol(out_col, value)

    # Insert node
    try: pkeys_loc = sql_io.put_table(db, out_table, in_tables, row,
        row_ins_ct_ref, next, col_defaults, on_error_)
    except Exception, e:
        augment_error(e)
        raise

    sql.empty_temp(db, set(in_tables) - no_empty)

    # Insert children with fkeys to parent
    parent_ids_loc = pkeys_loc # applies to this section
    for child in children: put_(child)

    return pkeys_loc

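# A minimal row-based usage sketch for put() (not called anywhere in this
# module). It assumes an already-connected DbConn `db` and a hypothetical
# "plantname" output table with a "label" column. With in_table=None every
# leaf value is treated as a literal, so this stores one row and returns the
# location of its pkey.
def _put_example(db):
    from xml.dom import minidom
    doc = minidom.parseString(
        '<plantname><label>Poa annua</label></plantname>')
    return put(db, doc.firstChild) # per the docstring, pass root.firstChild
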
def get(db, node, limit=None, start=None):
    def pkey(table): return sql.pkey(db, table)

    node = node.firstChild
    table = name_of(node)
    pkey_ = pkey(table)

    fields = []
    conds = {}
    for child in xml_dom.NodeElemIter(node):
        child_name = name_of(child)
        if xml_dom.is_empty(child): fields.append(child_name)
        elif xml_dom.is_text(child): conds[child_name] = xml_dom.value(child)
        else: raise Exception('Joins not supported yet')
    id_ = xml_dom.get_id(node)
    if id_ != None: conds[pkey(table)] = id_ # replace any existing pkey value
    if fields == []: fields.append(pkey_)

    return sql.select(db, table, fields, conds, limit, start)

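# A minimal usage sketch for get() (not called anywhere in this module),
# assuming an already-connected DbConn `db` and a hypothetical "plantname"
# table with "rank" and "label" columns. An empty child element selects that
# column and a text-only child becomes an equality condition, so this is
# roughly SELECT label FROM plantname WHERE rank = 'species'.
def _get_example(db):
    from xml.dom import minidom
    doc = minidom.parseString(
        '<plantname><rank>species</rank><label/></plantname>')
    return get(db, doc, limit=10) # get() itself descends to doc.firstChild
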
# Controls when and how put_table() will partition the input table
partition_size = 500000 # rows

def put_table(db, node, in_table, in_row_ct_ref=None, row_ins_ct_ref=None,
    limit=None, start=0, on_error=exc.reraise, col_defaults={}):
    '''
    @param node The XML tree that transforms the input to the output. Similar
        to put()'s node param, but with the input column name prefixed by
        input_col_prefix in place of the column value.
    @return sql_gen.Col Where the pkeys (from INSERT RETURNING) are made
        available
    '''
    in_table = sql_gen.as_Table(in_table)
    sql_io.mk_errors_table(db, in_table)
    in_table.set_srcs([in_table], overwrite=False)
    db.src = strings.ustr(in_table)

    db.autoanalyze = True # but don't do this in row-based import
    db.autoexplain = True # but don't do this in row-based import

    # Subset and partition in_table
    # OK to do even if table already the right size because it takes <1 sec.
    full_in_table = in_table
    total = 0
    pkeys_loc = None # returned as-is if in_table turns out to be empty
    while limit == None or total < limit:
        # Adjust partition size if last partition
        this_limit = partition_size
        if limit != None: this_limit = min(this_limit, limit - total)

        # Row # is internally 0-based, but 1-based to the user
        db.log_debug('********** Partition: rows '+str(start+1)+'-'
            +str(start+this_limit)+' **********', level=1.2)

        # Subset in_table
        in_table = copy.copy(full_in_table) # don't modify input!
        in_table.name = strings.ustr(in_table) # prepend schema
        cur = sql.run_query_into(db, sql.mk_select(db, full_in_table,
            limit=this_limit, start=start), into=in_table, add_pkey_=True)
        # full_in_table will be shadowed (hidden) by created temp table

        this_ct = cur.rowcount
        total += this_ct
        start += this_ct # advance start to fetch next set
        if this_ct == 0: break # in_table size is multiple of partition_size

        # Recurse
        pkeys_loc = put(db, node, row_ins_ct_ref, on_error, col_defaults,
            in_table)
        if in_row_ct_ref != None: in_row_ct_ref[0] += this_ct

        sql.empty_temp(db, in_table)

        if this_ct < partition_size: break # partial partition = last

        # Work around PostgreSQL's temp table disk space leak
        db.reconnect()

    return pkeys_loc
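
# A minimal usage sketch for put_table() (not called anywhere in this module).
# The staging table "staging.specimens", its "sciname" column, and the output
# "plantname"/"label" names are all hypothetical, and the staging table is
# passed by name on the assumption that sql_gen.as_Table() accepts a string.
# In the transform tree, each leaf holds the *input column name* prefixed with
# input_col_prefix instead of a literal value (and the prefix is assumed to be
# plain text that is legal inside an XML text node).
def _put_table_example(db):
    from xml.dom import minidom
    doc = minidom.parseString('<plantname><label>'+input_col_prefix
        +'sciname</label></plantname>')
    in_row_ct_ref = [0] # receives the number of input rows processed
    pkeys_loc = put_table(db, doc.firstChild, 'staging.specimens',
        in_row_ct_ref=in_row_ct_ref)
    return pkeys_loc, in_row_ct_ref[0]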