Revision 14615
Added by Aaron Marcuse-Kubitza over 10 years ago
trunk/lib/csvs.py
277 | 277 |
pairs += sorted(dict_.items()) # then remaining cols in alphabetical order |
278 | 278 |
return (dicts.pair_keys(pairs), dicts.pair_values(pairs)) |
279 | 279 |
|
280 |
class row_dict_to_list_reader(Filter):
|
|
280 |
class row_dict_to_list_reader(WrapReader):
|
|
281 | 281 |
'''reads dict-based rows as list-based rows |
282 | 282 |
@param reader [{'col': 'value', __}, __] |
283 | 283 |
''' |
284 | 284 |
def __init__(self, reader, col_order=[]): |
285 |
WrapReader.__init__(self, reader) |
|
286 |
self.col_order = col_order |
|
285 | 287 |
self.header = None |
286 |
|
|
287 |
def filter_(row_dict): |
|
288 |
header, row = row_dict_to_list(row_dict, col_order) |
|
289 |
|
|
288 |
self.next_row = None |
|
289 |
|
|
290 |
def next(self): |
|
291 |
if self.next_row != None: # 1st dict row: data |
|
292 |
row = self.next_row # return cached row instead of reading new row |
|
293 |
self.next_row = None |
|
294 |
else: |
|
295 |
row_dict = WrapReader.next(self) |
|
296 |
header, row = row_dict_to_list(row_dict, self.col_order) |
|
290 | 297 |
if self.header == None: # 1st dict row: header |
291 | 298 |
self.header = header |
292 | 299 |
self.next_row = row |
293 | 300 |
row = header |
294 |
elif self.next_row != None: # 1st dict row: data |
|
295 |
row = self.next_row |
|
296 |
self.next_row = None |
|
297 | 301 |
else: # remaining dict rows |
298 | 302 |
assert header == self.header # all rows must have same cols |
299 |
|
|
300 |
return row |
|
301 |
Filter.__init__(self, filter_, reader) |
|
302 |
self.next_row = None |
|
303 |
|
|
304 |
return row |
|
303 | 305 |
|
304 | 306 |
class JsonReader(MultiFilter): |
305 | 307 |
'''reads parsed JSON data as row tuples |
Also available in: Unified diff
bugfix: lib/csvs.py: row_dict_to_list_reader: need to override next() directly instead of just using Filter, because Filter doesn't support returning multiple rows for one input row (in this case, prepending a header row). this caused the 1st data row to be missing.