Revision 1641
Added by Aaron Marcuse-Kubitza almost 13 years ago
inputs/REMIB/src/nodes.all.specimens.csv.make

```diff
@@ -64,14 +64,15 @@
 stream = streams.StreamIter(streams.TimeoutInputStream(
     urllib2.urlopen(url), timeout_))
 
-util.skip(stream, is_ignore) # skip header
-try:
-    metadata_row = csv.reader(stream).next()
-    assert metadata_row[0] == 'COLLECTION'
-except StopIteration: done = True # empty response = no more nodes
-
 # Copy lines
 try:
+    util.skip(stream, is_ignore) # skip header
+    try:
+        metadata_row = csv.reader(stream).next()
+        assert metadata_row[0] == 'COLLECTION'
+    except StopIteration:
+        done = True # empty response means no more nodes
+
     for line in stream:
         if is_ignore(line):
             error = strings.remove_prefix('\t\t', line)
```
inputs/REMIB/src/nodes.all.specimens.csv.make: Moved header reading code inside TimeoutException try-except block since read sometimes times out before the header is even read
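For context, the change only pays off if the surrounding (not shown) code already catches the stream's timeout exception around the copy loop: with the header read moved inside that same `try:` block, a timeout that fires before the header arrives is handled like any other read timeout instead of escaping. Below is a minimal sketch of the pattern under that assumption; `TimeoutException`, `copy_response`, and `process_line` are hypothetical names for illustration, not the project's actual identifiers.

```python
# Minimal sketch (hypothetical names): read the header inside the same try
# block that guards the copy loop, so a timeout that occurs before the header
# arrives is handled the same way as one that occurs mid-copy.

class TimeoutException(Exception):
    """Raised when a read on the underlying stream times out."""

def copy_response(stream, process_line):
    done = False
    stream = iter(stream)  # single shared iterator for header and body
    try:
        try:
            header = next(stream)  # header read; may time out or be empty
        except StopIteration:
            done = True  # empty response means no more data
        else:
            # the real script validates the header (e.g. its first field)
            for line in stream:  # copy loop; each read may also time out
                process_line(line)
    except TimeoutException:
        pass  # an early or mid-copy timeout lands here either way
    return done
```

This matches the commit message: since the read can time out before the header is even read, the header-reading code has to sit inside the same timeout handling as the rest of the copy.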