Revision 5097
Added by Aaron Marcuse-Kubitza about 12 years ago
tnrs_db
---
22 | 22 |
pause = 60 # sec |
23 | 23 |
max_pause = 2*60*60 # sec; = 2 hr; must be >= max import time of one partition |
24 | 24 |
assert pause <= max_pause |
25 |
max_taxons = 500 # less than the limit to avoid slowing down the TNRS server |
|
25 | 26 |
|
26 | 27 |
tnrs_data = sql_gen.Table('tnrs') |
27 | 28 |
|
... | ... | |
47 | 48 |
while True: |
48 | 49 |
# Fetch next set |
49 | 50 |
cur = sql.select(db, 'taxonpath', ['taxonomicnamewithauthor'], |
50 |
[('canon_taxonpath_id', None)], limit=tnrs.max_taxons, start=start,
|
|
51 |
[('canon_taxonpath_id', None)], limit=max_taxons, start=start, |
|
51 | 52 |
cacheable=False) |
52 | 53 |
this_ct = cur.rowcount |
53 | 54 |
start += this_ct # advance start to fetch next set |
Also available in: Unified diff
tnrs_db: Reduced the chunk size to avoid slowing down the TNRS server