Revision 5101
Added by Aaron Marcuse-Kubitza about 12 years ago
tnrs_db

@@ -23,7 +23,6 @@
 pause = 60 # sec
 max_pause = 2*60*60 # sec; = 2 hr; must be >= max import time of one partition
 assert pause <= max_pause
-max_taxons = 500 # less than the limit to avoid slowing down the TNRS server
 
 tnrs_data = sql_gen.Table('tnrs')
 
@@ -51,8 +50,8 @@
 while True:
     # Fetch next set
     cur = sql.select(db, 'taxonpath', ['taxonomicnamewithauthor'],
-        [('canon_taxonpath_id', None)], limit=max_taxons, start=start,
-        cacheable=False)
+        [('canon_taxonpath_id', None)], limit=tnrs.max_taxons,
+        start=start, cacheable=False)
     this_ct = cur.rowcount
     start += this_ct # advance start to fetch next set
     if this_ct == 0:
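The constant removed in the first hunk is relocated rather than dropped: per the commit message below, it now lives in tnrs.py, which is why the select call can reference it as tnrs.max_taxons. The receiving side of the move is not shown in this revision, but it presumably amounts to the same line, now at module level in tnrs.py (value and comment mirrored from the removed line):

# tnrs.py (sketch; only this constant is implied by the revision)
max_taxons = 500 # less than the limit to avoid slowing down the TNRS server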
tnrs_db: Moved lower max_taxons limit to tnrs.py because it's really required to avoid crashing the TNRS server and should apply to all callers
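With the cap defined at module level, any caller that pages names to TNRS can honor it, not just tnrs_db. A minimal, hypothetical sketch of such a caller; only tnrs.max_taxons comes from this revision, while batch_fetch and fetch_page are illustrative stand-ins:

import tnrs  # provides max_taxons, the server-safe batch size

def batch_fetch(fetch_page):
    """Yield successive sets of names, each no larger than tnrs.max_taxons."""
    start = 0
    while True:
        names = fetch_page(limit=tnrs.max_taxons, start=start)  # shared cap
        if not names:
            break
        start += len(names)  # advance start to fetch the next set
        yield names

Any other consumer of the TNRS interface would get the same ceiling simply by importing the same constant, which is the point of the move.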