#!/usr/bin/env python
# Scrubs the taxonpaths in VegBIEN using TNRS.
# Runs continuously until no new rows are added after max_pause.
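#
# Usage (inferred from the SystemExit message in main(); the required env var
# names come from sql.db_config_names): set the DB connection env vars, then
# run this script with stderr appended to a log, e.g. `<env vars> <script> 2>>log`.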

import os.path
import StringIO
import sys
import time

sys.path.append(os.path.dirname(__file__)+"/../lib")

import csvs
import opts
import profiling
import sql
import sql_gen
import sql_io
import streams
import strings
import tnrs

# Config
pause = 60 # sec
max_pause = 2*60*60 # sec; = 2 hr; must be >= max import time of one partition
assert pause <= max_pause
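# With pause = 60 and max_pause = 2*60*60 = 7200 sec, the polling loop below
# sleeps at most max_pause/pause = 120 consecutive times (2 hr total) before
# the next empty poll ends the run.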

tnrs_data = sql_gen.Table('tnrs')

def main():
    # Input
    env_names = []
    db_config = opts.get_env_vars(sql.db_config_names, None, env_names)
    verbosity = float(opts.get_env_var('verbosity', 3, env_names))
    if 'engine' not in db_config: raise SystemExit('Usage: '
        +opts.env_usage(env_names)+' '+sys.argv[0]+' 2>>log')

    def log(msg, level=1):
        '''Higher level -> more verbose'''
        if level <= verbosity:
            sys.stderr.write(strings.to_raw_str(msg.rstrip('\n')+'\n'))

    # Connect to DB
    db = sql.connect(db_config, log_debug=log)

    tnrs_profiler = profiling.ItersProfiler(iter_text='row')
    try:
        # Iterate over unscrubbed verbatim taxonpaths
        start = 0
        total_pause = 0
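        # Each pass fetches the next batch (up to tnrs.max_names rows) of
        # taxonpaths whose canon_taxonpath_id is unset (a (column, None)
        # condition in sql.select() appears to mean IS NULL, i.e. not yet
        # scrubbed), sends the verbatim names to TNRS, and appends the
        # response to the tnrs table. start-based pagination is what keeps
        # already-fetched rows from being refetched, since this script itself
        # never sets canon_taxonpath_id.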
        while True:
            # Fetch next set
            cur = sql.select(db, 'taxonpath', ['taxonomicnamewithauthor'],
                [('canon_taxonpath_id', None)], limit=tnrs.max_names,
                start=start, cacheable=False)
            this_ct = cur.rowcount
            start += this_ct # advance start to fetch next set
            if this_ct == 0:
                total_pause += pause
                if total_pause > max_pause: break
                log('Waited '+str(total_pause)+' sec. Waiting...')
                time.sleep(pause) # wait for more rows
                continue # try again
            # otherwise, rows found
            total_pause = 0
            names = list(sql.values(cur))
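            # names is this batch's verbatim taxonomic names (at most
            # tnrs.max_names of them), submitted in a single TNRS request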

            # Run TNRS
            log('Processing '+str(this_ct)+' taxonpaths')
            log('Making TNRS request')
            tnrs_profiler.start()
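            # The inner try/finally records this batch in the profiler even if
            # the request fails; on tnrs.InvalidResponse the batch is skipped,
            # and because start has already been advanced it is not retried
            # within this run. A successful response is assumed to be a CSV
            # stream: csvs.stream_info() parses its header, and
            # sql_io.append_csv() loads the rows into the tnrs table.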
            try:
                try: stream = tnrs.repeated_tnrs_request(names)
                finally: tnrs_profiler.stop(iter_ct=this_ct)
            except tnrs.InvalidResponse: pass # skip set in case it caused error
            else:
                log('Storing TNRS response data')
                stream_info = csvs.stream_info(stream, parse_header=True)
                stream = streams.ProgressInputStream(stream, sys.stderr, n=1000)
                sql_io.append_csv(db, tnrs_data, stream_info, stream)
    finally:
        log(tnrs_profiler.msg())

main()