#!/usr/bin/env python
# Scrubs the taxonlabels in VegBIEN using TNRS.

# runtime: 162 ms/name ("real 458m50.126s" for "169,539 name(s)" [1])
# [1] $ tail -c +12953807 ../inputs/.TNRS/tnrs/logs/tnrs.make.log.sql|head -15

# total runtime: 10 days ("Rows (counted) 5221748" (TNRS.tnrs @r9998)
# * 162 ms/name (above) * 1s/1000ms * 1h/3600s * 1day/24h = 9.79 days)
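# (i.e. 5,221,748 names * 0.162 s/name = 845,923 s ~= 235 h ~= 9.79 days)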

# to estimate total runtime:
# bin/psql_vegbien <<<'SELECT COUNT(*) FROM tnrs_input_name'
# # names from above * 1.5 multiplier for scrubbing accepted names
# (the test_taxonomic_names sample from Brad produces 8 accepted names for
# 15 input names)
# * ((# ms/name from log file * 1 sec/1000 ms) + (# sec to run
#   `SELECT * FROM "tnrs_input_name"` in log file / tnrs.max_names names/batch))
# * 1 hr / 3600 sec * 1 day / 24 hr = # days
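
# a worked sketch of that estimate, as a helper (illustrative only and not
# used by this script; the name estimate_days and its parameters are invented
# here, with their values coming from the log file and psql query above):
def estimate_days(names_ct, ms_per_name, select_sec, names_per_batch,
    accepted_multiplier=1.5):
    '''rough total scrubbing runtime, in days, per the formula above'''
    # per-name cost: TNRS time plus each name's share of the batch's SELECT
    sec_per_name = ms_per_name/1000. + select_sec/float(names_per_batch)
    return names_ct*accepted_multiplier*sec_per_name/3600./24.
# e.g. estimate_days(5221748, 162, 0, 1, accepted_multiplier=1) ~= 9.79 days
# (no SELECT overhead or accepted-names multiplier), matching the figure above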

import os.path
import sys

sys.path.append(os.path.dirname(__file__)+"/../lib")

import csvs
import opts
import profiling
import sql
import sql_gen
import sql_io
import strings
import tnrs

tnrs_input = sql_gen.Table('tnrs_input_name')
tnrs_batch = sql_gen.Table('batch')
tnrs_data = sql_gen.Table('taxon_match_input__copy_to')

def main():
    # Input
    env_names = []
    db_config = opts.get_env_vars(sql.db_config_names, None, env_names)
    verbosity = float(opts.get_env_var('verbosity', 3, env_names))
    if not 'engine' in db_config: raise SystemExit('Usage: '
        +opts.env_usage(env_names)+' '+sys.argv[0]+' 2>>log')

    def log(msg, level=1):
        '''Higher level -> more verbose'''
        if level <= verbosity:
            sys.stderr.write(strings.to_raw_str(msg.rstrip('\n')+'\n'))

    # Connect to DB
    db = sql.connect(db_config, log_debug=log)

    cumulative_tnrs_profiler = profiling.ItersProfiler(iter_text='name')

    # Iterate over unscrubbed verbatim taxonlabels
    while True:
        # Fetch next set
        cur = sql.select(db, tnrs_input, limit=tnrs.max_names, cacheable=False)
        this_ct = cur.rowcount
        log('Processing '+str(this_ct)+' taxonlabels')
        if this_ct == 0: break
        # otherwise, rows found
        names = list(sql.values(cur))

        def process():
            # Run TNRS
            log('Making TNRS request')
            stream = tnrs.tnrs_request(names, use_tnrs_export=True,
                cumulative_profiler=cumulative_tnrs_profiler)

            log('Storing TNRS response data')
            sql.insert(db, tnrs_batch, []) # time_submitted is autopopulated
            sql_io.append_csv(db, tnrs_data, *csvs.reader_and_header(stream))
        # start transaction *before* submitting data, so time_submitted is
        # correctly set to the submission time rather than the insertion time.
        # these may differ by several minutes if TNRS is slow.
        sql.with_savepoint(db, process)

main()