#!/usr/bin/env python
# Scrubs the taxonconcepts in VegBIEN using TNRS.
# Runs continuously until no new rows are added after max_pause.
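#
# Usage sketch (illustrative only; the required environment variables are those
# in sql.db_config_names, and the usage message printed on error is authoritative):
#   engine=<DB engine> <other connection vars> [wait=1] [verbosity=<level>] <this script> 2>>log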

import os.path
import StringIO
import sys
import time

# make the project's lib/ directory importable (provides csvs, opts, sql, etc.)
sys.path.append(os.path.dirname(__file__)+"/../lib")

import csvs
import opts
import profiling
import sql
import sql_gen
import sql_io
import streams
import strings
import tnrs

# Config
pause = 2*60*60 # sec; = 2 hr
max_pause = 9*60*60 # sec; = 9 hr; must be >= max partition import time (1.5 hr)
assert pause <= max_pause

tnrs_data = sql_gen.Table('tnrs')

def main():
    # Input
    env_names = []
    db_config = opts.get_env_vars(sql.db_config_names, None, env_names)
    verbosity = float(opts.get_env_var('verbosity', 3, env_names))
    wait = opts.env_flag('wait', False, env_names)
    if 'engine' not in db_config: raise SystemExit('Usage: '
        +opts.env_usage(env_names)+' '+sys.argv[0]+' 2>>log')

    def log(msg, level=1):
        '''Higher level -> more verbose'''
        if level <= verbosity:
            sys.stderr.write(strings.to_raw_str(msg.rstrip('\n')+'\n'))

    # Connect to DB
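    # log_debug=log routes the DB library's debug output through log()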
    db = sql.connect(db_config, log_debug=log)

    # profiles TNRS requests (cumulative time, reported per name)
    tnrs_profiler = profiling.ItersProfiler(iter_text='name')

    # Iterate over unscrubbed verbatim taxonconcepts
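    # Each pass: select up to tnrs.max_names names that have no TNRS match yet,
    # submit them to TNRS, and append the response CSV to the tnrs table.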
    total_pause = 0
    tables = ['taxonconcept', sql_gen.Join('tnrs',
        {'Name_submitted': 'identifyingtaxonomicname'}, sql_gen.filter_out)]
    # Has a concatenated name and not already linked to an accepted name
    conds = [('identifyingtaxonomicname', sql_gen.CompareCond(None, '!=')),
        ('matched_concept_id', None)]
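    # Roughly the query assembled from tables/conds above (a sketch; the actual
    # SQL is generated by the sql/sql_gen modules and may differ in quoting and
    # NULL handling):
    #   SELECT identifyingtaxonomicname FROM taxonconcept
    #   LEFT JOIN tnrs ON tnrs."Name_submitted" = identifyingtaxonomicname
    #   WHERE tnrs."Name_submitted" IS NULL  -- filter_out: not yet sent to TNRS
    #     AND identifyingtaxonomicname IS NOT NULL AND matched_concept_id IS NULL
    #   LIMIT <tnrs.max_names>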
    while True:
        # Fetch the next batch of names (limited to tnrs.max_names)
        cur = sql.select(db, tables, ['identifyingtaxonomicname'], conds,
            limit=tnrs.max_names, cacheable=False)
        this_ct = cur.rowcount
        log('Processing '+str(this_ct)+' taxonconcepts')
        if this_ct == 0:
            if not wait: break
            log('Waited '+str(total_pause)+' sec total')
            total_pause += pause
            if total_pause > max_pause: break
            log('Waiting '+str(pause)+' sec...')
            time.sleep(pause) # wait for more rows
            continue # try again
        # otherwise, rows found
        total_pause = 0
        names = list(sql.values(cur))

        # Run TNRS
        log('Making TNRS request')
        tnrs_profiler.start()
        try:
            try: stream = tnrs.repeated_tnrs_request(names)
            finally:
                tnrs_profiler.stop(iter_ct=this_ct)
                log('Cumulatively: '+tnrs_profiler.msg())
        except tnrs.InvalidResponse: pass # skip set in case it caused error
        else: # request succeeded; store the response
            log('Storing TNRS response data')
            stream_info = csvs.stream_info(stream, parse_header=True)
            # report read progress to stderr as the response streams in
            stream = streams.ProgressInputStream(stream, sys.stderr, n=1000)
            # append the TNRS response CSV to the tnrs table
            sql_io.append_csv(db, tnrs_data, stream_info, stream)

main()