#!/usr/bin/env python
# Scrubs the taxonconcepts in VegBIEN using TNRS.
# Runs continuously until no new rows are added after max_pause.

import os.path
import StringIO
import sys
import time

sys.path.append(os.path.dirname(__file__)+"/../lib")

import csvs
import opts
import profiling
import sql
import sql_gen
import sql_io
import streams
import strings
import tnrs

# Config
pause = 2*60*60 # sec; = 2 hr
max_pause = 9*60*60 # sec; = 9 hr; must be >= max partition import time (1.5 hr)
assert pause <= max_pause

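# VegBIEN table that stores the TNRS response rows (appended to below)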
tnrs_data = sql_gen.Table('tnrs')

def main():
    # Input
    env_names = []
    db_config = opts.get_env_vars(sql.db_config_names, None, env_names)
    verbosity = float(opts.get_env_var('verbosity', 3, env_names))
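    # wait: if set, keep polling for new rows (up to max_pause total) instead of exiting when none are found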
    wait = opts.env_flag('wait', False, env_names)
    if 'engine' not in db_config: raise SystemExit('Usage: '
        +opts.env_usage(env_names)+' '+sys.argv[0]+' 2>>log')
    
    def log(msg, level=1):
        '''Higher level -> more verbose'''
        if level <= verbosity:
            sys.stderr.write(strings.to_raw_str(msg.rstrip('\n')+'\n'))
    
    # Connect to DB
    db = sql.connect(db_config, log_debug=log)
    
    tnrs_profiler = profiling.ItersProfiler(iter_text='name')
    
    # Iterate over unscrubbed verbatim taxonconcepts
    total_pause = 0
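    # The filter_out join selects only names that do not yet have a row in the tnrs table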
    tables = ['taxonconcept', sql_gen.Join('tnrs',
        {'Name_submitted': 'identifyingtaxonomicname'}, sql_gen.filter_out)]
    # Has a concatenated name and not already linked to an accepted name
    conds = [('identifyingtaxonomicname', sql_gen.CompareCond(None, '!=')),
        ('canon_taxonconcept_id', None)]
    while True:
        # Fetch next set
        cur = sql.select(db, tables, ['identifyingtaxonomicname'], conds,
            limit=tnrs.max_names, cacheable=False)
        this_ct = cur.rowcount
        log('Processing '+str(this_ct)+' taxonconcepts')
        if this_ct == 0:
            if not wait: break
            log('Waited '+str(total_pause)+' sec total')
            total_pause += pause
            if total_pause > max_pause: break
            log('Waiting '+str(pause)+' sec...')
            time.sleep(pause) # wait for more rows
            continue # try again
        # otherwise, rows found
        total_pause = 0
        names = list(sql.values(cur))
        
        # Run TNRS
        log('Making TNRS request')
        tnrs_profiler.start()
        try:
            try: stream = tnrs.repeated_tnrs_request(names)
            finally:
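                # Always stop the profiler, even if the request raised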
                tnrs_profiler.stop(iter_ct=this_ct)
                log('Cumulatively: '+tnrs_profiler.msg())
        except tnrs.InvalidResponse: pass # skip set in case it caused error
        else:
            log('Storing TNRS response data')
            stream_info = csvs.stream_info(stream, parse_header=True)
            stream = streams.ProgressInputStream(stream, sys.stderr, n=1000)
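            # Append the response rows to the tnrs table; the filter_out join then excludes these names on later passes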
            sql_io.append_csv(db, tnrs_data, stream_info, stream)

main()