#!/usr/bin/env python
# Scrubs the taxonlabels in VegBIEN using TNRS.
# Runs continuously until no new rows are added after max_pause.

import os.path
import StringIO
import sys
import time

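# make the project's local lib/ directory importable; it provides the
# project-specific modules (csvs, sql, sql_gen, sql_io, tnrs, etc.) below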
sys.path.append(os.path.dirname(__file__)+"/../lib")

import csvs
import dates
import opts
import profiling
import sql
import sql_gen
import sql_io
import streams
import strings
import tnrs

# Config
pause = 2*60*60 # sec; = 2 hr
max_pause = 9*60*60 # sec; = 9 hr; must be >= max partition import time (1.5 hr)
assert pause <= max_pause
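
# DB objects: where unscrubbed input names are read from, where TNRS results
# are stored, and the extra columns added to each stored result row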
tnrs_input = sql_gen.Table('tnrs_input_name')
tnrs_data = sql_gen.Table('tnrs')
time_col_name = 'Time_submitted'
added_col_names = ['Accepted_scientific_name', 'Max_score']

def main():
    # Input
    env_names = []
    db_config = opts.get_env_vars(sql.db_config_names, None, env_names)
    verbosity = float(opts.get_env_var('verbosity', 3, env_names))
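    # if $wait is set, keep polling for new input rows instead of exiting as
    # soon as the queue is empty (see the main loop below)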
    wait = opts.env_flag('wait', False, env_names)
    if 'engine' not in db_config: raise SystemExit('Usage: '
        +opts.env_usage(env_names)+' '+sys.argv[0]+' 2>>log')

    def log(msg, level=1):
        '''Higher level -> more verbose'''
        if level <= verbosity:
            sys.stderr.write(strings.to_raw_str(msg.rstrip('\n')+'\n'))

    # Connect to DB
    db = sql.connect(db_config, log_debug=log)
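
    # profiles the TNRS calls; cumulative timing stats are logged after each batch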
    tnrs_profiler = profiling.ItersProfiler(iter_text='name')

    # Iterate over unscrubbed verbatim taxonlabels
    total_pause = 0
    while True:
        # Fetch next set
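        # (tnrs.max_names presumably caps how many names one TNRS request may contain)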
        cur = sql.select(db, tnrs_input, limit=tnrs.max_names, cacheable=False)
        this_ct = cur.rowcount
        log('Processing '+str(this_ct)+' taxonlabels')
        if this_ct == 0:
            if not wait: break
            log('Waited '+str(total_pause)+' sec total')
            total_pause += pause
            if total_pause > max_pause: break
            log('Waiting '+str(pause)+' sec...')
            time.sleep(pause) # wait for more rows
            continue # try again
        # otherwise, rows found
        total_pause = 0
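        # sql.values presumably yields the selected column's values, i.e. the
        # raw taxon name strings to submit to TNRS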
        names = list(sql.values(cur))

        # Run TNRS
        log('Making TNRS request')
        now_str = str(dates.now())
        tnrs_profiler.start()
        try: stream = tnrs.repeated_tnrs_request(names)
        finally:
            tnrs_profiler.stop(iter_ct=this_ct)
            log('Cumulatively: '+tnrs_profiler.msg())

        log('Storing TNRS response data')
        reader, header = csvs.reader_and_header(stream)
        header.insert(0, time_col_name)
        added_cols_idx = len(header)
        header += added_col_names
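        # fill the added columns in each data row: the submission time for the
        # prepended column, and None placeholders for the appended columns
        # (the extra ColInsertFilter args presumably give the insert position
        # and column count)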
        reader = csvs.ColInsertFilter(reader, lambda row, row_num: now_str)
        reader = csvs.ColInsertFilter(reader, lambda row, row_num: None,
            added_cols_idx, len(added_col_names))
        sql_io.append_csv(db, tnrs_data, reader, header)

main()