Project

General

Profile

1
# TNRS
2

    
3
import re
4
import sys
5
import time
6
import urllib2
7

    
8
import exc
9
import profiling
10
import streams
11

    
12
# Config
13
initial_pause = 0.35 # sec
14
pause_growth_factor = 1.3
15
max_pause = 30*60 # sec; = 30 min; TNRS sometimes freezes for ~10 min
16
assert initial_pause <= max_pause
17
max_names = 5000 # according to http://tnrs.iplantcollaborative.org/TNRSapp.html
18
max_names = 500 # the maximum above crashes the TNRS server
19

    
20
# Protocol params
# These constants mimic the GWT-RPC wire protocol spoken by the TNRS demo web
# UI. The *_request_template strings are raw GWT-RPC payloads containing
# [names]/[key] placeholders that tnrs_request() substitutes via str.replace();
# the *_pattern regexes parse the corresponding GWT-RPC reply envelopes.
# NOTE(review): the backslashes at end-of-line inside the template strings are
# Python line continuations within non-raw string literals — do not convert
# these to raw strings, as that would insert literal backslash+newline pairs.
url_base = 'http://tnrs.iplantcollaborative.org/tnrsdemo/'
url = url_base+'search'
# Headers required for the server to accept the call as a GWT-RPC request; the
# permutation hash identifies a specific compiled GWT client build.
initial_headers = {
    'Content-Type': 'text/x-gwt-rpc; charset=utf-8',
    'X-GWT-Module-Base': url_base,
    'X-GWT-Permutation': '574AA16D15D917C7704646FD92AFF6B3',
}
# doSearch call: submits the (double-escaped, \n-separated) name list and
# starts an asynchronous resolution job on the server.
submission_request_template = ('7|0|7|'+url_base+
'||org.iplantc.tnrs.demo.client.SearchService|doSearch|\
java.lang.String/2004016611|{"sources":"gcc,tropicos,usda", "names":"[names]"\
, "type":"matching", "taxonomic":"true", "classification":"tropicos", \
"match_to_rank":"true"}|0.05|1|2|3|4|2|5|5|6|7|')
# Group 1 captures the job key used by all subsequent requests.
submission_response_pattern = r'^//OK\[1,\["(\w+)"\],0,7\]$'
# getRemoteData call: polls the job identified by [key] for completion.
retrieval_request_template = ('7|0|15|'+url_base+
'|1E87C78041CEFBF0992F46BDF84D7D60|org.iplantc.tnrs.demo.client.SearchService\
|getRemoteData|com.extjs.gxt.ui.client.data.PagingLoadConfig|\
java.lang.String/2004016611|com.extjs.gxt.ui.client.data.BasePagingLoadConfig/\
2011366567|com.extjs.gxt.ui.client.data.RpcMap/3441186752|sortField|sortDir|\
com.extjs.gxt.ui.client.Style$SortDir/640452531|offset|java.lang.Integer/\
3438268394|limit|{"email":"tnrs@lka5jjs.orv", "key":"[key]", \
"taxonomic_constraint":"false", "source_sorting":"false", "first":"false"}\
|1|2|3|4|2|5|6|7|0|1|8|4|9|0|10|11|0|12|13|0|14|13|100|15|')
# Matched only to validate the retrieval reply; no groups are extracted (the
# payload itself is unusable — see the comment in tnrs_request()).
retrieval_response_pattern = '^//OK\[.*?\["com.extjs.gxt.ui.client.data.\
BasePagingLoadResult/496878394","java.util.ArrayList/4159755760","org.iplantc.\
tnrs.demo.shared.BeanTNRSEntry/1039545748",".*"\],0,7\]$'
# Applied to the retrieval response *headers* to capture the session cookie
# needed by the download step.
retrieval_response_info_pattern = r'(?ms).*^Set-Cookie: JSESSIONID=(\w+);'
# downloadRemoteResults call: asks the server to prepare the results file.
download_request_template = ('7|0|6|'+url_base+
'|1E87C78041CEFBF0992F46BDF84D7D60|org.iplantc.tnrs.demo.client.SearchService|\
downloadRemoteResults|java.lang.String/2004016611|{"name":"tnrs_results.txt", \
"mode":"Best", "type":"Detailed", "encoding":"utf8", "dirty":"false", \
"sources":"false", "taxonomic":"false", "email":"tnrs@lka5jjs.orv", \
"key":"[key]"}|1|2|3|4|1|5|6|')
# Group 1 captures the URL of the prepared results file.
download_response_pattern = '^//OK\[1,\["(.*)"\],0,7\]$'
download_url_suffix = '&name=tnrs_results.txt&encoding=utf8'
55

    
56
class InvalidResponse(Exception):
    '''Raised when a TNRS server reply does not match the expected pattern.'''
    pass
57

    
58
def gwt_encode(str_):
    '''Sanitize a string for safe embedding in a GWT-RPC payload.

    Every run of characters other than word characters, '.', '(', ')', '-'
    and space is collapsed into a single space.
    '''
    return re.sub(r'[^\w.() -]+', ' ', str_)
59

    
60
def parse_response(name, pattern, str_, response, response_info):
    '''Match `pattern` against `str_` and return the tuple of captured groups.

    `name` labels the protocol step for the error message; `response` and
    `response_info` are included verbatim in the exception text to aid
    debugging. Raises InvalidResponse when the text does not match.
    '''
    match = re.match(pattern, str_)
    if match: return match.groups()
    raise InvalidResponse(
        'Invalid '+name+' response:\n'+response_info+'\n'+response)
66

    
67
def tnrs_request(names, debug=False):
    '''
    Resolves a batch of taxon names against the TNRS web service by driving
    its GWT-RPC protocol: submit the names, poll until the job completes,
    then request and fetch the downloadable results file. Returns the open
    urllib2 response object for that file (tab-separated text). Pass
    debug=True to trace every request/response on stderr.
    
    Note that names containing only whitespace characters (after gwt_encode())
    are ignored by TNRS and do not receive a response row. Thus, you should
    always match up the Name_submitted returned by TNRS with the actual
    submitted name to determine the corresponding TNRS response row.
    '''
    name_ct = len(names)
    assert name_ct <= max_names
    
    # Logging
    def debug_log(label, str_=''):
        if debug: sys.stderr.write('\n'+label+':\n'+str_+'\n')
    
    ## HTTP
    headers = initial_headers.copy() # don't modify global constant!
    
    # POSTs `request` to the TNRS endpoint and returns (body, headers-as-str).
    def do_request(request):
        debug_log('request', str(request))
        response = urllib2.urlopen(urllib2.Request(url, request, headers))
        response_str = streams.read_all(response)
        response_info = str(response.info())
        debug_log('response info', response_info)
        debug_log('response str', response_str)
        return response_str, response_info
    
    # Polls with exponential back-off: the server answers with an HTTP error
    # while the job is still running, so retry until the cumulative wait
    # exceeds max_pause.
    def do_repeated_request(request):
        pause = initial_pause
        total_pause = 0
        while True:
            total_pause += pause
            # NOTE(review): bare `raise` outside an except block relies on
            # Python 2 keeping the HTTPError caught on a *previous* iteration
            # as the current exception; the module-level assert
            # (initial_pause <= max_pause) guarantees this line cannot be
            # reached on the first iteration. Not portable to Python 3.
            if total_pause > max_pause: raise # error is not temporary
            debug_log('total_pause', str(total_pause)+'s')
            time.sleep(pause) # wait for job to complete
            
            try: return do_request(request)
            except urllib2.HTTPError: pass # try again
            pause *= pause_growth_factor
    
    profiler = profiling.ItersProfiler(start_now=True, iter_text='name')
    try:
        # Step 1: submit the job; the reply carries the job key.
        debug_log('Submit')
        request = submission_request_template.replace('[names]',
            r'\\n'.join(map(gwt_encode, names))) # double-escape \n
        response, response_info = do_request(request)
        key, = parse_response('submission', submission_response_pattern, response,
            response, response_info)
        debug_log('key', key)
        key_enc = gwt_encode(key)
        
        # Step 2: poll until the job finishes; also harvest the session
        # cookie from the response headers for the download step.
        debug_log('Retrieve')
        request = retrieval_request_template.replace('[key]', key_enc)
        response, response_info = do_repeated_request(request)
        parse_response('retrieval', retrieval_response_pattern, response, response,
            response_info)
        session_id, = parse_response('retrieval info',
            retrieval_response_info_pattern, response_info, response, response_info)
        debug_log('session_id', session_id)
        headers['Cookie'] = 'JSESSIONID='+session_id
        
        # The output of the retrieve step is unusable because the array has
        # different lengths depending on the taxonomic ranks present in the provided
        # taxon name. The extra download step is therefore necessary.
        
        # Step 3: ask the server to prepare the results file and get its URL.
        debug_log('Prepare download')
        request = download_request_template.replace('[key]', key_enc)
        response, response_info = do_request(request)
        csv_url, = parse_response('download', download_response_pattern, response,
            response, response_info)
        csv_url += download_url_suffix
        debug_log('csv_url', csv_url)
        
        # Step 4: fetch the file and hand the open response to the caller.
        debug_log('Download')
        response = urllib2.urlopen(urllib2.Request(csv_url))
        debug_log('response info', str(response.info()))
        return response
    finally:
        # Report per-name timing whether we succeeded or raised.
        profiler.stop(name_ct)
        sys.stderr.write(profiler.msg()+'\n')
146

    
147
def repeated_tnrs_request(names, debug=False, **kw_args):
    '''Calls tnrs_request(), retrying once with debug tracing enabled after
    an InvalidResponse; any other exception propagates immediately.'''
    for try_num in xrange(2):
        try: return tnrs_request(names, debug, **kw_args)
        except InvalidResponse, e:
            exc.print_ex(e, detail=False)
            debug = True
            # try again with debug turned on
    # NOTE(review): bare `raise` after the loop relies on Python 2 keeping the
    # InvalidResponse caught in the last iteration as the current exception;
    # this idiom does not work on Python 3.
    raise # error is not temporary
(34-34/41)