Project

General

Profile

1 4990 aaronmk
# TNRS
2
3
import re
4
import sys
5
import time
6
import urllib2
7
8 5149 aaronmk
import csvs
9 5107 aaronmk
import exc
10 5120 aaronmk
import profiling
11 4990 aaronmk
import streams
12 5144 aaronmk
import strings
13 4990 aaronmk
14
# Config
initial_pause = 0.35 # sec; initial wait before polling for job completion
pause_growth_factor = 1.3 # exponential backoff multiplier between polls
max_pause = 30*60 # sec; = 30 min; TNRS sometimes freezes for ~10 min
assert initial_pause <= max_pause
# The documented maximum batch size is 5000 (according to
# http://tnrs.iplantcollaborative.org/TNRSapp.html ), but submitting that many
# names crashes the TNRS server, so use a smaller batch size instead.
max_names = 500
# Protocol params
# TNRS is accessed through the GWT-RPC interface of its web demo. The request
# templates below are captured GWT-RPC payloads, with [names]/[key]
# placeholders spliced in where the variable parts go.
url_base = 'http://tnrs.iplantcollaborative.org/tnrsdemo/'
url = url_base+'search'
# Headers required by the GWT-RPC endpoint; the permutation ID identifies the
# compiled GWT client version this payload format was captured from.
initial_headers = {
    'Content-Type': 'text/x-gwt-rpc; charset=utf-8',
    'X-GWT-Module-Base': url_base,
    'X-GWT-Permutation': '574AA16D15D917C7704646FD92AFF6B3',
}
# Submits a batch of names; [names] is replaced with the gwt_encode()d,
# newline-separated name list.
submission_request_template = ('7|0|7|'+url_base+
'||org.iplantc.tnrs.demo.client.SearchService|doSearch|\
java.lang.String/2004016611|{"sources":"gcc,tropicos,usda", "names":"[names]"\
, "type":"matching", "taxonomic":"true", "classification":"tropicos", \
"match_to_rank":"true"}|0.05|1|2|3|4|2|5|5|6|7|')
# Captures the job key assigned to the submitted batch.
submission_response_pattern = r'^//OK\[1,\["(\w+)"\],0,7\]$'
# Polls for the results of a submitted job; [key] is replaced with the
# gwt_encode()d job key from the submission response.
retrieval_request_template = ('7|0|15|'+url_base+
'|1E87C78041CEFBF0992F46BDF84D7D60|org.iplantc.tnrs.demo.client.SearchService\
|getRemoteData|com.extjs.gxt.ui.client.data.PagingLoadConfig|\
java.lang.String/2004016611|com.extjs.gxt.ui.client.data.BasePagingLoadConfig/\
2011366567|com.extjs.gxt.ui.client.data.RpcMap/3441186752|sortField|sortDir|\
com.extjs.gxt.ui.client.Style$SortDir/640452531|offset|java.lang.Integer/\
3438268394|limit|{"email":"tnrs@lka5jjs.orv", "key":"[key]", \
"taxonomic_constraint":"false", "source_sorting":"false", "first":"false"}\
|1|2|3|4|2|5|6|7|0|1|8|4|9|0|10|11|0|12|13|0|14|13|100|15|')
# NOTE: not a raw string because the trailing backslashes are string line
# continuations; making it raw would insert literal "\<newline>" into the
# pattern. The unknown escapes (\[ etc.) pass through unchanged.
retrieval_response_pattern = '^//OK\[.*?\["com.extjs.gxt.ui.client.data.\
BasePagingLoadResult/496878394","java.util.ArrayList/4159755760","org.iplantc.\
tnrs.demo.shared.BeanTNRSEntry/1039545748",".*"\],0,7\]$'
# Captures the session ID cookie from the retrieval response headers, needed
# to authorize the download step.
retrieval_response_info_pattern = r'(?ms).*^Set-Cookie: JSESSIONID=(\w+);'
# Asks the server to prepare a downloadable results file for the job.
download_request_template = ('7|0|6|'+url_base+
'|1E87C78041CEFBF0992F46BDF84D7D60|org.iplantc.tnrs.demo.client.SearchService|\
downloadRemoteResults|java.lang.String/2004016611|{"name":"tnrs_results.txt", \
"mode":"Best", "type":"Detailed", "encoding":"utf8", "dirty":"false", \
"sources":"false", "taxonomic":"false", "email":"tnrs@lka5jjs.orv", \
"key":"[key]"}|1|2|3|4|1|5|6|')
# Captures the URL of the prepared results file.
download_response_pattern = '^//OK\[1,\["(.*)"\],0,7\]$'
download_url_suffix = '&name=tnrs_results.txt&encoding=utf8'
class InvalidResponse(Exception):
    '''Raised when a TNRS response does not match the expected pattern.'''
    pass
def gwt_encode(str_):
    '''JSON-encodes str_ and escapes the "|" GWT-RPC field delimiter, so the
    value can be embedded in a GWT-RPC request payload.'''
    json_str = strings.json_encode(str_)
    return strings.esc_quotes(json_str, '|', quote_esc='\!')
def make_spliced_decode_map(decode_map):
    '''Turns a literal decode map into a regexp decode map whose patterns also
    consume the single splice space around each escape token (or anchor on a
    tab boundary or string boundary, where no space was added).'''
    spliced = []
    for from_, to in decode_map:
        pattern = (r'(?: |(?<=\t)|^)'+re.escape(from_.strip())
            +r'(?: |(?=\t)|$)')
        spliced.append((pattern, strings.regexp_repl_esc(to)))
    return spliced
padding = ' !pad ' # prepend to empty and whitespace-only strings

# Characters that TNRS mangles, each mapped to a private " !xxx " escape
# token. encode() applies these before submission; decode() reverses them.
# Note that '!' itself is escaped first, so the tokens are unambiguous.
encode_map = [
    ('!', ' !exc '), # our escape char
    ('\t', ' !tab '), # TNRS replaces with " "
    ('\n', ' !nl '), # used to separate multiple names
    ('\r', ' !cr '), # used to separate multiple names
    ('"', ' !quo '), # TNRS removes it when at the beginning or end
    ('%', ' !pct '), # TNRS URL-decodes it in matched fields
    ("'", ' !apo '), # TNRS removes it when at the beginning or end
    (';', ' !sem '), # changes TNRS response format
    ('\\', ' !bsl '), # TNRS removes it
    ('_', ' !und '), # TNRS replaces with " "
]
# Inverse of encode_map, for restoring the original characters
decode_map = strings.flip_map(encode_map)
decode_map.append((padding, '')) # the padding token is simply removed
# Regexp version of decode_map that also consumes the splice spaces
spliced_decode_map = make_spliced_decode_map(decode_map)
def encode(str_):
    '''Escapes all TNRS-mangled characters in str_ using encode_map.
    Empty and whitespace-only strings are additionally marked with the
    padding token, because TNRS ignores them (no response row is returned).'''
    escaped = strings.replace_all(encode_map, str_)
    if escaped.strip() != '': return escaped
    return padding+escaped
def decode(str_):
    '''Reverses encode() on a TNRS response field, restoring the original
    characters and removing any padding token.'''
    return strings.replace_all_re(spliced_decode_map, str_)
# Like spliced_decode_map, but each replacement is additionally TSV-escaped,
# so decoding can be applied directly to a TSV results document without
# corrupting its structure.
decode_for_tsv_map = make_spliced_decode_map([(from_, strings.replace_all(
    csvs.tsv_encode_map, to)) for from_, to in decode_map])
def decode_for_tsv(str_):
    '''Like decode(), but the restored characters are TSV-escaped so the
    result stays valid inside a TSV document.'''
    decoded = strings.replace_all_re(decode_for_tsv_map, str_)
    return decoded
class TnrsOutputStream(streams.FilterStream):
    '''Decodes a TNRS response whose names were encoded with encode().
    Wraps a readable stream and passes the data read through
    decode_for_tsv(), so escape tokens come out TSV-escaped.'''
    def __init__(self, stream):
        # stream: the raw HTTP response stream of the TNRS results download
        streams.FilterStream.__init__(self, decode_for_tsv, stream)
def parse_response(name, pattern, str_, response, response_info):
    '''Matches str_ against pattern and returns the tuple of captured groups.

    name: label used in the error message (e.g. 'submission')
    response, response_info: included verbatim in the error message
    Raises InvalidResponse if the pattern does not match at the start of str_.
    '''
    match = re.match(pattern, str_)
    if match is None:
        msg = 'Invalid '+name+' response:\n'+response_info+'\n'+response
        raise InvalidResponse(msg)
    return match.groups()
def tnrs_request(names, debug=False):
    '''
    Submits a batch of taxon names to TNRS and returns the results as a
    TnrsOutputStream of the tab-separated download file.

    names: list of taxon name strings; len(names) must be <= max_names
    debug: if set, logs requests and responses to stderr

    Note that names containing only whitespace characters (after gwt_encode())
    are ignored by TNRS and do not receive a response row. Thus, you should
    always match up the Name_submitted returned by TNRS with the actual
    submitted name to determine the corresponding TNRS response row.
    '''
    name_ct = len(names)
    assert name_ct <= max_names
    
    # Logging
    def debug_log(label, str_=''):
        if debug: sys.stderr.write('\n'+label+':\n'+str_+'\n')
    
    ## HTTP
    headers = initial_headers.copy() # don't modify global constant!
    
    # Performs one POST to the search endpoint; returns (body, header str)
    def do_request(request):
        debug_log('request', str(request))
        response = urllib2.urlopen(urllib2.Request(url, request, headers))
        response_str = streams.read_all(response)
        response_info = str(response.info())
        debug_log('response info', response_info)
        debug_log('response str', response_str)
        return response_str, response_info
    
    # Polls do_request() with exponential backoff until it succeeds or the
    # total wait exceeds max_pause
    def do_repeated_request(request):
        pause = initial_pause
        total_pause = 0
        while True:
            total_pause += pause
            # bare raise re-raises the HTTPError caught on the previous
            # iteration (Python 2 semantics; sys.exc_info() persists)
            if total_pause > max_pause: raise # error is not temporary
            debug_log('total_pause', str(total_pause)+'s')
            time.sleep(pause) # wait for job to complete
            
            try: return do_request(request)
            except urllib2.HTTPError: pass # try again
            pause *= pause_growth_factor
    
    profiler = profiling.ItersProfiler(start_now=True, iter_text='name')
    try:
        # Step 1: submit the batch and get back a job key
        debug_log('Submit')
        request = submission_request_template.replace('[names]',
            gwt_encode('\n'.join(map(encode, names))))
        response, response_info = do_request(request)
        key, = parse_response('submission', submission_response_pattern,
            response, response, response_info)
        debug_log('key', key)
        key_enc = gwt_encode(key)
        
        # Step 2: poll until the job completes; we only need the session ID
        # cookie from the response headers, not the response body
        debug_log('Retrieve')
        request = retrieval_request_template.replace('[key]', key_enc)
        response, response_info = do_repeated_request(request)
        parse_response('retrieval', retrieval_response_pattern, response,
            response, response_info)
        session_id, = parse_response('retrieval info',
            retrieval_response_info_pattern, response_info, response,
            response_info)
        debug_log('session_id', session_id)
        headers['Cookie'] = 'JSESSIONID='+session_id
        
        # The output of the retrieve step is unusable because the array has
        # different lengths depending on the taxonomic ranks present in the
        # provided taxon name. The extra download step is therefore necessary.
        
        # Step 3: ask the server to prepare a downloadable results file
        debug_log('Prepare download')
        request = download_request_template.replace('[key]', key_enc)
        response, response_info = do_request(request)
        csv_url, = parse_response('download', download_response_pattern,
            response, response, response_info)
        csv_url += download_url_suffix
        debug_log('csv_url', csv_url)
        
        # Step 4: fetch the results file and wrap it in a decoding stream
        debug_log('Download')
        response = urllib2.urlopen(urllib2.Request(csv_url))
        debug_log('response info', str(response.info()))
        return TnrsOutputStream(response)
    finally:
        profiler.stop(name_ct)
        sys.stderr.write(profiler.msg()+'\n')
def repeated_tnrs_request(names, debug=False, **kw_args):
    '''Runs tnrs_request(), retrying once (with debug logging turned on) if
    the server returns an HTTP error or an unparseable response. If the retry
    also fails, the last exception is re-raised.'''
    for try_num in xrange(2):
        try: return tnrs_request(names, debug, **kw_args)
        except (urllib2.HTTPError, InvalidResponse), e:
            exc.print_ex(e, detail=False)
            debug = True
            # try again with debug turned on
    # bare raise re-raises the exception caught in the last loop iteration
    # (Python 2 semantics; sys.exc_info() persists past the except block)
    raise # error is not temporary