1 |
5169
|
aaronmk
|
# TNRS
|
2 |
4990
|
aaronmk
|
|
3 |
13464
|
aaronmk
|
import os.path
|
4 |
4990
|
aaronmk
|
import re
|
5 |
|
|
import sys
|
6 |
|
|
import time
|
7 |
|
|
import urllib2
|
8 |
|
|
|
9 |
5149
|
aaronmk
|
import csvs
|
10 |
5107
|
aaronmk
|
import exc
|
11 |
5120
|
aaronmk
|
import profiling
|
12 |
4990
|
aaronmk
|
import streams
|
13 |
5144
|
aaronmk
|
import strings
|
14 |
4990
|
aaronmk
|
|
# Config

# Backoff settings for polling TNRS until a job completes (see
# do_repeated_request() inside single_tnrs_request()): start at initial_pause
# and multiply by pause_growth_factor after each failed poll, giving up once
# the cumulative wait exceeds max_pause.
initial_pause = 0.35 # sec
pause_growth_factor = 1.3
max_pause = 30*60 # sec; = 30 min; TNRS sometimes freezes for ~10 min
assert initial_pause <= max_pause # guarantees at least one poll happens
#max_names = 5000 #according to http://tnrs.iplantcollaborative.org/TNRSapp.html
# Maximum number of names per request batch (enforced by an assert in
# single_tnrs_request())
max_names = 500 # the maximum above crashes the live and dev TNRS servers
22 |
4990
|
aaronmk
|
|
# Protocol params

# The TNRS server hostname is read from the tnrs.url file next to this module
server = streams.file_get_contents(os.path.dirname(__file__)+"/tnrs.url")
#server = 'tnrs.iplantcollaborative.org' # live server
url_base = 'http://'+server+'/tnrsdemo/'
url = url_base+'search'

# HTTP headers required by the TNRS GWT-RPC endpoint. The permutation hash
# identifies the compiled GWT client; presumably it must match the deployed
# web app -- verify if requests start failing after a server upgrade.
initial_headers = {
    'Content-Type': 'text/x-gwt-rpc; charset=utf-8',
    'X-GWT-Module-Base': url_base,
    'X-GWT-Permutation': '574AA16D15D917C7704646FD92AFF6B3',
}

# Serialized GWT-RPC request bodies. "[names]" and "[key]" are placeholders
# substituted by single_tnrs_request(). NOTE: the backslash-continued lines
# below are *inside* the string literals, so they must stay unindented --
# any added leading whitespace would become part of the request body.
submission_request_template = ('7|0|7|'+url_base+
'|1E87C78041CEFBF0992F46BDF84D7D60|org.iplantc.tnrs.demo.client.SearchService|\
doSearch|java.lang.String/2004016611|{"sources":"gcc,tpl,tropicos,usda", \
"names":"[names]", "type":"matching", "taxonomic":"true", \
"classification":"tropicos", "match_to_rank":"true"}|0.05|1|2|3|4|2|5|5|6|7|')
# Matches a successful submission response; group 1 is the job key
submission_response_pattern = r'^//OK\[1,\["(\w+)"\],0,7\]$'
retrieval_request_template = ('7|0|15|'+url_base+
'|1E87C78041CEFBF0992F46BDF84D7D60|org.iplantc.tnrs.demo.client.SearchService\
|getRemoteData|com.extjs.gxt.ui.client.data.PagingLoadConfig|\
java.lang.String/2004016611|com.extjs.gxt.ui.client.data.BasePagingLoadConfig/\
2011366567|com.extjs.gxt.ui.client.data.RpcMap/3441186752|sortField|sortDir|\
com.extjs.gxt.ui.client.Style$SortDir/640452531|offset|java.lang.Integer/\
3438268394|limit|{"email":"tnrs@lka5jjs.orv", "key":"[key]", \
"taxonomic_constraint":"true", "source_sorting":"true", "first":"false"}\
|1|2|3|4|2|5|6|7|0|1|8|4|9|0|10|11|0|12|13|0|14|13|100|15|')
# taxonomic_constraint (Constrain by Higher Taxonomy): selects lower ranks
# only from within the matched higher ranks. must be turned on, to ensure
# that higher ranks are always authoritative.
# source_sorting (Constrain by Source): always puts matches in the order of
# the sources, regardless of match score. should never be turned on,
# because it selects worse matches instead of better ones. *however*, since
# this is currently broken and always forced on, we turn it on so that the
# download settings reflect what TNRS actually used.
# Matches a successful retrieval response (no capture groups; used only to
# validate the response shape)
retrieval_response_pattern = '^//OK\[.*?\["com.extjs.gxt.ui.client.data.\
BasePagingLoadResult/496878394","java.util.ArrayList/4159755760","org.iplantc.\
tnrs.demo.shared.BeanTNRSEntry/1039545748",".*"\],0,7\]$'
# Extracts the session id from the retrieval response *headers*; group 1 is
# the JSESSIONID cookie value
retrieval_response_info_pattern = r'(?ms).*^Set-Cookie: JSESSIONID=(\w+);'
download_request_template = ('7|0|6|'+url_base+
'|1E87C78041CEFBF0992F46BDF84D7D60|org.iplantc.tnrs.demo.client.SearchService|\
downloadRemoteResults|java.lang.String/2004016611|{"name":"tnrs_results.txt", \
"mode":"All", "type":"Detailed", "encoding":"utf8", "dirty":"false", \
"sources":"false", "taxonomic":"true", "email":"tnrs@lka5jjs.orv", \
"key":"[key]"}|1|2|3|4|1|5|6|')
# dirty:
# when on, sometimes marks multiple names as Selected. must be turned *off*
# so that only one name is marked as Selected. note that this actually used
# to be on in the web app (see r9910, 2013-6-18), but does not appear to be
# needed (the source_sorting bug alluded to in r9910 is not fixed by
# enabling the dirty setting).
# Matches a successful download-preparation response; group 1 is the URL of
# the results file
download_response_pattern = '^//OK\[1,\["(.*)"\],0,7\]$'
# Appended to the download URL to request the TSV results in UTF-8
download_url_suffix = '&name=tnrs_results.txt&encoding=utf8'
74 |
|
|
|
class InvalidResponse(Exception):
    '''Raised when a TNRS protocol response does not match the expected
    response pattern (see parse_response()).'''
    pass
76 |
|
|
|
def gwt_encode(str_):
    '''JSON-encode str_ and escape "|" (the GWT-RPC field separator) so the
    result can be embedded in a GWT-RPC request body.'''
    json_str = strings.json_encode(str_)
    return strings.esc_quotes(json_str, '|', quote_esc='\!')
79 |
4990
|
aaronmk
|
|
def make_spliced_decode_map(decode_map):
    '''Turn a list of (from, to) pairs into regexp (pattern, repl) pairs that
    also consume the single spaces spliced around each escape token by
    encode() (or match the adjacent tab/string boundary when no space is
    present).'''
    spliced = []
    for from_, to in decode_map:
        pattern = (r'(?: |(?<=\t)|^)' + re.escape(from_.strip())
            + r'(?: |(?=\t)|$)')
        spliced.append((pattern, strings.regexp_repl_esc(to)))
    return spliced
83 |
|
|
|
# Marker prepended to empty and whitespace-only names so TNRS still returns a
# response row for them (see encode()); stripped back out via decode_map
padding = ' !pad ' # prepend to empty and whitespace-only strings
# Characters that TNRS mangles, each mapped to a printable space-padded escape
# token. encode() substitutes these in submitted names; the decode functions
# reverse the substitution in the response.
encode_map = [
    ('!', ' !exc '), # our escape char
    ('\t', ' !tab '), # TNRS replaces with " "
    ('\n', ' !nl '), # used to separate multiple names
    ('\r', ' !cr '), # used to separate multiple names
    ('"', ' !quo '), # TNRS removes it when at the beginning or end
    ('%', ' !pct '), # TNRS URL-decodes it in matched fields
    ("'", ' !apo '), # TNRS removes it when at the beginning or end
    (';', ' !sem '), # changes TNRS response format
    ('\\', ' !bsl '), # TNRS removes it
    ('_', ' !und '), # TNRS replaces with " "
    ('', ' !sub '), # TNRS removes it
    # NOTE(review): the first element of the !sub entry above appears
    # empty/invisible in this copy of the source -- presumably an invisible
    # character (e.g. a soft hyphen) was intended; verify against the
    # original file that no character was lost
    ('×', ' !mul '), # TNRS replaces with "x"
]
# Inverse of encode_map, plus removal of the padding marker
decode_map = strings.flip_map(encode_map)
decode_map.append((padding, ''))
# Regexp form of decode_map that also consumes the spliced-in spaces
spliced_decode_map = make_spliced_decode_map(decode_map)
102 |
|
|
|
def encode(str_):
    '''Escape the characters in str_ that TNRS would mangle (see encode_map),
    and pad empty/whitespace-only strings so they still get a response row.'''
    encoded = strings.replace_all(encode_map, str_)
    # Empty and whitespace-only strings are ignored by TNRS (no response row)
    if not encoded.strip():
        encoded = padding + encoded
    return encoded
108 |
5149
|
aaronmk
|
|
def decode(str_):
    '''Invert encode(): replace each spliced escape token with its original
    character and strip any added padding.'''
    decoded = strings.replace_all_re(spliced_decode_map, str_)
    return decoded
110 |
|
|
|
# Like spliced_decode_map, but each decoded replacement is additionally
# TSV-escaped (via csvs.tsv_encode_map) so the decoded output stays valid TSV
decode_for_tsv_map = make_spliced_decode_map([(from_, strings.replace_all(
    csvs.tsv_encode_map, to)) for from_, to in decode_map])
113 |
|
|
|
def decode_for_tsv(str_):
    '''Like decode(), but TSV-escapes the decoded characters (see
    decode_for_tsv_map) so the result remains valid TSV.'''
    result = strings.replace_all_re(decode_for_tsv_map, str_)
    return result
116 |
|
|
|
class TnrsOutputStream(streams.FilterStream):
    '''Stream wrapper that decodes a TNRS response on the fly.

    The names in the response are assumed to have been encoded with encode();
    everything read from the underlying stream is passed through
    decode_for_tsv() so the output remains valid TSV.
    '''
    def __init__(self, stream):
        # Delegate the filtering to FilterStream, using decode_for_tsv as the
        # filter function
        streams.FilterStream.__init__(self, decode_for_tsv, stream)
121 |
|
|
|
def parse_response(name, pattern, str_, response, response_info):
    '''Validate a TNRS protocol response and extract its capture groups.

    @param name Short label for the protocol step (used in error messages)
    @param pattern Regexp the response must match
    @param str_ The text to match (response body or headers)
    @param response The response body, included in error messages
    @param response_info The response headers, included in error messages
    @return Tuple of the pattern's captured groups
    @raise InvalidResponse if str_ does not match pattern
    '''
    match_ = re.match(pattern, str_)
    if match_ is None:
        raise InvalidResponse('Invalid '+name+' response:\n'+response_info+'\n'
            +response)
    return match_.groups()
128 |
|
|
|
def single_tnrs_request(names, debug=False, cumulative_profiler=None):
    '''
    Submit one batch of names to TNRS and return the results as a stream.

    Note that names containing only whitespace characters (after gwt_encode())
    are ignored by TNRS and do not receive a response row. Thus, you should
    always match up the Name_submitted returned by TNRS with the actual
    submitted name to determine the corresponding TNRS response row.

    @param names Sequence of taxon name strings; at most max_names entries
    @param debug If True, log each request/response to stderr
    @param cumulative_profiler Optional profiler accumulating stats across
        calls via add_subprofiler()
    @return A TnrsOutputStream yielding the decoded TSV results
    @raise urllib2.HTTPError on a non-retryable (or timed-out) HTTP failure
    @raise InvalidResponse if a protocol response doesn't match its pattern
    '''
    name_ct = len(names)
    assert name_ct <= max_names # larger batches crash the TNRS servers

    # Logging
    def debug_log(label, str_=''):
        # no-op unless debug output was requested
        if debug: sys.stderr.write('\n'+label+':\n'+str_+'\n')

    ## HTTP
    headers = initial_headers.copy() # don't modify global constant!

    def do_request(request):
        # POST the GWT-RPC body to the search URL; returns
        # (response body, response headers as a str)
        request_obj = urllib2.Request(url, request, headers)
        debug_log('request URL', str(url))
        debug_log('request info', str(request_obj.header_items()))
        debug_log('request str', str(request_obj.get_data()))
        response = urllib2.urlopen(request_obj)
        response_str = streams.read_all(response)
        response_info = str(response.info())
        debug_log('response info', response_info)
        debug_log('response str', response_str)
        return response_str, response_info

    def do_repeated_request(request):
        # Poll with exponential backoff until the request succeeds (job
        # complete) or the cumulative wait exceeds max_pause
        pause = initial_pause
        total_pause = 0
        while True:
            total_pause += pause
            if total_pause > max_pause: raise # error is not temporary
            # NOTE(review): the bare raise above runs outside an except block;
            # it relies on Python 2 re-raising the HTTPError caught on a
            # previous iteration (the module-level assert initial_pause <=
            # max_pause guarantees at least one request happens first)
            debug_log('total_pause', str(total_pause)+'s')
            time.sleep(pause) # wait for job to complete

            try: return do_request(request)
            except urllib2.HTTPError: pass # try again
            pause *= pause_growth_factor

    profiler = profiling.ItersProfiler(start_now=True, iter_text='name')
    try:
        # Step 1: submit the encoded names; the response contains the job key
        debug_log('Submit')
        request = submission_request_template.replace('[names]',
            gwt_encode('\n'.join(map(encode, names))))
        response, response_info = do_request(request)
        key, = parse_response('submission', submission_response_pattern,
            response, response, response_info)
        debug_log('key', key)
        key_enc = gwt_encode(key)

        # Step 2: poll until the job completes; the response headers carry the
        # session cookie needed by the later steps
        debug_log('Retrieve')
        request = retrieval_request_template.replace('[key]', key_enc)
        response, response_info = do_repeated_request(request)
        parse_response('retrieval', retrieval_response_pattern, response,
            response, response_info)
        session_id, = parse_response('retrieval info',
            retrieval_response_info_pattern, response_info, response,
            response_info)
        debug_log('session_id', session_id)
        headers['Cookie'] = 'JSESSIONID='+session_id

        # the output of the retrieve step is unusable because the array does not
        # contain all the columns, contains no column names, and has different
        # lengths depending on the taxonomic ranks present in the provided taxon
        # name. the extra download step is therefore necessary.

        # Step 3: ask the server to prepare the full results file; the
        # response contains its download URL
        debug_log('Prepare download')
        request = download_request_template.replace('[key]', key_enc)
        response, response_info = do_request(request)
        csv_url, = parse_response('download', download_response_pattern,
            response, response, response_info)
        csv_url += download_url_suffix
        debug_log('csv_url', csv_url)

        # Step 4: stream the results file, wrapped so reads are decoded
        debug_log('Download')
        request_obj = urllib2.Request(csv_url)
        debug_log('request URL', str(csv_url))
        debug_log('request info', str(request_obj.header_items()))
        debug_log('request str', str(request_obj.get_data()))
        response = urllib2.urlopen(request_obj)
        response_info = str(response.info())
        debug_log('response info', response_info)
        return TnrsOutputStream(response)
    finally:
        # Always report per-call profiling stats, even on error
        profiler.stop(name_ct)
        sys.stderr.write(profiler.msg()+'\n')

        if cumulative_profiler != None:
            cumulative_profiler.add_subprofiler(profiler)
            sys.stderr.write('Cumulatively: '+cumulative_profiler.msg()+'\n')
222 |
5088
|
aaronmk
|
|
def tnrs_request(names, debug=False, **kw_args):
    '''Submit names to TNRS, retrying once (with debug logging enabled) if
    the first attempt fails with an HTTP error or an invalid response.

    @param names Sequence of taxon name strings (at most max_names)
    @param debug If True, log requests/responses to stderr on every attempt
    @param kw_args Passed through to single_tnrs_request()
    @return A TnrsOutputStream yielding the decoded TSV results
    '''
    for try_num in xrange(2):
        try: return single_tnrs_request(names, debug, **kw_args)
        except (urllib2.HTTPError, InvalidResponse), e:
            exc.print_ex(e, detail=False)
            debug = True
            # try again with debug turned on
    # NOTE(review): this bare raise is outside the except block; it relies on
    # Python 2 re-raising the last exception caught above (a RuntimeError in
    # Python 3) -- only reached after both attempts have failed
    raise # error is not temporary