Project

General

Profile

1
# TNRS
2

    
3
import re
4
import sys
5
import time
6
import urllib2
7

    
8
import streams
9

    
10
# Config
initial_pause = 0.35 # sec
pause_growth_factor = 1.3
max_pause = 600 # sec; = 10 min
assert initial_pause <= max_pause
max_taxons = 5000 # according to http://tnrs.iplantcollaborative.org/TNRSapp.html

# Protocol params
# GWT-RPC pipe-delimited request templates. '[taxons]' and '[key]' are
# placeholders substituted by tnrs_request() before posting.
url_base = 'http://tnrs.iplantcollaborative.org/tnrsdemo/'
url = url_base+'search'
initial_headers = {
    'Content-Type': 'text/x-gwt-rpc; charset=utf-8',
    'X-GWT-Module-Base': url_base,
    'X-GWT-Permutation': '574AA16D15D917C7704646FD92AFF6B3',
}
submission_request_template = ('7|0|7|'+url_base+
'||org.iplantc.tnrs.demo.client.SearchService|doSearch|\
java.lang.String/2004016611|{"sources":"gcc,tropicos,usda", "names":"[taxons]"\
, "type":"matching", "taxonomic":"true", "classification":"tropicos", \
"match_to_rank":"true"}|0.05|1|2|3|4|2|5|5|6|7|')
submission_response_pattern = r'^//OK\[1,\["(\w+)"\],0,7\]$'
retrieval_request_template = ('7|0|15|'+url_base+
'|1E87C78041CEFBF0992F46BDF84D7D60|org.iplantc.tnrs.demo.client.SearchService\
|getRemoteData|com.extjs.gxt.ui.client.data.PagingLoadConfig|\
java.lang.String/2004016611|com.extjs.gxt.ui.client.data.BasePagingLoadConfig/\
2011366567|com.extjs.gxt.ui.client.data.RpcMap/3441186752|sortField|sortDir|\
com.extjs.gxt.ui.client.Style$SortDir/640452531|offset|java.lang.Integer/\
3438268394|limit|{"email":"tnrs@lka5jjs.orv", "key":"[key]", \
"taxonomic_constraint":"false", "source_sorting":"false", "first":"false"}\
|1|2|3|4|2|5|6|7|0|1|8|4|9|0|10|11|0|12|13|0|14|13|100|15|')
# Raw-string pieces (adjacent-literal concatenation) so regex escapes such as
# \[ are not treated as (invalid) Python string escapes; values are unchanged.
retrieval_response_pattern = (r'^//OK\[.*?\["com.extjs.gxt.ui.client.data.'
r'BasePagingLoadResult/496878394","java.util.ArrayList/4159755760","org.iplantc.'
r'tnrs.demo.shared.BeanTNRSEntry/1039545748",".*"\],0,7\]$')
retrieval_response_info_pattern = r'(?ms).*^Set-Cookie: JSESSIONID=(\w+);'
download_request_template = ('7|0|6|'+url_base+
'|1E87C78041CEFBF0992F46BDF84D7D60|org.iplantc.tnrs.demo.client.SearchService|\
downloadRemoteResults|java.lang.String/2004016611|{"name":"tnrs_results.txt", \
"mode":"Best", "type":"Detailed", "encoding":"utf8", "dirty":"false", \
"sources":"false", "taxonomic":"false", "email":"tnrs@lka5jjs.orv", \
"key":"[key]"}|1|2|3|4|1|5|6|')
download_response_pattern = r'^//OK\[1,\["(.*)"\],0,7\]$'
download_url_suffix = '&name=tnrs_results.txt&encoding=utf8'
52

    
53
class InvalidResponse(Exception):
    """Raised when a TNRS server reply fails to match its expected pattern."""
54

    
55
def gwt_encode(str_):
    """Sanitise a value for embedding in a GWT-RPC payload: every run of
    characters other than word characters, '.', '(', ')', space and '-'
    collapses to a single space."""
    unsafe_run = r'[^\w.() -]+'
    return re.sub(unsafe_run, r' ', str_)
56

    
57
def parse_response(name, pattern, response):
    """Match *response* against the anchored regex *pattern* and return the
    tuple of captured groups.

    *name* labels the protocol step in the error message. Raises
    InvalidResponse when the response does not have the expected shape."""
    parsed = re.match(pattern, response)
    if parsed is None:
        raise InvalidResponse('Invalid '+name+' response:\n'+response)
    return parsed.groups()
61

    
62
def tnrs_request(taxons, debug=False):
    """Resolve *taxons* (a list of taxon name strings) against the TNRS
    web service and return the open urllib2 response for the tab-separated
    results download.

    Drives the four-step GWT-RPC conversation: submit the names, poll until
    the job completes, prepare the download, then fetch it.

    taxons: at most max_taxons names (server-side cap).
    debug: when True, log every request/response to stderr.
    Raises InvalidResponse when a server reply has an unexpected shape, and
    urllib2.HTTPError when the job does not finish within max_pause seconds.
    """
    assert len(taxons) <= max_taxons
    
    # Logging
    def debug_log(label, str_=''):
        if debug: sys.stderr.write('\n'+label+':\n'+str_+'\n')
    
    ## HTTP
    # Copy the shared header dict: the session cookie added below must not
    # leak into the module-level initial_headers (and thus into later calls).
    headers = dict(initial_headers)
    
    def do_request(request):
        # One POST to the TNRS GWT-RPC endpoint; returns (body, header text).
        debug_log('request', str(request))
        response = urllib2.urlopen(urllib2.Request(url, request, headers))
        response_str = streams.read_all(response)
        response_info = str(response.info())
        debug_log('response info', response_info)
        debug_log('response str', response_str)
        return response_str, response_info
    
    def do_repeated_request(request):
        # Poll with exponentially growing pauses while the server answers
        # with an HTTP error (job still running), up to max_pause total.
        pause = initial_pause
        total_pause = 0
        while True:
            total_pause += pause
            debug_log('total_pause', str(total_pause)+'s')
            time.sleep(pause) # wait for job to complete
            try: return do_request(request)
            except urllib2.HTTPError:
                # Re-raise from inside the handler (unlike the fragile bare
                # raise after the except block) once the next wait would push
                # the cumulative pause past max_pause: error is not temporary.
                if total_pause + pause*pause_growth_factor > max_pause: raise
            pause *= pause_growth_factor
    
    debug_log('Submit')
    request = submission_request_template.replace('[taxons]',
        r'\\n'.join(map(gwt_encode, taxons))) # double-escape \n
    response, response_info = do_request(request)
    key, = parse_response('submission', submission_response_pattern, response)
    debug_log('key', key)
    key_enc = gwt_encode(key)
    
    debug_log('Retrieve')
    request = retrieval_request_template.replace('[key]', key_enc)
    response, response_info = do_repeated_request(request)
    parse_response('retrieval', retrieval_response_pattern, response) # validate only
    session_id, = parse_response('retrieval info',
        retrieval_response_info_pattern, response_info)
    debug_log('session_id', session_id)
    headers['Cookie'] = 'JSESSIONID='+session_id # session required for download
    
    # The output of the retrieve step is unusable because the array has
    # different lengths depending on the taxonomic ranks present in the provided
    # taxon name. The extra download step is therefore necessary.
    
    debug_log('Prepare download')
    request = download_request_template.replace('[key]', key_enc)
    response, response_info = do_request(request)
    csv_url, = parse_response('download', download_response_pattern, response)
    csv_url += download_url_suffix
    debug_log('csv_url', csv_url)
    
    debug_log('Download')
    response = urllib2.urlopen(urllib2.Request(csv_url))
    debug_log('response info', str(response.info()))
    return response
(34-34/41)