Project

General

Profile

1
#!/usr/bin/env python
2
# Downloads REMIB data for all nodes
3
# Usage: env [start=...] [n=...] self 2>>log
4

    
5
import csv
6
import itertools
7
import os.path
8
import StringIO
9
import sys
10
import urllib2
11

    
12
sys.path.append(os.path.dirname(__file__)+"/../../../lib")
13

    
14
import exc
15
import opts
16
import profiling
17
import streams
18
import strings
19
import timeout
20
import util
21

    
22
timeout_ = 20 # sec
23

    
24
alphabet = map(chr, xrange(ord('A'), ord('Z')+1))
25

    
26
class InputException(Exception):
    """Signals a malformed or unexpected line in a REMIB response."""
27

    
28
class EmptyResponseException(InputException):
    """Signals a REMIB response with no metadata row at all (node likely absent)."""
29

    
30
def is_ignore(line):
    """Return True if `line` carries no CSV data and should be skipped.

    A line is ignorable when, after stripping its line ending, it is empty,
    tab-indented (REMIB uses tab-indented trailers for messages), or contains
    no comma (so it cannot be a CSV data row).
    """
    line = strings.remove_line_ending(line)
    # membership test is the idiomatic form of line.find(',') < 0
    return line == '' or line.startswith('\t') or ',' not in line
33

    
34
def main():
35
    # Get config from env vars
36
    start = util.cast(int, opts.get_env_var('start', 1))
37
    end = util.cast(int, util.none_if(opts.get_env_var('n', None), u''))
38
    if end == None: end = 150 # about 120 nodes listed on the web form
39
    else: end += start
40
    
41
    def clear_line(): sys.stderr.write('\n')
42
    log_indent = 0
43
    def log(msg, line_ending='\n'):
44
        sys.stderr.write(('    '*log_indent)+msg+line_ending)
45
    
46
    os.chdir(os.path.dirname(__file__)) # dir of output files
47
    
48
    # Get by family ('familia') because that is the most general level at which
49
    # an identification can be made. This assumes all records have a family.
50
    url_template = ('http://www.conabio.gob.mx/remib/cgi-bin/'
51
        'remib_distribucion.cgi?lengua=EN&niveltax=familia&taxon=[prefix]%25&'
52
        'pais=Todos&pais_otro=&estado=100&formato=csv&mapa=no&mapabase=estados'
53
        '&coleccion=id%3D[node_id]')
54
    
55
    for node_id in xrange(start, end):
56
        log('Processing node #'+str(node_id)+'...')
57
        log_indent += 1
58
        profiler = profiling.ItersProfiler(start_now=True, iter_text='row')
59
        
60
        filename = 'node.'+str(node_id)+'.specimens.csv'
61
        out = streams.LineCountOutputStream(open(filename, 'w'))
62
        def log_ex(e):
63
            clear_line()
64
            log('! Output line '+str(out.line_num)+': '+exc.str_(e))
65
        start_line_num = out.line_num
66
        node_url_template = url_template.replace('[node_id]', str(node_id))
67
        
68
        for prefix_chars in itertools.product(alphabet, repeat=2):
69
            prefix = ''.join(prefix_chars)
70
            log('Processing prefix '+prefix+'...')
71
            row_ct = 0
72
            def print_status(line_ending='\n'):
73
                log('Processed '+str(row_ct)+' row(s)', line_ending)
74
            log_indent += 1
75
            
76
            url = node_url_template.replace('[prefix]', prefix)
77
            stream = streams.StreamIter(streams.TimeoutInputStream(
78
                urllib2.urlopen(url), timeout_))
79
            
80
            try:
81
                util.skip(stream, is_ignore) # skip header
82
                try: metadata_row = csv.reader(stream).next()
83
                except StopIteration: raise EmptyResponseException()
84
                if metadata_row[0] != 'COLLECTION': raise InputException(
85
                    'Invalid metadata row: '+str(metadata_row))
86
                
87
                # Copy lines
88
                for line in stream:
89
                    if is_ignore(line):
90
                        error = strings.remove_prefix('\t\t', line)
91
                        if len(error) != len(line): raise InputException(error)
92
                        break
93
                    out.write(line)
94
                    
95
                    row_ct += 1
96
                    if row_ct % 100 == 0: print_status('\r')
97
                        # CR at end so next print overwrites msg
98
            except EmptyResponseException, e: # must come before InputException
99
                log_ex(e)
100
                break # assume node doesn't exist, so abort node
101
            except InputException, e: log_ex(e)
102
            except timeout.TimeoutException, e:
103
                log_ex(e)
104
                break # assume node is down, so abort node
105
            finally: # still run if break is called
106
                stream.close()
107
                
108
                profiler.add_iters(row_ct)
109
                print_status()
110
                log_indent -= 1
111
            
112
        profiler.stop()
113
        log(profiler.msg())
114
        log_indent -= 1
115

    
116
# Guard the entry point so the module can be imported (e.g. for testing)
# without triggering the download; direct execution behaves as before.
if __name__ == '__main__':
    main()
(2-2/2)