#!/usr/bin/env python
# Loads a command's CSV output stream into a PostgreSQL table.
# The command may be run more than once.

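# Configuration is passed via environment variables (read in main() below):
# table (required), schema (default 'public'), has_row_num, errors_table_only,
# verbosity, and the DB connection settings named by sql.db_config_names.
# The positional arguments are the command whose stdout supplies the CSV,
# e.g. (hypothetical): table=specimens schema=staging <this script> cat data.csv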
import csv
import os.path
import re
import subprocess
import sys

sys.path.append(os.path.dirname(__file__)+"/../lib")
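# csvs, exc, opts, sql, sql_io, sql_gen, streams, strings and util below are
# project-local helper modules loaded from the ../lib directory added above.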

import csvs
import exc
import opts
import sql
import sql_io
import sql_gen
import streams
import strings
import util

def main():
    # Usage
    env_names = []
    def usage_err():
        raise SystemExit('Usage: '+opts.env_usage(env_names)+' '+sys.argv[0]
            +' input_cmd [args...]')

    # Parse args
    input_cmd = sys.argv[1:]

    # Get config from env vars
    table = opts.get_env_var('table', None, env_names)
    schema = opts.get_env_var('schema', 'public', env_names)
    has_row_num = opts.env_flag('has_row_num', True, env_names)
    db_config = opts.get_env_vars(sql.db_config_names, None, env_names)
    errors_table_only = opts.env_flag('errors_table_only', False, env_names)
    verbosity = util.cast(float, opts.get_env_var('verbosity', 3, env_names))
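    # errors_table_only (re)creates just the errors table and skips loading any
    # data; verbosity controls log() below (messages with level <= verbosity
    # are printed).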

    if not ((errors_table_only or input_cmd != []) and table != None
        and 'engine' in db_config):
        usage_err()

    # Connect to DB
    def log(msg, level=1):
        '''Higher level -> more verbose'''
        if level <= verbosity:
            sys.stderr.write(strings.to_raw_str(msg.rstrip('\n')+'\n'))
    db = sql.connect(db_config, log_debug=log)

    table = sql_gen.Table(table, schema)

    log('Creating errors table')
    errors_table = sql_io.errors_table(db, table, if_exists=False)
    if errors_table_only: sql.drop_table(db, errors_table)
    typed_cols = [
        sql_gen.TypedCol('column', 'text', nullable=False),
        sql_gen.TypedCol('value', 'text'),
        sql_gen.TypedCol('error_code', 'character varying(5)',
            nullable=False),
        sql_gen.TypedCol('error', 'text', nullable=False),
        ]
    sql.create_table(db, errors_table, typed_cols, has_pkey=False)
    index_cols = ['column', 'value', 'error_code', 'error']
    sql.add_index(db, index_cols, errors_table, unique=True)
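    # The unique index means each distinct (column, value, error_code, error)
    # combination is recorded at most once, so repeated runs don't pile up
    # duplicate error rows.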

    use_copy_from = [True]
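    # use_copy_from is a one-element list rather than a bare bool: Python 2 has
    # no `nonlocal`, so the nested functions below flip the flag via
    # use_copy_from[0]. COPY FROM is the fast path; row-by-row INSERT is the
    # fallback.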

    # Loads data into the table using the currently-selected approach.
    def load():
        # Open input stream
        proc = subprocess.Popen(input_cmd, stdout=subprocess.PIPE, bufsize=-1)
        in_ = proc.stdout

        # Get format info
        info = csvs.stream_info(in_, parse_header=True)
        dialect = info.dialect
        if csvs.is_tsv(dialect): use_copy_from[0] = False
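        # TSV input always takes the INSERT path: the COPY FROM statement built
        # below only handles the CSV dialect options.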
        col_names = map(strings.to_unicode, info.header)
        for i, col in enumerate(col_names): # replace empty column names
            if col == '': col_names[i] = 'column_'+str(i)

        # Escape names for use in SQL statements
        def esc_name(name): return db.esc_name(name)

        typed_cols = [sql_gen.TypedCol(v, 'text') for v in col_names]
        if has_row_num:
            typed_cols.insert(0, sql_gen.TypedCol('row_num', 'serial',
                nullable=False))

        log('Creating table')
        sql.create_table(db, table, typed_cols, has_pkey=has_row_num,
            col_indexes=False)
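        # Every CSV column is created as text; when has_row_num is set, the
        # serial row_num column doubles as the primary key (has_pkey=has_row_num).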

        # Remove rows from any failed COPY FROM
        sql.truncate(db, table)

        def load_():
            # Create COPY FROM statement
            if use_copy_from[0]:
                copy_from = ('COPY '+table.to_str(db)+' ('
                    +(', '.join(map(esc_name, col_names)))
                    +') FROM STDIN DELIMITER '+db.esc_value(dialect.delimiter)
                    +' NULL '+db.esc_value(''))
                assert not csvs.is_tsv(dialect)
                copy_from += ' CSV'
                if dialect.quoting != csv.QUOTE_NONE:
                    quote_str = db.esc_value(dialect.quotechar)
                    copy_from += ' QUOTE '+quote_str
                    if dialect.doublequote: copy_from += ' ESCAPE '+quote_str
                copy_from += ';\n'
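                # The generated statement looks roughly like this (hypothetical
                # table and columns):
                #   COPY "public"."t" ("a", "b") FROM STDIN DELIMITER ','
                #       NULL '' CSV QUOTE '"' ESCAPE '"';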

            # Load the data
            line_in = streams.ProgressInputStream(in_, sys.stderr, n=1000)
            try:
                if use_copy_from[0]:
                    log('Using COPY FROM')
                    log(copy_from, level=2)
                    db.db.cursor().copy_expert(copy_from, line_in)
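                    # copy_expert() streams the rest of the input directly to
                    # the server (psycopg2's COPY support; assumes db.db is the
                    # underlying DB-API connection)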
                else:
                    log('Using INSERT')
                    cols_ct = len(col_names)+1 # +1 for row_num
                    for row in csvs.make_reader(line_in, dialect):
                        row = map(strings.to_unicode, row)
                        row.insert(0, sql.default) # row_num is autogen
                        util.list_set_length(row, cols_ct) # truncate extra cols
                        sql.insert(db, table, row, cacheable=False, log_level=5)
            finally:
                line_in.close() # also closes proc.stdout
                proc.wait()
        sql.with_savepoint(db, load_)
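        # The savepoint lets a failed COPY FROM be rolled back without aborting
        # the enclosing transaction, so load() can be retried in INSERT mode.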

        log('Cleaning up table')
        sql_io.cleanup_table(db, table, col_names)

        log('Vacuuming and reanalyzing table')
        sql.vacuum(db, table)

    if not errors_table_only:
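        # If the fast COPY FROM path fails, the error is printed and load() is
        # rerun in row-by-row INSERT mode; an error on the retry propagates.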
        try: load()
        except sql.DatabaseErrors, e:
            if use_copy_from[0]: # first try
                exc.print_ex(e, plain=True)
                use_copy_from[0] = False
                load() # try again with different approach
            else: raise

main()