#!/usr/bin/env python
# Loads a command's CSV output stream into a PostgreSQL table.
# The command may be run more than once.
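#
# Example invocation (a sketch: 'csv2db' stands in for this script's name, and
# all connection env vars besides 'engine' are assumptions, since their names
# come from sql.db_config_names):
#   env table=plots schema=public engine=PostgreSQL database=mydb user=me \
#       ./csv2db cat plots.csv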

import csv
import os.path
import subprocess
import sys

sys.path.append(os.path.dirname(__file__)+"/../lib")

import csvs
import exc
import opts
import sql
import sql_gen
import streams
import strings
import util

def main():
    # Usage
    env_names = []
    def usage_err():
        raise SystemExit('Usage: '+opts.env_usage(env_names)+' '+sys.argv[0]
            +' input_cmd [args...]')
    
    # Parse args
    input_cmd = sys.argv[1:]
    
    # Get config from env vars
    table = opts.get_env_var('table', None, env_names)
    schema = opts.get_env_var('schema', 'public', env_names)
    db_config = opts.get_env_vars(sql.db_config_names, None, env_names)
    errors_table_only = opts.env_flag('errors_table_only', False, env_names)
    verbosity = util.cast(float, opts.get_env_var('verbosity', 1, env_names))
    
    if not (input_cmd != [] and table is not None and 'engine' in db_config):
        usage_err()
    
    # Connect to DB
    def log(msg, level=1):
        '''Higher level -> more verbose'''
        if level <= verbosity: sys.stderr.write(msg.rstrip('\n')+'\n')
    db = sql.connect(db_config, log_debug=log)
    
    def mk_errors_table():
        log('Creating errors table')
        errors_table = sql.errors_table(db, table, if_exists=False)
        sql.drop_table(db, errors_table)
        typed_cols = [
            sql_gen.TypedCol('column', 'text', nullable=False),
            sql_gen.TypedCol('value', 'text'),
            sql_gen.TypedCol('error_code', 'character varying(5)',
                nullable=False),
            sql_gen.TypedCol('error', 'text', nullable=False),
            ]
        sql.create_table(db, errors_table, typed_cols, has_pkey=False)
        index_cols = ['column', 'value', 'error_code', 'error']
        sql.add_index(db, index_cols, errors_table, unique=True)
        db.db.commit()
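    # For reference, the errors table created above looks roughly like this
    # (its actual name is chosen by sql.errors_table(), so the name shown
    # here is only a placeholder):
    #   CREATE TABLE "<table>.errors" (
    #       "column" text NOT NULL,
    #       value text,
    #       error_code character varying(5) NOT NULL,
    #       error text NOT NULL
    #   );
    #   CREATE UNIQUE INDEX ON "<table>.errors"
    #       ("column", value, error_code, error);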
    
    # Whether to use COPY FROM; stored in a list so nested functions can
    # rebind it (Python 2 has no nonlocal statement)
    use_copy_from = [True]
    
    def load():
        '''Loads data into the table using the currently-selected approach.'''
        # Open input stream
        proc = subprocess.Popen(input_cmd, stdout=subprocess.PIPE, bufsize=-1)
        in_ = proc.stdout
        
        # Get format info
        info = csvs.stream_info(in_, parse_header=True)
        dialect = info.dialect
        if csvs.is_tsv(dialect): use_copy_from[0] = False
        col_names = info.header
        for i, col in enumerate(col_names): # replace empty column names
            if col == '': col_names[i] = 'column_'+str(i)
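        # e.g. a header of ['id', '', 'name'] becomes ['id', 'column_1', 'name']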
        
        # Select schema and escape names
        def esc_name(name): return sql.esc_name(db, name)
        sql.run_query(db, 'SET search_path TO '+esc_name(schema))
        
        # row_num is a surrogate serial column; all data columns are loaded
        # as text and cleaned up later
        typed_cols = [sql_gen.TypedCol('row_num', 'serial', nullable=False)]+[
            sql_gen.TypedCol(v, 'text') for v in col_names]
        
        def load_():
            log('Creating table')
            sql.create_table(db, table, typed_cols, col_indexes=False)
            
            # Create COPY FROM statement
            if use_copy_from[0]:
                cur = db.db.cursor()
                copy_from = ('COPY '+esc_name(table)+' ('
                    +(', '.join(map(esc_name, col_names)))
                    +') FROM STDIN DELIMITER %(delimiter)s NULL %(null)s')
                assert not csvs.is_tsv(dialect)
                copy_from += ' CSV'
                if dialect.quoting != csv.QUOTE_NONE:
                    copy_from += ' QUOTE %(quotechar)s'
                    if dialect.doublequote: copy_from += ' ESCAPE %(quotechar)s'
                copy_from += ';\n'
                copy_from = cur.mogrify(copy_from, dict(delimiter=
                    dialect.delimiter, null='', quotechar=dialect.quotechar))
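                # For a typical comma-delimited CSV with a hypothetical table
                # "plots" and columns col1, col2, the mogrified statement is
                # roughly:
                #   COPY "plots" ("col1", "col2") FROM STDIN
                #       DELIMITER ',' NULL '' CSV QUOTE '"' ESCAPE '"';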
            
            # Load the data
            line_in = streams.ProgressInputStream(in_, sys.stderr, n=1000)
            try:
                if use_copy_from[0]:
                    log('Using COPY FROM')
                    log(copy_from, level=2)
                    db.db.cursor().copy_expert(copy_from, line_in)
                else:
                    log('Using INSERT')
                    cols_ct = len(col_names)+1 # +1 for row_num
                    for row in csvs.make_reader(line_in, dialect):
                        row = map(strings.to_unicode, row)
                        row.insert(0, sql.default) # row_num is autogen
                        util.list_set_length(row, cols_ct) # truncate extra cols
                        sql.insert(db, table, row, log_level=4)
            finally:
                line_in.close() # also closes proc.stdout
                proc.wait()
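        # Running load_ inside a savepoint means a failure (e.g. COPY FROM
        # choking on malformed quoting) rolls back the partial load, so the
        # INSERT-based retry below can re-create the table from scratch.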
        sql.with_savepoint(db, load_)
        db.db.commit()
        
        log('Cleaning up table')
        sql.cleanup_table(db, table, col_names)
        db.db.commit()
        
        log('Adding indexes')
        for col in typed_cols[1:]: # exclude pkey
            log('Adding index on '+col.name)
            sql.add_index(db, col.name, table, ensure_not_null=False)
            db.db.commit()
        
        log('Vacuuming table')
        db.db.rollback() # VACUUM cannot run inside a transaction block
        sql.vacuum(db, table)
        
        mk_errors_table()
    
    if errors_table_only: mk_errors_table()
    else:
        try: load()
        except sql.DatabaseErrors as e:
            if use_copy_from[0]: # first try
                exc.print_ex(e, plain=True)
                use_copy_from[0] = False
                load() # try again with the INSERT-based approach
            else: raise

main()