#!/usr/bin/env python
# Loads a command's CSV output stream into a PostgreSQL table.
# The command may be run more than once.
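#
# Example invocation (hypothetical values; the env var names follow the
# opts.get_env_var()/opts.get_env_vars() calls in main(), and the other
# sql.db_config_names vars are read the same way):
#   table=plots schema=public engine=... <this script> cat plots.csv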
import csv
import os.path
import re
import subprocess
import sys

sys.path.append(os.path.dirname(__file__)+"/../lib")
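# (The appended lib/ directory provides the project-local modules imported below.)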
import csvs
import exc
import opts
import sql
import sql_gen
import streams
import strings
import util

def main():
    # Usage
    env_names = []
    def usage_err():
        raise SystemExit('Usage: '+opts.env_usage(env_names)+' '+sys.argv[0]
            +' input_cmd [args...]')

    # Parse args
    input_cmd = sys.argv[1:]
    if input_cmd == []: usage_err()

    # Get config from env vars
    table = opts.get_env_var('table', None, env_names)
    schema = opts.get_env_var('schema', 'public', env_names)
    db_config = opts.get_env_vars(sql.db_config_names, None, env_names)
    verbosity = util.cast(float, opts.get_env_var('verbosity', 1, env_names))
    if not (table is not None and 'engine' in db_config): usage_err()

    # Connect to DB
    def log(msg, level=1):
        '''Higher level -> more verbose'''
        if level <= verbosity: sys.stderr.write(msg.rstrip('\n')+'\n')
    db = sql.connect(db_config, log_debug=log)

    use_copy_from = [True]
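    # (A one-element list is used as a mutable flag so the nested load()/load_()
    # closures can turn COPY FROM off; Python 2 has no nonlocal statement.)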
    # Loads data into the table using the currently-selected approach.
    def load():
        # Open input stream
        proc = subprocess.Popen(input_cmd, stdout=subprocess.PIPE, bufsize=-1)
        in_ = proc.stdout

        # Get format info
        info = csvs.stream_info(in_, parse_header=True)
        dialect = info.dialect
        if csvs.is_tsv(dialect): use_copy_from[0] = False
        col_names = info.header
        for i, col in enumerate(col_names): # replace empty column names
            if col == '': col_names[i] = 'column_'+str(i)

        # Select schema and escape names
        def esc_name(name): return sql.esc_name(db, name)
        sql.run_query(db, 'SET search_path TO '+esc_name(schema))
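        # (With search_path set, the unqualified table name used below resolves
        # to the target schema.)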
        def load_():
            log('Creating table')
            typed_cols = [sql_gen.TypedCol('row_num', 'serial')]+[
                sql_gen.TypedCol(v, 'text') for v in col_names]
            sql.create_table(db, table, typed_cols)

            # Create COPY FROM statement
            if use_copy_from[0]:
                cur = db.db.cursor()
                copy_from = ('COPY '+esc_name(table)+' ('
                    +(', '.join(map(esc_name, col_names)))
                    +') FROM STDIN DELIMITER %(delimiter)s NULL %(null)s')
                assert not csvs.is_tsv(dialect)
                copy_from += ' CSV'
                if dialect.quoting != csv.QUOTE_NONE:
                    copy_from += ' QUOTE %(quotechar)s'
                    if dialect.doublequote: copy_from += ' ESCAPE %(quotechar)s'
                copy_from += ';\n'
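                # cursor.mogrify() binds the delimiter, NULL string, and quote
                # character into the statement with proper escaping (psycopg2).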
                copy_from = cur.mogrify(copy_from, dict(delimiter=
                    dialect.delimiter, null='', quotechar=dialect.quotechar))

            # Load the data
            line_in = streams.ProgressInputStream(in_, sys.stderr,
                'Processed %d row(s)', n=1000)
            try:
                if use_copy_from[0]:
                    log('Using COPY FROM')
                    log(copy_from, level=2)
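                    # copy_expert() streams the rest of the input file object to
                    # the server as COPY ... FROM STDIN data (psycopg2).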
                    db.db.cursor().copy_expert(copy_from, line_in)
                else:
                    log('Using INSERT')
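                    # Slower row-by-row fallback, used for TSV input and when a
                    # COPY FROM attempt has already failed (see the except
                    # handler at the end of main()).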
                    cols_ct = len(col_names)+1 # +1 for row_num
                    for row in csvs.make_reader(line_in, dialect):
                        row = map(strings.to_unicode, row)
                        row.insert(0, sql.default) # row_num is autogen
                        util.list_set_length(row, cols_ct) # truncate extra cols
                        sql.insert(db, table, row, log_level=4)
            finally:
                line_in.close() # also closes proc.stdout
                proc.wait()
        sql.with_savepoint(db, load_)
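        # (load_ runs inside a savepoint so a failed attempt is rolled back
        # cleanly, letting the except handler below retry with the other
        # approach.)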
        db.db.commit()

        log('Cleaning up table')
        sql.cleanup_table(db, table, col_names)
        db.db.commit()

        log('Adding indexes')
        for name in col_names:
            log('Adding index on '+name)
            sql.add_index(db, sql_gen.Col(name, table))
            db.db.commit()

        log('Creating errors table')
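        # Companion <table>_errors table (column, value, error), presumably
        # populated by later steps that record rejected values.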
        typed_cols = [sql_gen.TypedCol('column', 'text NOT NULL'),
            sql_gen.TypedCol('value', 'text'),
            sql_gen.TypedCol('error', 'text')]
        sql.create_table(db, table+'_errors', typed_cols, has_pkey=False)
        db.db.commit()

    try: load()
    except sql.DatabaseErrors as e:
        if use_copy_from[0]: # first try, using COPY FROM
            exc.print_ex(e, plain=True)
            use_copy_from[0] = False
            load() # try again with the INSERT approach
        else: raise

main()