#!/usr/bin/env python
# Loads a command's CSV output stream into a PostgreSQL table.
# The command may be run more than once.
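#
# Example invocation (illustrative only; the exact env var names come from the
# opts module and sql.db_config_names -- this script itself only references
# 'table', 'schema', 'errors_table_only', 'verbosity', and the 'engine'
# connection var):
#   table=my_table schema=public engine=... [other connection vars] \
#       <this_script> cat data.csv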
import csv
import os.path
import re
import subprocess
import sys

sys.path.append(os.path.dirname(__file__)+"/../lib")

import csvs
import exc
import opts
import sql
import sql_gen
import streams
import strings
import util

def main():
    # Usage
    env_names = []
    def usage_err():
        raise SystemExit('Usage: '+opts.env_usage(env_names)+' '+sys.argv[0]
            +' input_cmd [args...]')

    # Parse args
    input_cmd = sys.argv[1:]

    # Get config from env vars
    table = opts.get_env_var('table', None, env_names)
    schema = opts.get_env_var('schema', 'public', env_names)
    db_config = opts.get_env_vars(sql.db_config_names, None, env_names)
    errors_table_only = opts.env_flag('errors_table_only', False, env_names)
    verbosity = util.cast(float, opts.get_env_var('verbosity', 1, env_names))

    if not (input_cmd != [] and table is not None and 'engine' in db_config):
        usage_err()

    # Connect to DB
    def log(msg, level=1):
        '''Higher level -> more verbose'''
        if level <= verbosity: sys.stderr.write(msg.rstrip('\n')+'\n')
    db = sql.connect(db_config, log_debug=log)

    def mk_errors_table():
        log('Creating errors table')
        errors_table = sql.errors_table(db, table, if_exists=False)
        sql.drop_table(db, errors_table)
        typed_cols = [
            sql_gen.TypedCol('column', 'text', nullable=False),
            sql_gen.TypedCol('value', 'text'),
            sql_gen.TypedCol('error_code', 'character varying(5)',
                nullable=False),
            sql_gen.TypedCol('error', 'text', nullable=False),
            ]
        sql.create_table(db, errors_table, typed_cols, has_pkey=False)
        index_cols = ['column', 'value', 'error_code', 'error']
        sql.add_index(db, index_cols, errors_table, unique=True)

    # Whether to load via COPY FROM; wrapped in a one-element list so the
    # nested functions below can modify it (this code targets Python 2,
    # which has no `nonlocal`).
    use_copy_from = [True]

    # Loads data into the table using the currently-selected approach.
    def load():
        # Open input stream
        proc = subprocess.Popen(input_cmd, stdout=subprocess.PIPE, bufsize=-1)
        in_ = proc.stdout

        # Get format info
        info = csvs.stream_info(in_, parse_header=True)
        dialect = info.dialect
        if csvs.is_tsv(dialect): use_copy_from[0] = False
        col_names = info.header
        for i, col in enumerate(col_names): # replace empty column names
            if col == '': col_names[i] = 'column_'+str(i)

        # Select schema and escape names
        def esc_name(name): return sql.esc_name(db, name)
        sql.run_query(db, 'SET search_path TO '+esc_name(schema))

        typed_cols = [sql_gen.TypedCol('row_num', 'serial', nullable=False)]+[
            sql_gen.TypedCol(v, 'text') for v in col_names]

        def load_():
            log('Creating table')
            sql.create_table(db, table, typed_cols, col_indexes=False)

            # Create COPY FROM statement
            if use_copy_from[0]:
                cur = db.db.cursor()
                copy_from = ('COPY '+esc_name(table)+' ('
                    +(', '.join(map(esc_name, col_names)))
                    +') FROM STDIN DELIMITER %(delimiter)s NULL %(null)s')
                assert not csvs.is_tsv(dialect)
                copy_from += ' CSV'
                if dialect.quoting != csv.QUOTE_NONE:
                    copy_from += ' QUOTE %(quotechar)s'
                    if dialect.doublequote: copy_from += ' ESCAPE %(quotechar)s'
                copy_from += ';\n'
                copy_from = cur.mogrify(copy_from, dict(delimiter=
                    dialect.delimiter, null='', quotechar=dialect.quotechar))
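                # Illustrative result (the actual statement depends on the
                # input's dialect and on cur.mogrify's quoting), e.g.:
                #   COPY "my_table" ("col_a", "col_b") FROM STDIN
                #       DELIMITER ',' NULL '' CSV QUOTE '"' ESCAPE '"';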

            # Load the data
            line_in = streams.ProgressInputStream(in_, sys.stderr, n=1000)
            try:
                if use_copy_from[0]:
                    log('Using COPY FROM')
                    log(copy_from, level=2)
                    db.db.cursor().copy_expert(copy_from, line_in)
                else:
                    log('Using INSERT')
                    cols_ct = len(col_names)+1 # +1 for row_num
                    for row in csvs.make_reader(line_in, dialect):
                        row = map(strings.to_unicode, row)
                        row.insert(0, sql.default) # row_num is autogen
                        util.list_set_length(row, cols_ct) # truncate extra cols
                        sql.insert(db, table, row, log_level=5)
            finally:
                line_in.close() # also closes proc.stdout
                proc.wait()
        sql.with_savepoint(db, load_)

        log('Cleaning up table')
        sql.cleanup_table(db, table, col_names)

        log('Adding indexes')
        for col in typed_cols[1:]: # exclude pkey
            log('Adding index on '+col.name)
            sql.add_index(db, col.name, table, ensure_not_null=False)

        log('Vacuuming table')
        db.db.rollback() # end the open transaction; VACUUM cannot run inside one
        sql.vacuum(db, table)

        mk_errors_table()

    if errors_table_only: mk_errors_table()
    else:
        try: load()
        except sql.DatabaseErrors, e:
            if use_copy_from[0]: # first try
                exc.print_ex(e, plain=True)
                use_copy_from[0] = False
                load() # try again with a different approach
            else: raise

main()