Revision 3079
Added by Aaron Marcuse-Kubitza over 12 years ago
sql.py
(rows with only an "old" line number were removed in this revision; rows with only a "new" line number were added; "... | ..." rows mark unchanged lines omitted by the diff view)

 old |  new |
-----|------|---------------------------------------------------------------------
 857 |  857 |         into=into, add_indexes_=True)
 858 |  858 |     return dict(items)
 859 |  859 | 
 860 |      | ##### Database structure queries
     |  860 | ##### Database structure introspection
 861 |  861 | 
     |  862 | #### Tables
     |  863 | 
     |  864 | def tables(db, schema_like='public', table_like='%', exact=False):
     |  865 |     if exact: compare = '='
     |  866 |     else: compare = 'LIKE'
     |  867 |     
     |  868 |     module = util.root_module(db.db)
     |  869 |     if module == 'psycopg2':
     |  870 |         conds = [('schemaname', sql_gen.CompareCond(schema_like, compare)),
     |  871 |             ('tablename', sql_gen.CompareCond(table_like, compare))]
     |  872 |         return values(select(db, 'pg_tables', ['tablename'], conds,
     |  873 |             order_by='tablename', log_level=4))
     |  874 |     elif module == 'MySQLdb':
     |  875 |         return values(run_query(db, 'SHOW TABLES LIKE '+db.esc_value(table_like)
     |  876 |             , cacheable=True, log_level=4))
     |  877 |     else: raise NotImplementedError("Can't list tables for "+module+' database')
     |  878 | 
     |  879 | def table_exists(db, table):
     |  880 |     table = sql_gen.as_Table(table)
     |  881 |     return list(tables(db, table.schema, table.name, exact=True)) != []
     |  882 | 
 862 |  883 | def table_row_count(db, table, recover=None):
 863 |  884 |     return value(run_query(db, mk_select(db, table, [sql_gen.row_count],
 864 |  885 |         order_by=None, start=0), recover=recover, log_level=3))
 ... |  ... | 
 936 |  957 |     else: raise NotImplementedError("Can't list constraint columns for "+module+
 937 |  958 |         ' database')
 938 |  959 | 
     |  960 | #### Functions
     |  961 | 
     |  962 | def function_exists(db, function):
     |  963 |     function = sql_gen.as_Function(function)
     |  964 |     
     |  965 |     info_table = sql_gen.Table('routines', 'information_schema')
     |  966 |     conds = [('routine_name', function.name)]
     |  967 |     schema = function.schema
     |  968 |     if schema != None: conds.append(('routine_schema', schema))
     |  969 |     # Exclude trigger functions, since they cannot be called directly
     |  970 |     conds.append(('data_type', sql_gen.CompareCond('trigger', '!=')))
     |  971 |     
     |  972 |     return list(values(select(db, info_table, ['routine_name'], conds,
     |  973 |         order_by='routine_schema', limit=1, log_level=4))) != []
     |  974 |         # TODO: order_by search_path schema order
     |  975 | 
     |  976 | ##### Structural changes
     |  977 | 
     |  978 | #### Columns
     |  979 | 
     |  980 | def add_col(db, table, col, comment=None, **kw_args):
     |  981 |     '''
     |  982 |     @param col TypedCol Name may be versioned, so be sure to propagate any
     |  983 |         renaming back to any source column for the TypedCol.
     |  984 |     @param comment None|str SQL comment used to distinguish columns of the same
     |  985 |         name from each other when they contain different data, to allow the
     |  986 |         ADD COLUMN query to be cached. If not set, query will not be cached.
     |  987 |     '''
     |  988 |     assert isinstance(col, sql_gen.TypedCol)
     |  989 |     
     |  990 |     while True:
     |  991 |         str_ = 'ALTER TABLE '+table.to_str(db)+' ADD COLUMN '+col.to_str(db)
     |  992 |         if comment != None: str_ += ' '+sql_gen.esc_comment(comment)
     |  993 |         
     |  994 |         try:
     |  995 |             run_query(db, str_, recover=True, cacheable=True, **kw_args)
     |  996 |             break
     |  997 |         except DuplicateException:
     |  998 |             col.name = next_version(col.name)
     |  999 |             # try again with next version of name
     | 1000 | 
     | 1001 | def add_not_null(db, col):
     | 1002 |     table = col.table
     | 1003 |     col = sql_gen.to_name_only_col(col)
     | 1004 |     run_query(db, 'ALTER TABLE '+table.to_str(db)+' ALTER COLUMN '
     | 1005 |         +col.to_str(db)+' SET NOT NULL', cacheable=True, log_level=3)
     | 1006 | 
 939 | 1007 | row_num_col = '_row_num'
 940 | 1008 | 
     | 1009 | row_num_typed_col = sql_gen.TypedCol(row_num_col, 'serial', nullable=False,
     | 1010 |     constraints='PRIMARY KEY')
     | 1011 | 
     | 1012 | def add_row_num(db, table):
     | 1013 |     '''Adds a row number column to a table. Its name is in row_num_col. It will
     | 1014 |     be the primary key.'''
     | 1015 |     add_col(db, table, row_num_typed_col, log_level=3)
     | 1016 | 
     | 1017 | #### Indexes
     | 1018 | 
     | 1019 | def add_pkey(db, table, cols=None, recover=None):
     | 1020 |     '''Adds a primary key.
     | 1021 |     @param cols [sql_gen.Col,...] The columns in the primary key.
     | 1022 |         Defaults to the first column in the table.
     | 1023 |     @pre The table must not already have a primary key.
     | 1024 |     '''
     | 1025 |     table = sql_gen.as_Table(table)
     | 1026 |     if cols == None: cols = [pkey(db, table, recover)]
     | 1027 |     col_strs = [sql_gen.to_name_only_col(v).to_str(db) for v in cols]
     | 1028 |     
     | 1029 |     run_query(db, 'ALTER TABLE '+table.to_str(db)+' ADD PRIMARY KEY ('
     | 1030 |         +(', '.join(col_strs))+')', recover=True, cacheable=True, log_level=3,
     | 1031 |         log_ignore_excs=(DuplicateException,))
     | 1032 | 
 941 | 1033 | def add_index(db, exprs, table=None, unique=False, ensure_not_null_=True):
 942 | 1034 |     '''Adds an index on column(s) or expression(s) if it doesn't already exist.
 943 | 1035 |     Currently, only function calls are supported as expressions.
 ... |  ... | 
 994 | 1086 |             index.name = next_version(index.name)
 995 | 1087 |             # try again with next version of name
 996 | 1088 | 
 997 |      | def add_pkey(db, table, cols=None, recover=None):
 998 |      |     '''Adds a primary key.
 999 |      |     @param cols [sql_gen.Col,...] The columns in the primary key.
1000 |      |         Defaults to the first column in the table.
1001 |      |     @pre The table must not already have a primary key.
1002 |      |     '''
1003 |      |     table = sql_gen.as_Table(table)
1004 |      |     if cols == None: cols = [pkey(db, table, recover)]
1005 |      |     col_strs = [sql_gen.to_name_only_col(v).to_str(db) for v in cols]
1006 |      |     
1007 |      |     run_query(db, 'ALTER TABLE '+table.to_str(db)+' ADD PRIMARY KEY ('
1008 |      |         +(', '.join(col_strs))+')', recover=True, cacheable=True, log_level=3,
1009 |      |         log_ignore_excs=(DuplicateException,))
1010 |      | 
1011 |      | def add_not_null(db, col):
1012 |      |     table = col.table
1013 |      |     col = sql_gen.to_name_only_col(col)
1014 |      |     run_query(db, 'ALTER TABLE '+table.to_str(db)+' ALTER COLUMN '
1015 |      |         +col.to_str(db)+' SET NOT NULL', cacheable=True, log_level=3)
1016 |      | 
1017 | 1089 | def add_index_col(db, col, suffix, expr, nullable=True):
1018 | 1090 |     if sql_gen.index_col(col) != None: return # already has index col
1019 | 1091 | 
 ... |  ... | 
1048 | 1120 | 
1049 | 1121 |     return expr
1050 | 1122 | 
1051 |      | already_indexed = object() # tells add_indexes() the pkey has already been added
     | 1123 | #### Tables
1052 | 1124 | 
1053 |      | def add_indexes(db, table, has_pkey=True):
1054 |      |     '''Adds an index on all columns in a table.
1055 |      |     @param has_pkey bool|already_indexed Whether a pkey instead of a regular
1056 |      |         index should be added on the first column.
1057 |      |         * If already_indexed, the pkey is assumed to have already been added
1058 |      |     '''
1059 |      |     cols = table_cols(db, table)
1060 |      |     if has_pkey:
1061 |      |         if has_pkey is not already_indexed: add_pkey(db, table)
1062 |      |         cols = cols[1:]
1063 |      |     for col in cols: add_index(db, col, table)
     | 1125 | ### Maintenance
1064 | 1126 | 
1065 |      | def add_col(db, table, col, comment=None, **kw_args):
1066 |      |     '''
1067 |      |     @param col TypedCol Name may be versioned, so be sure to propagate any
1068 |      |         renaming back to any source column for the TypedCol.
1069 |      |     @param comment None|str SQL comment used to distinguish columns of the same
1070 |      |         name from each other when they contain different data, to allow the
1071 |      |         ADD COLUMN query to be cached. If not set, query will not be cached.
1072 |      |     '''
1073 |      |     assert isinstance(col, sql_gen.TypedCol)
1074 |      |     
1075 |      |     while True:
1076 |      |         str_ = 'ALTER TABLE '+table.to_str(db)+' ADD COLUMN '+col.to_str(db)
1077 |      |         if comment != None: str_ += ' '+sql_gen.esc_comment(comment)
1078 |      |         
1079 |      |         try:
1080 |      |             run_query(db, str_, recover=True, cacheable=True, **kw_args)
1081 |      |             break
1082 |      |         except DuplicateException:
1083 |      |             col.name = next_version(col.name)
1084 |      |             # try again with next version of name
     | 1127 | def analyze(db, table):
     | 1128 |     table = sql_gen.as_Table(table)
     | 1129 |     run_query(db, 'ANALYZE '+table.to_str(db), log_level=3)
1085 | 1130 | 
1086 |      | row_num_typed_col = sql_gen.TypedCol(row_num_col, 'serial', nullable=False,
1087 |      |     constraints='PRIMARY KEY')
     | 1131 | def autoanalyze(db, table):
     | 1132 |     if db.autoanalyze: analyze(db, table)
1088 | 1133 | 
1089 |      | def add_row_num(db, table):
1090 |      |     '''Adds a row number column to a table. Its name is in row_num_col. It will
1091 |      |     be the primary key.'''
1092 |      |     add_col(db, table, row_num_typed_col, log_level=3)
     | 1134 | def vacuum(db, table):
     | 1135 |     table = sql_gen.as_Table(table)
     | 1136 |     db.with_autocommit(lambda: run_query(db, 'VACUUM ANALYZE '+table.to_str(db),
     | 1137 |         log_level=3))
1093 | 1138 | 
     | 1139 | ### Lifecycle
     | 1140 | 
1094 | 1141 | def drop_table(db, table):
1095 | 1142 |     table = sql_gen.as_Table(table)
1096 | 1143 |     return run_query(db, 'DROP TABLE IF EXISTS '+table.to_str(db)+' CASCADE')
 ... |  ... | 
1121 | 1168 |     if isinstance(col_indexes, list): col_indexes[0] = add_indexes_ # defer
1122 | 1169 |     elif col_indexes: add_indexes_() # add now
1123 | 1170 | 
1124 |      | def analyze(db, table):
1125 |      |     table = sql_gen.as_Table(table)
1126 |      |     run_query(db, 'ANALYZE '+table.to_str(db), log_level=3)
     | 1171 | already_indexed = object() # tells add_indexes() the pkey has already been added
1127 | 1172 | 
1128 |      | def autoanalyze(db, table):
1129 |      |     if db.autoanalyze: analyze(db, table)
     | 1173 | def add_indexes(db, table, has_pkey=True):
     | 1174 |     '''Adds an index on all columns in a table.
     | 1175 |     @param has_pkey bool|already_indexed Whether a pkey instead of a regular
     | 1176 |         index should be added on the first column.
     | 1177 |         * If already_indexed, the pkey is assumed to have already been added
     | 1178 |     '''
     | 1179 |     cols = table_cols(db, table)
     | 1180 |     if has_pkey:
     | 1181 |         if has_pkey is not already_indexed: add_pkey(db, table)
     | 1182 |         cols = cols[1:]
     | 1183 |     for col in cols: add_index(db, col, table)
1130 | 1184 | 
1131 |      | def vacuum(db, table):
1132 |      |     table = sql_gen.as_Table(table)
1133 |      |     db.with_autocommit(lambda: run_query(db, 'VACUUM ANALYZE '+table.to_str(db),
1134 |      |         log_level=3))
     | 1185 | ### Data
1135 | 1186 | 
1136 | 1187 | def truncate(db, table, schema='public', **kw_args):
1137 | 1188 |     '''For params, see run_query()'''
 ... |  ... | 
1143 | 1194 |     tables = lists.mk_seq(tables)
1144 | 1195 |     for table in tables: truncate(db, table, log_level=3)
1145 | 1196 | 
1146 |      | def tables(db, schema_like='public', table_like='%', exact=False):
1147 |      |     if exact: compare = '='
1148 |      |     else: compare = 'LIKE'
1149 |      |     
1150 |      |     module = util.root_module(db.db)
1151 |      |     if module == 'psycopg2':
1152 |      |         conds = [('schemaname', sql_gen.CompareCond(schema_like, compare)),
1153 |      |             ('tablename', sql_gen.CompareCond(table_like, compare))]
1154 |      |         return values(select(db, 'pg_tables', ['tablename'], conds,
1155 |      |             order_by='tablename', log_level=4))
1156 |      |     elif module == 'MySQLdb':
1157 |      |         return values(run_query(db, 'SHOW TABLES LIKE '+db.esc_value(table_like)
1158 |      |             , cacheable=True, log_level=4))
1159 |      |     else: raise NotImplementedError("Can't list tables for "+module+' database')
1160 |      | 
1161 |      | def table_exists(db, table):
1162 |      |     table = sql_gen.as_Table(table)
1163 |      |     return list(tables(db, table.schema, table.name, exact=True)) != []
1164 |      | 
1165 |      | def function_exists(db, function):
1166 |      |     function = sql_gen.as_Function(function)
1167 |      |     
1168 |      |     info_table = sql_gen.Table('routines', 'information_schema')
1169 |      |     conds = [('routine_name', function.name)]
1170 |      |     schema = function.schema
1171 |      |     if schema != None: conds.append(('routine_schema', schema))
1172 |      |     # Exclude trigger functions, since they cannot be called directly
1173 |      |     conds.append(('data_type', sql_gen.CompareCond('trigger', '!=')))
1174 |      |     
1175 |      |     return list(values(select(db, info_table, ['routine_name'], conds,
1176 |      |         order_by='routine_schema', limit=1, log_level=4))) != []
1177 |      |         # TODO: order_by search_path schema order
1178 |      | 
1179 |      | ##### Database management
1180 |      | 
1181 | 1197 | def empty_db(db, schema='public', **kw_args):
1182 | 1198 |     '''For kw_args, see tables()'''
1183 | 1199 |     for table in tables(db, schema, **kw_args): truncate(db, table, schema)
1184 | 1200 | 
     | 1201 | ##### Database management
     | 1202 | 
1185 | 1203 | ##### Data cleanup
1186 | 1204 | 
1187 | 1205 | def cleanup_table(db, table, cols):
sql.py: Organized Database structure introspection and Structural changes functions into subsections
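
For orientation, here is a minimal usage sketch that walks the reorganized helpers roughly in the order of the new subsections (structure introspection, then structural changes, then maintenance). It is illustrative only: it assumes `db` is an already-open sql.py connection object obtained elsewhere in the codebase, the table name 'plots' and function name '_merge' are made-up examples, and constructors such as sql_gen.Function(name) are assumed to work analogously to sql_gen.Table(name, schema).

import sql
import sql_gen

def exercise_helpers(db):
    # db: an open connection wrapper as used throughout sql.py (assumed given)
    table = sql_gen.Table('plots', 'public')  # 'plots' is a hypothetical example table

    # Database structure introspection > Tables
    print list(sql.tables(db, table_like='plots'))  # names of matching tables
    print sql.table_exists(db, table)               # True if the table exists

    # Database structure introspection > Functions
    # ('_merge' is a placeholder name; function_exists() normalizes its argument
    # via sql_gen.as_Function() and skips trigger functions)
    print sql.function_exists(db, sql_gen.Function('_merge'))

    # Structural changes > Columns: on DuplicateException, add_col() retries
    # with a versioned column name, as in the loop shown in the diff above
    sql.add_col(db, table, sql_gen.TypedCol('notes', 'text'),
        comment='free-form notes')

    # Structural changes > Indexes: pkey on the first column by default
    sql.add_pkey(db, table)

    # Maintenance helpers
    sql.analyze(db, table)  # ANALYZE
    sql.vacuum(db, table)   # VACUUM ANALYZE, run with autocommit

The point of the reorganization is visible in the call order: introspection, structural-change, and maintenance helpers are now grouped under their own subsections instead of being scattered through the file.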