selfDir_uZPPqC := $(dir $(lastword $(MAKEFILE_LIST)))
root := $(selfDir_uZPPqC)..
include $(root)/lib/common.Makefile


##### Configuration

# Command line
log ?= $(if $(test),,1)
profile ?=
quiet ?=
reverify ?= 1
schema_only ?=
use_staged ?= $(by_col)

# Makefile
exts ?= csv tsv txt dmp xml
test_n ?= 2

##### Vars/functions

# Paths
datasrc := $(patsubst .%,%,$(notdir $(realpath .)))
bin := $(root)/bin
mappings := $(root)/mappings

# Make
SHELL := /bin/bash
selfMake = $(MAKE) --makefile=../input.Makefile
subMake = $(MAKE) $(@:$(root)/%=%) --directory=$(root)
+_ = $(+:_%=)
addBeforeExt = $(basename $(2))$(1)$(suffix $(2))
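# A minimal example (the file name is hypothetical): $(call addBeforeExt,.new,foo.csv)
# expands to foo.new.csv, i.e. $(1) is spliced in just before the extension of $(2).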

# Terminal
termCols := $(shell tput cols)
esc := '['
reset := $(esc)'0m'
emph := $(esc)'7m '
endEmph := ' '$(reset)

# User interaction

confirm = $(if $(shell read -p $(emph)"$(1)"$(endEmph)$$'$(if\
$(2),\n$(2))\nContinue? (y/n) ' REPLY; test "$$REPLY" = y && echo t),,\
$(error Aborting))
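# Example use (the wording is illustrative; compare confirm_rm_schema below):
#     $(call confirm,WARNING: This will delete the schema!,To save it: make a backup first)
# prompts on the terminal and calls $(error Aborting) unless the reply is exactly "y".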

# Commands
MKDIR = mkdir -p
mkdir = $(MKDIR) $(@D)
CP = cp -p
diff = diff --unified=2
diffIgnoreSpace = $(diff) --ignore-space-change
diffVerbose = $(if $(verbose),diff --side-by-side --left-column\
--width=$(termCols),$(diff))

# BIEN commands
sortFilenames = $(shell $(bin)/sort_filenames $(1))
selfMap = $(bin)/cols 0 0
psqlOpts := --set ON_ERROR_STOP=1 --quiet
psqlAsBien := $(bin)/psql_vegbien $(psqlOpts)
searchPath := $(datasrc),$(shell prefix=; . $(bin)/vegbien_dest;\
echo "$$schemas")
searchPath := "$(subst $(comma),"$(comma)",$(searchPath))"
# Usage: ($(inDatasrc); cat $(file))|$(psqlCmd)
inDatasrc := echo 'SET search_path TO $(searchPath);'
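# A concrete instance of the usage above (the table name is hypothetical), matching
# how $(cleanup) later in this file invokes it:
#     ($(inDatasrc); cat mytable/cleanup.sql)|$(psqlAsBien)
# i.e. prepend the SET search_path so the SQL runs with this datasource's schema
# first on the search path.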

# SVN
addDir = $(if $(wildcard $(1)/),svn add --depth=empty $(1),svn mkdir $(1))
setSvnIgnore = svn propset svn:ignore $(2) $(1)
define addDirWithIgnore
$(addDir)
$(setSvnIgnore)
endef
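# Example (matches the `add` target below): $(call addDirWithIgnore,verify,'*.out')
# svn-adds (or creates) the verify/ dir and sets its svn:ignore property to *.out.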

##### Environment

export PATH := $(bin):$(PATH)

##### General targets

all: _always maps ;

clean: _always
	$(RM) $(all)

remake: _always clean
	+$(selfMake)
# re-run make so that the cache of existing files is reset

# Only remake if the output doesn't exist. This prevents unintentional remaking
# when the make script is newly checked out from svn (which sets the mod time to
# now) but the output is synced externally.
# Can't remove the prereq to do this, because it determines when the rule applies.
make_script = $(if $(wildcard $@),,"time" ./$< >$@)

%/: % _always ;

%: %.make
	$(make_script)
.PRECIOUS: % # save partial outputs of aborted src make scripts
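# How the *.make convention plays out (file names are hypothetical): an executable
# script taxa.csv.make in this dir produces taxa.csv the first time make runs;
# because of the $(wildcard) guard in make_script, the script is not re-run once
# taxa.csv exists, even if taxa.csv.make has a newer mod time.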

##### SVN

svnFilesGlob := */{,{,.}{map,*terms,VegBIEN}.csv{,.*},*header.*,*.sql,test*.xml*}
svnFilesGlob := {*schema*.sql,{,*/}*.make,$(svnFilesGlob)}
_svnFilesGlob := _MySQL/{,*schema*.sql,*.make}
svnFiles = $(filter-out _% logs/%,$(call wildcard/,$(svnFilesGlob)))\
$(call wildcard/,$(_svnFilesGlob))

add: _always
	$(call setSvnIgnore,.,'*')
	$(call addDirWithIgnore,logs,$$'*.log.sql\n*.trace')
	$(call addDirWithIgnore,verify,'*.out')
	$(call addFile,import_order.txt)
	$(call add*,$(svnFiles))

# Adds a new table subdir
%/add: _always
	$(call addDirWithIgnore,$*,'*')
	$(call addDirWithIgnore,$*/logs,$$'*.log.sql\n*.trace')
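# Typical invocation (names are placeholders):
#     make inputs/<datasrc>/<newtable>/add
# creates the new table subdir and its logs/ dir with the svn:ignore values shown above.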

##### Existing maps discovery

sortFile := import_order.txt

tables := $(if $(wildcard $(sortFile)),$(shell cat $(sortFile)))
    # $(shell) replaces "\n" with " "
allSubdirs := $(call wildcard/,*/)
allTables := $(call sortFilenames,$(filter-out _% verify logs,$(allSubdirs:%/=%)))
joinedTables := $(filter-out $(tables),$(allTables))
allTables := $(joinedTables) $(tables)# move joined tables to beginning
ifeq ($(tables),)# none specified in sort file
tables := $(allTables)
endif

anyMap := %/map.csv %/VegBIEN.csv %/unmapped_terms.csv %/new_terms.csv

extsFilter := $(addprefix %.,$(exts))
dataOnly = $(filter $(extsFilter),$(1))

anyTest = $*/test.%
srcsOnly = $(filter-out $(anyMap) $(anyTest) %/logs,$(call dataOnly,$(1)))

vocab := $(mappings)/VegCore.csv
coreMap := $(mappings)/VegCore-VegBIEN.csv
dict := $(mappings)/Veg+-VegCore.csv

viaMaps := $(tables:%=%/map.csv)

autogenMaps := $(subst map.,VegBIEN.,$(viaMaps))
directMaps := $(autogenMaps) $(filter-out $(autogenMaps),\
$(wildcard */VegBIEN.csv))

##### Sources

srcs = $(call sortFilenames,$(call srcsOnly,$(wildcard $*/*)))
nonHeaderSrcs = $(filter-out %/header.csv,$(srcs))
isRef = $(if $(nonHeaderSrcs),,1)
    # empty subdir, so it references an already-installed staging table
isXml = $(filter %.xml,$(nonHeaderSrcs))
nonXml = $(if $(isXml),,1)
isCsv = $(if $(nonHeaderSrcs),$(if $(isXml),,1))
    # true if $(srcs) is non-empty and contains no *.xml
catSrcs = $(bin)/cat$(if $(nonXml),_csv) $(srcs)
withCatSrcs = $(catSrcs:$(bin)/%=$(bin)/with_%) --

# Usage: `make {--silent|-s} inputs/<datasrc>/cat` (don't echo make commands)
cat: $(tables:%=%/cat) _always ;

%/cat: _always
	$(catSrcs)

##### Staging tables installation

srcTable := %.src

dbExports := $(sort $(wildcard *schema*.sql))# schemas first
ifeq ($(schema_only),)
dbExports += $(sort $(filter-out $(dbExports),$(wildcard *.sql)))# all others
endif
dbExports := $(strip $(dbExports))# += adds extra whitespace
allInstalls := $(if $(dbExports),sql) $(allTables)

install: _always schema $(allInstalls:%=%/install) ;

uninstall: _always confirm_rm_schema rm_schema ;
# rm_schema will also drop all staging tables

reinstall: _always uninstall install ;

confirm_rm_schema: _always
	$(if $(filter TNRS,$(datasrc)),$(call confirm,WARNING: This will delete the\
TNRS cache!,To save it: make backups/TNRS.backup-remake))

schema: _always
	-echo 'CREATE SCHEMA "$(datasrc)";'|$(psqlAsBien)
# ignore errors if schema exists

rm_schema: _always
	echo 'DROP SCHEMA IF EXISTS "$(datasrc)" CASCADE;'|$(psqlAsBien)

installLog := logs/install.log.sql

logInstall = $(if $(log),$(if $(quiet),$(2)$(1)$(installLog) 2>&1,2>&1|tee $(3)\
$(1)$(installLog)))
logInstallRoot = $(call logInstall,,>)
logInstall* = $(call logInstall,$*/,>)
logInstall*Add = $(call logInstall,$*/,>>,-a)# append to log
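# Rough expansions, assuming log is set (the default outside of tests): with quiet
# unset, $(logInstall*) becomes `2>&1|tee $*/logs/install.log.sql` (show and log);
# with quiet=1 it becomes `>$*/logs/install.log.sql 2>&1` (log only).
# $(logInstall*Add) is the same but appends (tee -a / >>).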

# Must come before %/install to override it
sql/install: $(dbExports)
	($(inDatasrc); cat $+|grep -vF 'SET search_path')|"time" $(psqlAsBien) \
--set=schema='"$(datasrc)"' $(logInstallRoot)

# Must come before `%.sql: _MySQL/%.sql` to override it
%.sql: %.sql.make
	$(make_script)

# The export must be created with:
# `--compatible=postgresql --add-locks=false --set-charset --no-create-info`
# Must come before `%.sql: _MySQL/%.sql` to override it
%.data.sql: _MySQL/%.data.sql
	$(bin)/my2pg.data <$< >$@

# The export must be created with:
# `--compatible=postgresql --add-locks=false --set-charset`
# Add `--no-data` to create a schema-only export.
%.sql: _MySQL/%.sql
	$(bin)/my2pg <$< >$@
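# Putting the flags above together, a plausible command for creating the data
# export (database/table names are placeholders; check the flags against your
# mysqldump version):
#     mysqldump --compatible=postgresql --add-locks=false --set-charset \
#         --no-create-info <db> <table> >_MySQL/<table>.data.sql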

cleanup = $(if $(wildcard $*/cleanup.sql),($(inDatasrc); cat $*/cleanup.sql)\
|"time" $(psqlAsBien) --echo-all --set=table='"$*"' $(logInstall*Add),\
(prefix=; . $(bin)/vegbien_dest; env schema=$(datasrc) table=$* $(bin)/csv2db)\
$(logInstall*Add))

define exportHeader
$(cleanup)
echo 'SELECT * FROM "$(datasrc)"."$*" LIMIT 0;'|$(psqlAsBien) \
--no-align --field-separator=, --pset=footer=off >$*/header.csv
endef

# For staging tables which are derived by joining together other staging tables.
%/install %/header.csv: %/create.sql _always
	($(inDatasrc); echo 'CREATE TABLE "$*" AS'; cat $<; echo ';')|"time" \
$(psqlAsBien) --echo-all --set=schema='"$(datasrc)"' --set=table='"$*"' \
$(logInstall*)
	$(exportHeader)
.PRECIOUS: %/header.csv

%/install: _always
	$(if $(isRef),$(exportHeader),$(if $(nonXml),$(import_install_)))
define import_install_
(prefix=; . $(bin)/vegbien_dest; "time" nice -n +5\
env schema=$(datasrc) table=$* $(bin)/csv2db $(catSrcs) $(logInstall*))
$(if $(filter $(srcTable),$*),($(inDatasrc);\
echo 'ALTER TABLE "$(datasrc)"."$*" RENAME row_num TO "$*.row_num";')|"time"\
$(psqlAsBien) --echo-all --set=table='"$*"' $(logInstall*Add))
endef
# table-scope the src table's row_num column so it can be joined with other tables

%/uninstall: _always
	echo 'DROP TABLE IF EXISTS "$(datasrc)"."$*" CASCADE;'|$(psqlAsBien)

%/reinstall: _always %/uninstall %/install ;

cleanup: _always $(tables:%=%/cleanup) ;

# WARNING: This removes any index comments, due to a PostgreSQL bug.
# This occurs because ALTER TABLE recreates the index but not its comment.
%/cleanup: _always
	$(cleanup)

##### Maps building

# WARNING: You CANNOT make a subdir using `make inputs/<datasrc>/<subdir>/`.
# You must instead make the entire datasource dir: `make inputs/<datasrc>/`

# Maps to (try to) build are added to this
maps :=

srcRoot = $(mappings)/root.sh
mkSrcMap = $(catSrcs)|(. $(srcRoot); env datasrc=$(datasrc) $(bin)/src_map >$@)

# Via maps cleanup
ifneq ($(filter %/.map.csv.last_cleanup,$(MAKECMDGOALS)),)
%/.map.csv.last_cleanup: %/map.csv $(vocab) $(dict) $(coreMap)
	$(bin)/in_place $< $(bin)/canon 1 $(vocab)
	$(bin)/in_place $< $(bin)/canon 1 $(dict)
	$(bin)/in_place $< $(bin)/translate 1 $(dict)
	touch $@
	+$(selfMake) $(<:%/map.csv=%/unmapped_terms.csv)
	+$(selfMake) $(<:%/map.csv=%/new_terms.csv)
.PRECIOUS: %/.map.csv.last_cleanup
else
%/map.csv: _always
	$(if $(wildcard $@),,$(if $(nonXml),$(mkSrcMap)))
	+$(selfMake) $(@:%/map.csv=%/.map.csv.last_cleanup)
.PRECIOUS: %/map.csv
endif

%/VegBIEN.csv: %/map.csv $(coreMap)
	<$< $(bin)/cat_cols 1 2|$(bin)/join $(coreMap)|$(bin)/sort_map >$@
maps += $(autogenMaps)
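# The map build chain, as suggested by the file names (the helper scripts'
# semantics are inferred, not documented here): <table>/map.csv relates source
# columns to VegCore terms; joining it against $(coreMap) (VegCore-VegBIEN.csv)
# produces the autogenerated <table>/VegBIEN.csv that %/import uses below.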

maps: $(maps) _always ;

all += $(maps)

##### Maps validation

# `tail -n +2`: Remove header before running filter_out_ci because filter_out_ci
# only removes the header if it matches the vocabulary's header.

%/unmapped_terms.csv: %/map.csv $(coreMap)
	tail -n +2 $<|$(bin)/cols 1|$(bin)/filter_out_ci 0 $(coreMap) >$@
	$(bin)/autoremove $@

%/new_terms.csv: %/map.csv $(vocab) $(dict) %/unmapped_terms.csv
	$(newTerms)
	$(bin)/autoremove $@
newTerms = tail -n +2 $<|$(bin)/filter_out_ci 0 $(vocab)|$(bin)/filter_out_ci 0\
$(dict) $(if $(wildcard $(word 4,$+)),|$(bin)/filter_out_ci 0 $(word 4,$+))\
|grep -vE '^"?:' >$@; exit 0# because grep exits nonzero if no match

termsSubdirs := $(tables)

include $(root)/lib/mappings.Makefile

##### External dependencies

$(root)/%: _always
	+$(subMake)
.PRECIOUS: $(root)/% # let ext. dir's Makefile decide whether to delete on error

##### Mapping

+maps = $(filter %/map.csv %/VegBIEN.csv $(mappings)/%,$(+_))
map2db = env in_database=vegbien in_schema=$(datasrc) in_table=$*\
out_database=vegbien $(root)/map $(+maps)

##### Import to VegBIEN

profileTest = $(if $(profile),$(if $(test),1))
profileOnly = -env profile_to=/dev/fd/3 $(map2db) 3>&1 1>&2|\
$(bin)/profile_stats /dev/fd/0

log_ = $*/logs/$(if $(n),n=$(n).,)$(version).log.sql
trace = $(log_:.log.sql=.trace)
import = -$(if $(profileTest),$(profileOnly),(set -x; "time" env commit=1\
$(if $(profile),profile_to=$(trace)) $(map2db)) $(if $(log),\
$(if $(n),,&>$(log_)))$(if $(log),$(if $(n), 2>&1|tee -a $(log_))))
# don't abort on import errors, which often relate to invalid input data
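# Where the import log goes, per $(log_) above ($(version) is defined outside this
# file): a full import writes <table>/logs/<version>.log.sql; a test-sized run with
# n=<N> writes <table>/logs/n=<N>.<version>.log.sql and also echoes to the terminal
# via tee.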

import: $(tables:%=%/import) _always ;

%/import: %/VegBIEN.csv _always
	$(import)
# default:
%/import: _always ;

##### Log files from import

logs := $(wildcard */logs/*.log.sql */logs/*.trace)

rm_logs: _always
	$(RM) $(logs)

##### Verification of import

verifyTables := $(patsubst verify/%.ref,%,$(wildcard verify/*.ref))

verify: $(verifyTables:%=%/verify) _always ;

%/verify: verify/%.ref verify/%.out _always
	-$(diffVerbose) $(+_)
# don't abort on verification errors, which are expected during development
# default:
%/verify: verify/%.out _always
	$(if $(shell test -e $< && echo t),cat $<)
# don't run if verify/%.out's default do-nothing action was used
# can't use $(wildcard) because it won't recheck the file after verify/%.out is run

psqlExport := "time" $(psqlAsBien) --no-align --field-separator=$$'\t'\
--pset=footer=off --pset=null=NULL
verify = $(if $(reverify),$(psqlExport) --set=datasource="'$(datasrc)'" <$< >$@)

verify/%.out: $(mappings)/verify.%.sql _always
	$(verify)
# default:
verify/%.out: _always ;

all += $(wildcard verify/*.out)

%.ref: %.ref.sql
	($(inDatasrc); cat $<)|$(psqlExport) >$@
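# Sketch of wiring up verification for a new table, as implied by the rules above
# (names are placeholders): put the reference query in verify/<table>.ref.sql and
# the query to run against the import in $(mappings)/verify.<table>.sql; then
# `make inputs/<datasrc>/verify` builds verify/<table>.out and diffs it against
# verify/<table>.ref.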

##### Editing import

rotate: _always
	echo "UPDATE party SET organizationname = organizationname||'.$(version)'\
WHERE organizationname = '$(datasrc)';"|$(psqlAsBien)

rm: _always
	echo "DELETE FROM party WHERE organizationname = '$(datasrc)';"|\
$(psqlAsBien)

##### Testing

testRefOutput = $(subst .by_col,,$(1))
testRef = $(testRefOutput).ref
hasOwnRef = $(filter $@,$(call testRefOutput,$@))
# filter returns non-empty if they are equal
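# Worked example (hypothetical table "taxa"): for taxa/test.xml, $(testRef) is
# taxa/test.xml.ref and $(hasOwnRef) is non-empty; for taxa/test.by_col.xml the
# .by_col is stripped, so it is compared against the same taxa/test.xml.ref and
# $(hasOwnRef) is empty (it has no reference file of its own).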

# `rm $@`: Remove outputs of successful tests to reduce clutter
# `$(foreach use_staged...)`: Run with use_staged=1
define runTest
@echo "Testing $(abspath $@)..."
>$@ env test=1 n=$(test_n) $(1) $(foreach use_staged,1,$(map2db))
@(set -x; $(diffIgnoreSpace) $(call testRef,$@) $@) 2>&1 && rm $@ || { e=$$?;\
$(if $(wildcard $(call testRef,$@)),,cat $@;)\
$(if $(hasOwnRef),\
{\
read -p $(emph)'Accept new test output? (y/n)'$(endEmph) REPLY;\
if test "$$REPLY" = y; then\
(set -x; $(MAKE) $@-ok --directory=$(realpath .) --makefile=../input.Makefile);\
exit 0;\
fi;\
};,\
echo $(emph)"Note: The preceding failed test is compared to another test's\
output"$(endEmph);\
echo $(emph)"When it fails, this always indicates a bug"$(endEmph);\
)\
exit $$e;}
endef

tests :=

# Requires staging tables. To create them, run `make inputs/<datasrc>/install`.
# Non-flat-file inputs fall back to mimicking a successful test
%/test.xml: %/VegBIEN.csv _always
	$(if $(nonXml),$(call runTest,by_col=))
tests += %/test.xml

%/test.by_col.xml: %/VegBIEN.csv _always
	$(if $(nonXml),$(call runTest,by_col=1))

# Only run column-based tests if column-based mode is enabled, because these tests
# are much slower than the row-based tests for small numbers of rows
ifneq ($(by_col),)
tests += %/test.by_col.xml
endif

testOutputs := $(foreach test,$(tests),$(tables:%=$(test)))

.PRECIOUS: $(testOutputs) # save outputs of failed tests so they can be accepted

test: _always $(testOutputs) ;

all += $(wildcard %/test*.xml)

# Accepts a test output: make <test_output_path>-ok
%-ok: _always
	mv $* $(call testRef,$*)
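# Concrete form of the usage above (the path is a placeholder):
#     make inputs/<datasrc>/<table>/test.xml-ok
# moves the failed test output onto its .ref file, making it the accepted reference.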

accept-all: _always
	+yes|$(selfMake) test

##### Documentation

steps = $(selfMake) -s $*/import test=1 by_col=1 verbosity=2 n=100\
2>&1|$(bin)/debug2redmine >$@

%/logs/steps.by_col.log.sql: _always
	+$(steps)
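# To regenerate the Redmine-formatted import walkthrough for one table (names are
# placeholders), run something like:
#     make inputs/<datasrc>/<table>/logs/steps.by_col.log.sql
# which re-runs a 100-row column-based test import at verbosity=2 and filters the
# output through $(bin)/debug2redmine.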
(4-4/4)