selfDir_uZPPqC := $(dir $(lastword $(MAKEFILE_LIST)))
root := $(selfDir_uZPPqC)..
include $(root)/lib/common.Makefile


##### Configuration

# Command line
log ?= $(if $(test),,1)
profile ?=
quiet ?=
reverify ?= 1
schema_only ?=
use_staged ?= $(by_col)
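# These settings are meant to be overridden on the make command line
# (empty = off, non-empty = on); e.g., a hypothetical invocation:
# `make <table>/install quiet=1` writes the install log to the file only,
# instead of also echoing it to the terminal via tee.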

# Makefile
exts ?= csv tsv txt dmp xml
test_n ?= 2

##### Vars/functions

# Paths
datasrc := $(patsubst .%,%,$(notdir $(realpath .)))
bin := $(root)/bin
mappings := $(root)/mappings

# Make
SHELL := /bin/bash
selfMake = $(MAKE) --makefile=../input.Makefile
subMake = $(MAKE) $(@:$(root)/%=%) --directory=$(root)
+_ = $(+:_%=)
addBeforeExt = $(basename $(2))$(1)$(suffix $(2))
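# $(+_) is $+ (all prereqs) with `_`-prefixed flag prereqs such as _always
# removed; e.g. (illustrative) $(call addBeforeExt,.new,map.csv) expands to
# `map.new.csv`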

# Terminal
termCols := $(shell tput cols)
esc := '['
reset := $(esc)'0m'
emph := $(esc)'7m '
endEmph := ' '$(reset)

# User interaction

confirm = $(if $(shell read -p $(emph)"$(1)"$(endEmph)$$'$(if\
$(2),\n$(2))\nContinue? (y/n) ' REPLY; test "$$REPLY" = y && echo t),,\
$(error Aborting))
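# e.g. (illustrative) `$(call confirm,Delete the schema?,This cannot be undone)`
# prompts on the terminal and aborts make unless the reply is y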

# Commands
MKDIR = mkdir -p
mkdir = $(MKDIR) $(@D)
CP = cp -p
diff = diff --unified=2
diffIgnoreSpace = $(diff) --ignore-space-change
diffVerbose = $(if $(verbose),diff --side-by-side --left-column\
--width=$(termCols),$(diff))

# BIEN commands
sortFilenames = $(shell $(bin)/sort_filenames $(1))
selfMap = $(bin)/cols 0 0
psqlOpts := --set ON_ERROR_STOP=1 --quiet
psqlAsBien := $(bin)/psql_vegbien $(psqlOpts)
searchPath := $(datasrc),$(shell prefix=; . $(bin)/vegbien_dest;\
echo "$$schemas")
searchPath := "$(subst $(comma),"$(comma)",$(searchPath))"
# Usage: ($(inDatasrc); cat $(file))|$(psqlCmd)
inDatasrc := echo 'SET search_path TO $(searchPath);'

# SVN
addDir = $(if $(wildcard $(1)/),svn add --depth=empty $(1),svn mkdir $(1))
setSvnIgnore = svn propset svn:ignore $(2) $(1)
define addDirWithIgnore
$(addDir)
$(setSvnIgnore)
endef
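# e.g. $(call addDirWithIgnore,verify,'*.out') svn-adds (or creates) verify/
# and sets its svn:ignore property to *.out; this call is used in `add` below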

##### Environment

export PATH := $(bin):$(PATH)

##### General targets

all: _always maps ;

clean: _always
	$(RM) $(all)

remake: _always clean
	+$(selfMake)
# re-run make so that cache of existing files is reset

# Only remake if the output doesn't exist. This prevents unintentional remaking
# when the make script is newly checked out from svn (which sets the mod time to
# now) but the output is synced externally.
# The prereq can't simply be removed to do this, because it determines when the
# rule applies.
make_script = $(if $(wildcard $@),,"time" ./$< >$@)

%/: % _always ;

%: %.make
	$(make_script)
.PRECIOUS: % # save partial outputs of aborted src make scripts

##### SVN

svnFilesGlob := */{{,.}{map,*terms,VegBIEN}.csv{,.*},*header.*,*.sql,test*.xml*}
svnFilesGlob := {*schema*.sql,{,*/}*.make,$(svnFilesGlob)}
_svnFilesGlob := _MySQL/{*schema*.sql,*.make}
svnFiles = $(filter-out _% logs/%,$(call wildcard/,$(svnFilesGlob)))\
$(call wildcard/,$(_svnFilesGlob))

add: _always
	$(call setSvnIgnore,.,'*')
	$(call addDirWithIgnore,logs,$$'*.log.sql\n*.trace')
	$(call addDirWithIgnore,verify,'*.out')
	$(call addFile,import_order.txt)
	$(call add*,$(svnFiles))
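# `add` registers the datasource's maps, exports, and tests with svn
# (presumably invoked as `make inputs/<datasrc>/add`, by analogy with the
# other targets in this file)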

# Adds a new table subdir
%/add: _always
	$(call addDirWithIgnore,$*,'*')
	$(call addDirWithIgnore,$*/logs,$$'*.log.sql\n*.trace')

##### Existing maps discovery

sortFile := import_order.txt

tables := $(if $(wildcard $(sortFile)),$(shell cat $(sortFile)))
    # $(shell) replaces "\n" with " "
allSubdirs := $(call wildcard/,*/)
allTables := $(call sortFilenames,$(filter-out _% verify logs,$(allSubdirs:%/=%)))
joinedTables := $(filter-out $(tables),$(allTables))
allTables := $(joinedTables) $(tables)# move joined tables to beginning
ifeq ($(tables),)# none specified in sort file
tables := $(allTables)
endif
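# e.g. (hypothetical): if import_order.txt lists `plots specimens` and the dir
# also contains taxa/, then joinedTables = `taxa` and allTables =
# `taxa plots specimens`; $(tables) itself keeps the sort-file order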

anyMap := %/map.csv %/VegBIEN.csv %/unmapped_terms.csv %/new_terms.csv

extsFilter := $(addprefix %.,$(exts))
dataOnly = $(filter $(extsFilter),$(1))

anyTest = $*/test.%
srcsOnly = $(filter-out $(anyMap) $(anyTest) %/logs,$(call dataOnly,$(1)))

vocab := $(mappings)/VegCore.csv
coreMap := $(mappings)/VegCore-VegBIEN.csv
dict := $(mappings)/Veg+-VegCore.csv

viaMaps := $(tables:%=%/map.csv)

autogenMaps := $(subst map.,VegBIEN.,$(viaMaps))
directMaps := $(autogenMaps) $(filter-out $(autogenMaps),\
$(wildcard */VegBIEN.csv))

##### Sources

srcs = $(call sortFilenames,$(call srcsOnly,$(wildcard $*/*)))
nonHeaderSrcs = $(filter-out %/header.csv,$(srcs))
isRef = $(if $(nonHeaderSrcs),,1)
    # empty subdir, so references an already-installed staging table
isXml = $(filter %.xml,$(nonHeaderSrcs))
nonXml = $(if $(isXml),,1)
isCsv = $(if $(nonHeaderSrcs),$(if $(isXml),,1))
    # true if there are non-header srcs and none of them are *.xml
catSrcs = $(bin)/cat$(if $(nonXml),_csv) $(srcs)
withCatSrcs = $(catSrcs:$(bin)/%=$(bin)/with_%) --
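# e.g. (illustrative): for a table subdir x/ containing only a.csv and b.csv,
# $(srcs) = x/a.csv x/b.csv and $(catSrcs) expands to
# `$(bin)/cat_csv x/a.csv x/b.csv`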

# Usage: `make {--silent|-s} inputs/<datasrc>/cat` (don't echo make commands)
cat: $(tables:%=%/cat) _always ;

%/cat: _always
	$(catSrcs)

##### Staging tables installation

srcTable := %.src

dbExports := $(sort $(wildcard *schema*.sql))# schemas first
ifeq ($(schema_only),)
dbExports += $(sort $(filter-out $(dbExports),$(wildcard *.sql)))# all others
endif
dbExports := $(strip $(dbExports))# += adds extra whitespace
allInstalls := $(if $(dbExports),sql) $(allTables)

install: _always schema $(allInstalls:%=%/install) ;

uninstall: _always confirm_rm_schema rm_schema ;
# rm_schema will also drop all staging tables

reinstall: _always uninstall install ;

confirm_rm_schema: _always
	$(if $(filter TNRS,$(datasrc)),$(call confirm,WARNING: This will delete the\
TNRS cache!,To save it: make backups/TNRS.backup-remake))

schema: _always
	-echo 'CREATE SCHEMA "$(datasrc)";'|$(psqlAsBien)
# ignore errors if schema exists

rm_schema: _always
	echo 'DROP SCHEMA IF EXISTS "$(datasrc)" CASCADE;'|$(psqlAsBien)

installLog := logs/install.log.sql

logInstall = $(if $(log),$(if $(quiet),$(2)$(1)$(installLog) 2>&1,2>&1|tee $(3)\
$(1)$(installLog)))
logInstallRoot = $(call logInstall,,>)
logInstall* = $(call logInstall,$*/,>)
logInstall*Add = $(call logInstall,$*/,>>,-a)# append to log
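# e.g. (illustrative): with the default log=1 and quiet unset, $(logInstall*)
# expands to `2>&1|tee $*/logs/install.log.sql`; with quiet=1 it becomes
# `>$*/logs/install.log.sql 2>&1`; with log= it expands to nothing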

# Must come before %/install to override it
sql/install: $(dbExports)
	($(inDatasrc); cat $+|grep -vF 'SET search_path')|"time" $(psqlAsBien) \
--set=schema='"$(datasrc)"' $(logInstallRoot)

# Must come before `%.sql: _MySQL/%.sql` to override it
%.sql: %.sql.make
	$(make_script)

# The export must be created with:
# `--compatible=postgresql --add-locks=false --set-charset --no-create-info`
# Must come before `%.sql: _MySQL/%.sql` to override it
%.data.sql: _MySQL/%.data.sql
	$(bin)/my2pg.data <$< >$@

# The export must be created with:
# `--compatible=postgresql --add-locks=false --set-charset`
# Add `--no-data` to create a schema-only export.
%.sql: _MySQL/%.sql
	$(bin)/my2pg <$< >$@

cleanup = $(if $(wildcard $*/cleanup.sql),($(inDatasrc); cat $*/cleanup.sql)\
|"time" $(psqlAsBien) --echo-all --set=table='"$*"' $(logInstall*Add),\
(prefix=; . $(bin)/vegbien_dest; env schema=$(datasrc) table=$* $(bin)/csv2db)\
$(logInstall*Add))

define exportHeader
$(cleanup)
echo 'SELECT * FROM "$(datasrc)"."$*" LIMIT 0;'|$(psqlAsBien) \
--no-align --field-separator=, --pset=footer=off >$*/header.csv
endef

# For staging tables which are derived by joining together other staging tables.
%/install %/header.csv: %/create.sql _always
	($(inDatasrc); echo 'CREATE TABLE "$*" AS'; cat $<; echo ';')|"time" \
$(psqlAsBien) --echo-all --set=schema='"$(datasrc)"' --set=table='"$*"' \
$(logInstall*)
	$(exportHeader)
.PRECIOUS: %/header.csv

%/install: _always
	$(if $(isRef),$(exportHeader),$(if $(nonXml),$(import_install_)))
define import_install_
(prefix=; . $(bin)/vegbien_dest; "time" nice -n +5\
env schema=$(datasrc) table=$* $(bin)/csv2db $(catSrcs) $(logInstall*))
$(if $(filter $(srcTable),$*),($(inDatasrc);\
echo 'ALTER TABLE "$(datasrc)"."$*" RENAME row_num TO "$*.row_num";')|"time"\
$(psqlAsBien) --echo-all --set=table='"$*"' $(logInstall*Add))
endef
# prefix the src table's row_num col with the table name so it can be joined
# with other tables without a column-name conflict

%/uninstall: _always
	echo 'DROP TABLE IF EXISTS "$(datasrc)"."$*" CASCADE;'|$(psqlAsBien)

%/reinstall: _always %/uninstall %/install ;

cleanup: _always $(tables:%=%/cleanup) ;

# WARNING: This removes any index comments, due to a PostgreSQL bug.
# This occurs because ALTER TABLE recreates the index but not its comment.
%/cleanup: _always
	$(cleanup)

##### Maps building

# WARNING: You CANNOT make a subdir using `make inputs/<datasrc>/<subdir>/`.
# You must instead make the entire datasource dir: `make inputs/<datasrc>/`

# Maps to (try to) build are added to this
maps :=

srcRoot = $(mappings)/root.sh
mkSrcMap = $(catSrcs)|(. $(srcRoot); env datasrc=$(datasrc) $(bin)/src_map >$@)

# Via maps cleanup
ifneq ($(filter %/.map.csv.last_cleanup,$(MAKECMDGOALS)),)
%/.map.csv.last_cleanup: %/map.csv $(vocab) $(dict) $(coreMap)
	$(bin)/in_place $< $(bin)/canon 1 $(vocab)
	$(bin)/in_place $< $(bin)/canon 1 $(dict)
	$(bin)/in_place $< $(bin)/translate 1 $(dict)
	touch $@
	+$(selfMake) $(<:%/map.csv=%/unmapped_terms.csv)
	+$(selfMake) $(<:%/map.csv=%/new_terms.csv)
.PRECIOUS: %/.map.csv.last_cleanup
else
%/map.csv: _always
	$(if $(wildcard $@),,$(if $(nonXml),$(mkSrcMap)))
	+$(selfMake) $(@:%/map.csv=%/.map.csv.last_cleanup)
.PRECIOUS: %/map.csv
endif

%/VegBIEN.csv: %/map.csv $(coreMap)
	<$< $(bin)/cat_cols 1 2|$(bin)/join $(coreMap)|$(bin)/sort_map >$@
maps += $(autogenMaps)

maps: $(maps) _always ;

all += $(maps)

##### Maps validation

# `tail -n +2`: Remove the header before running filter_out_ci, because
# filter_out_ci only removes the header if it matches the vocabulary's header.

%/unmapped_terms.csv: %/map.csv $(coreMap)
	tail -n +2 $<|$(bin)/cols 1|$(bin)/filter_out_ci 0 $(coreMap) >$@
	$(bin)/autoremove $@

%/new_terms.csv: %/map.csv $(vocab) $(dict) %/unmapped_terms.csv
	$(newTerms)
	$(bin)/autoremove $@
newTerms = tail -n +2 $<|$(bin)/filter_out_ci 0 $(vocab)|$(bin)/filter_out_ci 0\
$(dict) $(if $(wildcard $(word 4,$+)),|$(bin)/filter_out_ci 0 $(word 4,$+)) >$@

termsSubdirs := $(tables)

include $(root)/lib/mappings.Makefile

##### External dependencies

$(root)/%: _always
	+$(subMake)
.PRECIOUS: $(root)/% # let ext. dir's Makefile decide whether to delete on error

##### Mapping

+maps = $(filter %/map.csv %/VegBIEN.csv $(mappings)/%,$(+_))
map2db = env in_database=vegbien in_schema=$(datasrc) in_table=$*\
out_database=vegbien $(root)/map $(+maps)
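# $(+maps) selects just the map-file prereqs of the current rule ($(+_) strips
# `_`-prefixed prereqs such as _always); map2db then runs $(root)/map with the
# staging table "$(datasrc)"."$*" as input and vegbien as the output database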

##### Import to VegBIEN

profileTest = $(if $(profile),$(if $(test),1))
profileOnly = -env profile_to=/dev/fd/3 $(map2db) 3>&1 1>&2|\
$(bin)/profile_stats /dev/fd/0

log_ = $*/logs/$(if $(n),n=$(n).,)$(version).log.sql
trace = $(log_:.log.sql=.trace)
import = -$(if $(profileTest),$(profileOnly),(set -x; "time" env commit=1\
$(if $(profile),profile_to=$(trace)) $(map2db)) $(if $(log),\
$(if $(n),,&>$(log_)))$(if $(log),$(if $(n), 2>&1|tee -a $(log_))))
# don't abort on import errors, which often relate to invalid input data
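# e.g. (hypothetical): `make <table>/import` runs a full, logged import;
# `make <table>/import test=1 n=100` imports just 100 rows without a versioned
# log file (the `steps` target at the end of this file uses a variant of this)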

import: $(tables:%=%/import) _always ;

%/import: %/VegBIEN.csv _always
	$(import)
# default:
%/import: _always ;

##### Log files from import

logs := $(wildcard */logs/*.log.sql */logs/*.trace)

rm_logs: _always
	$(RM) $(logs)

##### Verification of import

verifyTables := $(patsubst verify/%.ref,%,$(wildcard verify/*.ref))

verify: $(verifyTables:%=%/verify) _always ;

%/verify: verify/%.ref verify/%.out _always
	-$(diffVerbose) $(+_)
# don't abort on verification errors, which are expected during development
# default:
%/verify: verify/%.out _always
	$(if $(shell test -e $< && echo t),cat $<)
# don't run if verify/%.out's default do-nothing action was used
# can't use $(wildcard) because it won't recheck the file after verify/%.out is run

psqlExport := "time" $(psqlAsBien) --no-align --field-separator=$$'\t'\
--pset=footer=off --pset=null=NULL
verify = $(if $(reverify),$(psqlExport) --set=datasource="'$(datasrc)'" <$< >$@)
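# e.g. pass reverify= on the command line to keep the existing verify/*.out
# files instead of re-running the verification queries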

verify/%.out: $(mappings)/verify.%.sql _always
	$(verify)
# default:
verify/%.out: _always ;

all += $(wildcard verify/*.out)

%.ref: %.ref.sql
	($(inDatasrc); cat $<)|$(psqlExport) >$@

##### Editing import

rotate: _always
	echo "UPDATE party SET organizationname = organizationname||'.$(version)'\
WHERE organizationname = '$(datasrc)';"|$(psqlAsBien)

rm: _always
	echo "DELETE FROM party WHERE organizationname = '$(datasrc)';"|\
$(psqlAsBien)

##### Testing

testRefOutput = $(subst .by_col,,$(1))
testRef = $(testRefOutput).ref
hasOwnRef = $(filter $@,$(call testRefOutput,$@))
# filter returns non-empty if they are equal
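# e.g. (illustrative): for $@ = x/test.by_col.xml, $(testRef) is x/test.xml.ref
# and $(hasOwnRef) is empty (the by_col test is compared against the row-based
# test's ref); for $@ = x/test.xml, $(hasOwnRef) is non-empty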

# `rm $@`: Remove outputs of successful tests to reduce clutter
# `$(foreach use_staged...)`: Run with use_staged=1
define runTest
@echo "Testing $(abspath $@)..."
>$@ env test=1 n=$(test_n) $(1) $(foreach use_staged,1,$(map2db))
@(set -x; $(diffIgnoreSpace) $(call testRef,$@) $@) 2>&1 && rm $@ || { e=$$?;\
$(if $(wildcard $(call testRef,$@)),,cat $@;)\
$(if $(hasOwnRef),\
{\
read -p $(emph)'Accept new test output? (y/n)'$(endEmph) REPLY;\
if test "$$REPLY" = y; then\
(set -x; $(MAKE) $@-ok --directory=$(realpath .) --makefile=../input.Makefile);\
exit 0;\
fi;\
};,\
echo $(emph)"Note: The preceding failed test is compared to another test's\
output"$(endEmph);\
echo $(emph)"When it fails, this always indicates a bug"$(endEmph);\
)\
exit $$e;}
endef

tests :=

# Requires staging tables. To create them, run `make inputs/<datasrc>/install`.
# Non-flat-file inputs fall back to mimicking a successful test
%/test.xml: %/VegBIEN.csv _always
	$(if $(nonXml),$(call runTest,by_col=))
tests += %/test.xml

%/test.by_col.xml: %/VegBIEN.csv _always
	$(if $(nonXml),$(call runTest,by_col=1))

# Only run column-based tests if column-based mode is enabled, because these
# tests are much slower than the row-based tests for small numbers of rows
ifneq ($(by_col),)
tests += %/test.by_col.xml
endif

testOutputs := $(foreach test,$(tests),$(tables:%=$(test)))

.PRECIOUS: $(testOutputs) # save outputs of failed tests so they can be accepted

test: _always $(testOutputs) ;

all += $(wildcard %/test*.xml)

# Accepts a test output: make <test_output_path>-ok
%-ok: _always
	mv $* $(call testRef,$*)

accept-all: _always
	+yes|$(selfMake) test

##### Documentation

steps = $(selfMake) -s $*/import test=1 by_col=1 verbosity=2 n=100\
2>&1|$(bin)/debug2redmine >$@

%/logs/steps.by_col.log.sql: _always
	+$(steps)
(4-4/4)