#!/bin/bash
#
# Bash script to download geoscrub_input table dump (created by AMK) from the
# vegbien database, and load it into the geoscrub database (i.e., the
# postgis database prepped with geonames.org data, GADM2 data, and
# associated mapping tables).
#
# Won't be necessary if we end up injecting all of the geoscrubbing and
# geovalidation functionality directly into vegbien itself. And if we
# end up implementing this stuff as a standalone service instead, we'd
# need to rethink (and generalize) how the input data is handled. But
# for now, this should at least serve as a placeholder that could be
# tweaked manually to load any arbitrary geoscrub input data table.
#
# Jim Regetz
# NCEAS
# Created Nov 2012
#
# Paul Sarando
# iPlant Collaborative
# Updated Oct 2013

# Note, to force data to download from DATA_URL, ensure the DATAFILE is deleted
# before running this script.

# Fail fast: abort on unhandled command failure, unset variables, and
# failures anywhere in a pipeline.
set -euo pipefail

# Connection settings for the target geoscrub database.
DB_NAME="geoscrub"
DB_USER="bien"
DB_HOST="localhost"

# Directory containing this script; "$0" is quoted so paths containing
# spaces are handled correctly.
SCRIPT_DIR="$(dirname "$0")"

# Where the input corpus is fetched from, and where it is cached locally.
DATA_URL="http://fs.vegpath.org/exports/geoscrub_input.no_header.cols=country,stateProvince,county,decimalLatitude,decimalLongitude.csv"
DATADIR="${SCRIPT_DIR}/input"
DATAFILE="${DATADIR}/geoscrub-corpus.csv"

# Create the local cache directory for the input corpus if it doesn't
# already exist; abort if it cannot be created.
if [[ ! -d "$DATADIR" ]]; then
    echo "making directory ${DATADIR}"

    # Test the command directly instead of inspecting $? afterwards;
    # diagnostics go to stderr.
    if ! mkdir -p "$DATADIR"; then
        echo "Could not create directory ${DATADIR}" >&2
        exit 1
    fi
fi

# Fetch the input corpus unless a readable copy is already cached.
# (Delete $DATAFILE to force a fresh download; see note at top of file.)
if [[ ! -r "$DATAFILE" ]]; then
    # download distinct records from vegbien
    if ! wget -O "$DATAFILE" "$DATA_URL"; then
        # wget -O creates/truncates the output file even on failure;
        # remove the partial file so the next run retries the download
        # instead of loading truncated data.
        rm -f -- "$DATAFILE"
        echo "Could not download input to ${DATAFILE}" >&2
        exit 1
    fi
fi

echo "Loading vegbien data from ${DATAFILE}"

# clear previous data
# ON_ERROR_STOP=1 makes psql exit non-zero on any SQL error so the
# failure is actually detectable here.
if ! psql -e -U "$DB_USER" -h "$DB_HOST" -d "$DB_NAME" --set ON_ERROR_STOP=1 \
        < "${SCRIPT_DIR}/truncate.vegbien_geoscrub.sql"; then
    echo "Could not clear data from vegbien_geoscrub tables." >&2
    exit 1
fi

# load vegbien_geoscrub table with input data
# The original script ignored this exit status, so a failed \COPY still
# exited 0; check it (with ON_ERROR_STOP) so failures are reported.
if ! psql -U "$DB_USER" -h "$DB_HOST" -d "$DB_NAME" --set ON_ERROR_STOP=1 \
        -c "\COPY vegbien_geoscrub FROM '${DATAFILE}' WITH CSV"; then
    echo "Could not load data into vegbien_geoscrub table." >&2
    exit 1
fi