Revision 37dd6314
Added by selv in ga254@bulldogj almost 11 years ago
terrain/procedures/dem_variables/GFC2013/#sc2_merge_lossyear.sh#

# for YEAR in `seq 1 12` ; do qsub -v YEAR=$YEAR /lustre0/scratch/ga254/scripts_bj/environmental-layers/terrain/procedures/dem_variables/GFC2013/sc2_merge_lossyear.sh ; done

#PBS -S /bin/bash
#PBS -q fas_normal
#PBS -l mem=2gb
#PBS -l walltime=1:00:00
#PBS -l nodes=1:ppn=1
#PBS -V
#PBS -o /lustre0/scratch/ga254/stdout
#PBS -e /lustre0/scratch/ga254/stderr

INDIR=/lustre0/scratch/ga254/dem_bj/GFC2013/lossyear/tif_1km
OUTDIR=/lustre0/scratch/ga254/dem_bj/GFC2013/lossyear

# md stands for median
# mn stands for mean

rm -f /tmp/lossyear${YEAR}_NE.tif /tmp/lossyear${YEAR}_SE.tif /tmp/lossyear${YEAR}_NW.tif /tmp/lossyear${YEAR}_SW.tif /tmp/lossyear${YEAR}.tif

gdal_merge.py -co COMPRESS=LZW -co ZLEVEL=9 $INDIR/1km_y${YEAR}_Hansen_GFC2013_lossyear_??N_???E.tif -o /tmp/lossyear${YEAR}_NE.tif
gdal_merge.py -co COMPRESS=LZW -co ZLEVEL=9 $INDIR/1km_y${YEAR}_Hansen_GFC2013_lossyear_??S_???E.tif -o /tmp/lossyear${YEAR}_SE.tif
gdal_merge.py -co COMPRESS=LZW -co ZLEVEL=9 $INDIR/1km_y${YEAR}_Hansen_GFC2013_lossyear_??N_???W.tif -o /tmp/lossyear${YEAR}_NW.tif
gdal_merge.py -co COMPRESS=LZW -co ZLEVEL=9 $INDIR/1km_y${YEAR}_Hansen_GFC2013_lossyear_??S_???W.tif -o /tmp/lossyear${YEAR}_SW.tif

gdal_merge.py -co COMPRESS=LZW -co ZLEVEL=9 /tmp/lossyear${YEAR}_??.tif -o /tmp/lossyear${YEAR}.tif

gdal_translate -co COMPRESS=LZW -co ZLEVEL=9 /tmp/lossyear${YEAR}.tif $OUTDIR/lossyear${YEAR}_frequency_GFC2013.tif

rm -f /tmp/lossyear${YEAR}.tif /tmp/lossyear${YEAR}_NE.tif /tmp/lossyear${YEAR}_SE.tif /tmp/lossyear${YEAR}_NW.tif /tmp/lossyear${YEAR}_SW.tif

for YEAR in `seq 2 12` ; do

YEARL=`expr 2000 + $YEAR`

gdal_edit.py -mo "Author=giuseppe.amatulli@gmail.com using pktools" \
-mo "Input dataset=Global Forest Change 2000-2012 (Hansen 2013)" \
-mo "Input layer=Year of gross forest cover loss event" \
-mo "Output=Gross Forest Cover Loss during $YEARL in Frequency(%)" \
-mo "Offset=0" -mo "Scale=0.01" $OUTDIR/lossyear${YEAR}_frequency_GFC2013.tif
mv $OUTDIR/lossyear${YEAR}_frequency_GFC2013.tif $OUTDIR/lossyear${YEARL}_frequency_GFC2013.tif

done

checkjob -v $PBS_JOBID

terrain/procedures/dem_variables/GFC2013/.#sc2_merge_lossyear.sh

ga254@bulldogj.wss.yale.edu.1470:1376934945

terrain/procedures/dem_variables/GFC2013/sc1_aggregation_datamask.sh

# download the data from http://earthenginepartners.appspot.com/science-2013-global-forest/download.html
# cd /lustre0/scratch/ga254/dem_bj/GFC2013/datamask/tif
# wget -i ../datamask.txt
# create a vrt
# gdalbuildvrt datamask.vrt *.tif
# create new tiles because the original tiles are 36001 pixels wide, so a 30-pixel aggregation cannot be done directly
# x: 1296036 pixels west to east; dropping 1 pixel per tile gives 1296000, which is divisible by 30
# y:
# 10 tiles east to west; each tile is 129600 30 m pixels, which become 4320 1 km pixels; extent from -180 to +180
# 10 tiles north to south; each tile is 46800 30 m pixels, which become 1560 1 km pixels; extent from +80 to -50
# pixel size 0.000277777777778 = 30 m
# pixel size 0.008333333333333 = 1 km

# in the end it was decided to drop one pixel at the end of each tile... maybe to be replaced with a gdalwarp
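# A quick sanity check of the tile arithmetic above, kept as comments because any
# executable line placed before the #PBS directives below would stop PBS from
# reading them:
#   echo $(( 36001 - 1 ))      # 36000 columns kept per tile, divisible by 30
#   echo $(( 36000 / 30 ))     # 1200  1 km columns per 10-degree tile
#   echo $(( 1296036 - 36 ))   # 1296000 global columns after dropping 1 per tile (36 tiles)
#   echo $(( 129600 / 30 ))    # 4320, and 46800 / 30 = 1560: the 1 km tile sizes quoted above
#   awk 'BEGIN{ printf "%.15f\n", 30/3600 }'   # 0.008333333333333, the 1 km pixel size in degrees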

# for file in /lustre0/scratch/ga254/dem_bj/GFC2013/datamask/tif/Hansen_GFC2013_datamask_*.tif ; do bash /lustre0/scratch/ga254/scripts_bj/environmental-layers/terrain/procedures/dem_variables/GFC2013/sc1_tiling.sh $file ; done

# for file in /lustre0/scratch/ga254/dem_bj/GFC2013/datamask/tif/Hansen_GFC2013_datamask_*.tif ; do qsub -v file=$file /lustre0/scratch/ga254/scripts_bj/environmental-layers/terrain/procedures/dem_variables/GFC2013/sc1_tiling.sh ; done

#PBS -S /bin/bash
#PBS -q fas_normal
#PBS -l mem=1gb
#PBS -l walltime=0:20:00
#PBS -l nodes=1:ppn=1
#PBS -V
#PBS -o /lustre0/scratch/ga254/stdout
#PBS -e /lustre0/scratch/ga254/stderr

# file=$1

module load Tools/Python/2.7.3
module load Libraries/GDAL/1.10.0
module load Libraries/OSGEO/1.10.0

filename=$(basename $file .tif)

INDIR=/lustre0/scratch/ga254/dem_bj/GFC2013/datamask/tif
OUTDIR=/lustre0/scratch/ga254/dem_bj/GFC2013/datamask/tif_1km

geo_string=$(getCorners4Gtranslate $file)
ulx=$(echo $geo_string | awk '{ print sprintf("%.0f", $1 )}') # round to integer corner coordinates
uly=$(echo $geo_string | awk '{ print sprintf("%.0f", $2 )}')
lrx=$(echo $geo_string | awk '{ print sprintf("%.0f", $3 )}')
lry=$(echo $geo_string | awk '{ print sprintf("%.0f", $4 )}')
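# Illustration of the rounding above: awk's "%.0f" snaps a slightly-off corner
# coordinate to the nearest integer degree, e.g.
echo "9.9999999 -50.0000001" | awk '{ printf "%.0f %.0f\n", $1, $2 }'   # prints: 10 -50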

# the southernmost tile is smaller
if [ ${filename:24:3} = '50S' ] ; then ysize=25200 ; else ysize=36000 ; fi

gdal_translate -srcwin 0 0 36000 $ysize -a_ullr $ulx $uly $lrx $lry -co COMPRESS=LZW -co ZLEVEL=9 $INDIR/$filename.tif $OUTDIR/tmp_$filename.tif
pkfilter -co COMPRESS=LZW -ot Float32 -class 1 -dx 30 -dy 30 -f density -d 30 -i $OUTDIR/tmp_$filename.tif -o $OUTDIR/1km_tmp_$filename.tif
gdal_calc.py -A $OUTDIR/1km_tmp_$filename.tif --calc="(A * 100 )" --co=COMPRESS=LZW --co=ZLEVEL=9 --overwrite --outfile $OUTDIR/1km_tmp2_$filename.tif
gdal_translate -ot UInt16 -co COMPRESS=LZW -co ZLEVEL=9 $OUTDIR/1km_tmp2_$filename.tif $OUTDIR/1km_$filename.tif
rm $OUTDIR/tmp_$filename.tif $OUTDIR/1km_tmp_$filename.tif $OUTDIR/1km_tmp2_$filename.tif
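# The chain above crops each tile to a 30-divisible window, lets pkfilter compute the
# density of class 1 (the mapped land surface in the Hansen datamask) in every 30x30
# block while down-sampling by 30, scales the result by 100 and packs it into UInt16.
# Optional check of the 1 km output, kept commented out so the job itself is unchanged:
# gdalinfo -mm $OUTDIR/1km_$filename.tif | grep 'Computed Min/Max'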

# checkjob -v $PBS_JOBID

terrain/procedures/dem_variables/GFC2013/sc1_aggregation_datamask_warp.sh

# download the data from http://earthenginepartners.appspot.com/science-2013-global-forest/download.html
# cd /lustre0/scratch/ga254/dem_bj/GFC2013/datamask/tif
# wget -i ../datamask.txt
# create a vrt
# gdalbuildvrt datamask.vrt *.tif
# create new tiles because the original tiles are 36001 pixels wide, so a 30-pixel aggregation cannot be done directly
# x: 1296036 pixels west to east; dropping 1 pixel per tile gives 1296000, which is divisible by 30
# y:
# 10 tiles east to west; each tile is 129600 30 m pixels, which become 4320 1 km pixels; extent from -180 to +180
# 10 tiles north to south; each tile is 46800 30 m pixels, which become 1560 1 km pixels; extent from +80 to -50
# pixel size 0.000277777777778 = 30 m
# pixel size 0.008333333333333 = 1 km

# in the end it was decided to drop one pixel at the end of each tile... maybe to be replaced with a gdalwarp

# for file in /lustre0/scratch/ga254/dem_bj/GFC2013/datamask/tif/Hansen_GFC2013_datamask_*.tif ; do bash /lustre0/scratch/ga254/scripts_bj/environmental-layers/terrain/procedures/dem_variables/GFC2013/sc1_tiling.sh $file ; done

# for file in /lustre0/scratch/ga254/dem_bj/GFC2013/datamask/tif/Hansen_GFC2013_datamask_00N_010E.tif ; do qsub -v file=$file /lustre0/scratch/ga254/scripts_bj/environmental-layers/terrain/procedures/dem_variables/GFC2013/sc1_aggregation_datamask_warp.sh ; done

#PBS -S /bin/bash
#PBS -q fas_normal
#PBS -l mem=1gb
#PBS -l walltime=0:20:00
#PBS -l nodes=1:ppn=1
#PBS -V
#PBS -o /lustre0/scratch/ga254/stdout
#PBS -e /lustre0/scratch/ga254/stderr

# file=$1

module load Tools/Python/2.7.3
module load Libraries/GDAL/1.10.0
module load Libraries/OSGEO/1.10.0

filename=$(basename $file .tif)

INDIR=/lustre0/scratch/ga254/dem_bj/GFC2013/datamask/tif
OUTDIR=/lustre0/scratch/ga254/dem_bj/GFC2013/datamask/tif_1km

geo_string=$(getCorners4Gwarp $file)
ulx=$(echo $geo_string | awk '{ print sprintf("%.0f", $1 )}') # round to integer corner coordinates
uly=$(echo $geo_string | awk '{ print sprintf("%.0f", $2 )}')
lrx=$(echo $geo_string | awk '{ print sprintf("%.0f", $3 )}')
lry=$(echo $geo_string | awk '{ print sprintf("%.0f", $4 )}')

# the southernmost tile is smaller
if [ ${filename:24:3} = '50S' ] ; then ysize=25200 ; else ysize=36000 ; fi

gdalwarp -tr 0.000277777777778 0.000277777777778 -multi -te $ulx $uly $lrx $lry -rn -co COMPRESS=LZW -co ZLEVEL=9 $INDIR/$filename.tif $OUTDIR/tmp_$filename.tif
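# This variant snaps the tile onto the target grid with gdalwarp (-te/-tr, nearest
# neighbour via -rn) instead of cropping one column/row with gdal_translate -srcwin.
# The comment in sc1_aggregation_lossyear.sh notes that the warped result ends up
# overlapping the next tile by one pixel, which is why the -srcwin approach was kept.
# Optional extent comparison, kept commented out:
# gdalinfo $INDIR/$filename.tif      | grep -E 'Size is|Origin|Pixel Size'
# gdalinfo $OUTDIR/tmp_$filename.tif | grep -E 'Size is|Origin|Pixel Size'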

pkfilter -co COMPRESS=LZW -ot Float32 -class 1 -dx 30 -dy 30 -f density -d 30 -i $OUTDIR/tmp_$filename.tif -o $OUTDIR/1km_tmp_$filename.tif
gdal_calc.py -A $OUTDIR/1km_tmp_$filename.tif --calc="(A * 100 )" --co=COMPRESS=LZW --co=ZLEVEL=9 --overwrite --outfile $OUTDIR/1km_tmp2_$filename.tif
gdal_translate -ot UInt16 -co COMPRESS=LZW -co ZLEVEL=9 $OUTDIR/1km_tmp2_$filename.tif $OUTDIR/1kmW_$filename.tif
# rm $OUTDIR/tmp_$filename.tif $OUTDIR/1km_tmp_$filename.tif $OUTDIR/1km_tmp2_$filename.tif

# checkjob -v $PBS_JOBID

terrain/procedures/dem_variables/GFC2013/sc1_aggregation_lossyear.sh

# download the data from http://earthenginepartners.appspot.com/science-2013-global-forest/download.html
# cd /lustre0/scratch/ga254/dem_bj/GFC2013/datamask/tif
# wget -i ../datamask.txt
# create a vrt
# gdalbuildvrt datamask.vrt *.tif
# create new tiles because the original tiles are 36001 pixels wide, so a 30-pixel aggregation cannot be done directly
# x: 1296036 pixels west to east; dropping 1 pixel per tile gives 1296000, which is divisible by 30
# y:
# 10 tiles east to west; each tile is 129600 30 m pixels, which become 4320 1 km pixels; extent from -180 to +180
# 10 tiles north to south; each tile is 46800 30 m pixels, which become 1560 1 km pixels; extent from +80 to -50
# pixel size 0.000277777777778 = 30 m
# pixel size 0.008333333333333 = 1 km

# in the end it was decided to drop one pixel at the end of each tile... maybe to be replaced with a gdalwarp; gdalwarp did not work well, the final pixel overlaps with the next tile

# for file in /lustre0/scratch/ga254/dem_bj/GFC2013/datamask/tif/Hansen_GFC2013_lossyear_*.tif ; do bash /lustre0/scratch/ga254/scripts_bj/environmental-layers/terrain/procedures/dem_variables/GFC2013/sc1_aggregation_lossyear.sh ; done

# for file in /lustre0/scratch/ga254/dem_bj/GFC2013/lossyear/tif/Hansen_GFC2013_lossyear_*.tif ; do qsub -v file=$file /lustre0/scratch/ga254/scripts_bj/environmental-layers/terrain/procedures/dem_variables/GFC2013/sc1_aggregation_lossyear.sh ; done

#PBS -S /bin/bash
#PBS -q fas_normal
#PBS -l mem=1gb
#PBS -l walltime=1:00:00
#PBS -l nodes=1:ppn=1
#PBS -V
#PBS -o /lustre0/scratch/ga254/stdout
#PBS -e /lustre0/scratch/ga254/stderr

# file=$1

module load Tools/Python/2.7.3
module load Libraries/GDAL/1.10.0
module load Libraries/OSGEO/1.10.0

export filename=$(basename $file .tif)

export INDIR=/lustre0/scratch/ga254/dem_bj/GFC2013/lossyear/tif
export OUTDIR=/lustre0/scratch/ga254/dem_bj/GFC2013/lossyear/tif_1km

geo_string=$(getCorners4Gtranslate $file)
ulx=$(echo $geo_string | awk '{ print sprintf("%.0f", $1 )}') # round to integer corner coordinates
uly=$(echo $geo_string | awk '{ print sprintf("%.0f", $2 )}')
lrx=$(echo $geo_string | awk '{ print sprintf("%.0f", $3 )}')
lry=$(echo $geo_string | awk '{ print sprintf("%.0f", $4 )}')

# the southernmost tile is smaller
if [ ${filename:24:3} = '50S' ] ; then ysize=25200 ; else ysize=36000 ; fi

gdal_translate -srcwin 0 0 36000 $ysize -a_ullr $ulx $uly $lrx $lry -co COMPRESS=LZW -co ZLEVEL=9 $INDIR/$filename.tif $OUTDIR/tmp_$filename.tif

seq 1 12 | xargs -n 1 -P 12 bash -c $'
class=$1
pkfilter -co COMPRESS=LZW -ot Float32 -class $class -dx 30 -dy 30 -f density -d 30 -i $OUTDIR/tmp_$filename.tif -o /tmp/1km_y${class}_tmp_$filename.tif
gdal_calc.py -A /tmp/1km_y${class}_tmp_$filename.tif --calc="(A * 100)" --co=COMPRESS=LZW --co=ZLEVEL=9 --overwrite --outfile /tmp/1km_y${class}_tmp2_$filename.tif
rm -f /tmp/1km_y${class}_tmp_$filename.tif
gdal_translate -ot UInt16 -co COMPRESS=LZW -co ZLEVEL=9 /tmp/1km_y${class}_tmp2_$filename.tif $OUTDIR/1km_y${class}_$filename.tif
rm -f /tmp/1km_y${class}_tmp2_$filename.tif
' _
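# The block above fans the 12 loss-year classes out to 12 parallel workers:
# xargs -P 12 starts one "bash -c" per class, the trailing "_" fills $0 so the class
# number from seq lands in $1, and filename/INDIR/OUTDIR are exported earlier so the
# child shells can read them (parameter references inside $'...' are not expanded by
# the parent shell). Minimal sketch of the same pattern (echo only, safe to run):
seq 1 3 | xargs -n 1 -P 3 bash -c $'
class=$1
echo "worker for class ${class} sees OUTDIR=${OUTDIR}"
' _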

rm -f $OUTDIR/tmp_$filename.tif

checkjob -v $PBS_JOBID

terrain/procedures/dem_variables/GFC2013/sc1_aggregation_treecover.sh

# cd /lustre0/scratch/ga254/dem_bj/GFC2013/treecover2000/tif
# wget -i /lustre0/scratch/ga254/dem_bj/GFC2013/treecover2000/treecover2000.txt

# for file in /lustre0/scratch/ga254/dem_bj/GFC2013/treecover2000/tif/Hansen_GFC2013_treecover2000_*.tif ; do qsub -v file=$file /lustre0/scratch/ga254/scripts_bj/environmental-layers/terrain/procedures/dem_variables/GFC2013/sc1_aggregation_treecover.sh ; done

# for file in /lustre0/scratch/ga254/dem_bj/GFC2013/treecover2000/tif/Hansen_GFC2013_treecover2000_*.tif ; do bash /lustre0/scratch/ga254/scripts_bj/environmental-layers/terrain/procedures/dem_variables/GFC2013/sc1_aggregation_treecover.sh $file ; done

#PBS -S /bin/bash
#PBS -q fas_normal
#PBS -l mem=1gb
#PBS -l walltime=0:20:00
#PBS -l nodes=1:ppn=1
#PBS -V
#PBS -o /lustre0/scratch/ga254/stdout
#PBS -e /lustre0/scratch/ga254/stderr

# file=$1

module load Tools/Python/2.7.3
module load Libraries/GDAL/1.10.0
module load Libraries/OSGEO/1.10.0

filename=$(basename $file .tif)

INDIR=/lustre0/scratch/ga254/dem_bj/GFC2013/treecover2000/tif
OUTDIR=/lustre0/scratch/ga254/dem_bj/GFC2013/treecover2000/tif_1km

geo_string=$(getCorners4Gtranslate $file)
ulx=$(echo $geo_string | awk '{ print sprintf("%.0f", $1 )}') # round to integer corner coordinates
uly=$(echo $geo_string | awk '{ print sprintf("%.0f", $2 )}')
lrx=$(echo $geo_string | awk '{ print sprintf("%.0f", $3 )}')
lry=$(echo $geo_string | awk '{ print sprintf("%.0f", $4 )}')

# the southernmost tile is smaller
if [ ${filename:29:3} = '50S' ] ; then ysize=25200 ; else ysize=36000 ; fi
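# ${filename:29:3} extracts the latitude token of the tile name: the prefix
# "Hansen_GFC2013_treecover2000_" is 29 characters long, whereas the datamask and
# lossyear scripts use offset 24 for their shorter prefixes. For example:
f=Hansen_GFC2013_treecover2000_50S_060W ; echo ${f:29:3}   # prints: 50S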

gdal_translate -srcwin 0 0 36000 $ysize -a_ullr $ulx $uly $lrx $lry -co COMPRESS=LZW -co ZLEVEL=9 $INDIR/$filename.tif $OUTDIR/tmp_$filename.tif

pkfilter -co COMPRESS=LZW -ot Float32 -dx 30 -dy 30 -f median -d 30 -i $OUTDIR/tmp_$filename.tif -o $OUTDIR/1km_tmp_$filename.tif
gdal_calc.py -A $OUTDIR/1km_tmp_$filename.tif --calc="(A * 100 )" --co=COMPRESS=LZW --co=ZLEVEL=9 --overwrite --outfile $OUTDIR/1km_tmp2_$filename.tif
gdal_translate -ot UInt16 -co COMPRESS=LZW -co ZLEVEL=9 $OUTDIR/1km_tmp2_$filename.tif $OUTDIR/1km_md_$filename.tif

pkfilter -co COMPRESS=LZW -ot Float32 -dx 30 -dy 30 -f mean -d 30 -i $OUTDIR/tmp_$filename.tif -o $OUTDIR/1km_tmp_$filename.tif
gdal_calc.py -A $OUTDIR/1km_tmp_$filename.tif --calc="(A * 100 )" --co=COMPRESS=LZW --co=ZLEVEL=9 --overwrite --outfile $OUTDIR/1km_tmp2_$filename.tif
gdal_translate -ot UInt16 -co COMPRESS=LZW -co ZLEVEL=9 $OUTDIR/1km_tmp2_$filename.tif $OUTDIR/1km_mn_$filename.tif
rm $OUTDIR/tmp_$filename.tif $OUTDIR/1km_tmp_$filename.tif $OUTDIR/1km_tmp2_$filename.tif
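# Value encoding: the input tree canopy cover is 0-100 %, so the 1 km median (1km_md_*)
# and mean (1km_mn_*) are also 0-100; multiplying by 100 and storing as UInt16 gives
# 0-10000, which the "Scale=0.01" tag written later by sc2_merge_treecover.sh documents.
# Optional readback to percent, kept commented out (the output path is illustrative):
# gdal_calc.py -A $OUTDIR/1km_mn_$filename.tif --calc="A*0.01" --outfile=/tmp/check_percent_$filename.tif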
terrain/procedures/dem_variables/GFC2013/sc1_tiling.sh_delate

# download the data from http://earthenginepartners.appspot.com/science-2013-global-forest/download.html
# cd /lustre0/scratch/ga254/dem_bj/GFC2013/datamask/tif
# wget -i ../datamask.txt
# create a vrt
# gdalbuildvrt datamask.vrt *.tif
# create new tiles because the original tiles are 36001 pixels wide, so a 30-pixel aggregation cannot be done directly
# x: 1296036 pixels west to east; dropping 1 pixel per tile gives 1296000, which is divisible by 30
# y:
# 10 tiles east to west; each tile is 129600 30 m pixels, which become 4320 1 km pixels; extent from -180 to +180
# 10 tiles north to south; each tile is 46800 30 m pixels, which become 1560 1 km pixels; extent from +80 to -50
# pixel size 0.000277777777778 = 30 m
# pixel size 0.008333333333333 = 1 km

# in the end it was decided to drop one pixel at the end of each tile... maybe to be replaced with a gdalwarp

# for file in /lustre0/scratch/ga254/dem_bj/GFC2013/datamask/tif/Hansen_GFC2013_datamask_*.tif ; do bash /lustre0/scratch/ga254/scripts_bj/environmental-layers/terrain/procedures/dem_variables/GFC2013/sc1_tiling.sh $file ; done

# for file in /lustre0/scratch/ga254/dem_bj/GFC2013/datamask/tif/Hansen_GFC2013_datamask_*.tif ; do qsub -v file=$file /lustre0/scratch/ga254/scripts_bj/environmental-layers/terrain/procedures/dem_variables/GFC2013/sc1_tiling.sh ; done

#PBS -S /bin/bash
#PBS -q fas_normal
#PBS -l mem=1gb
#PBS -l walltime=0:20:00
#PBS -l nodes=1:ppn=1
#PBS -V
#PBS -o /lustre0/scratch/ga254/stdout
#PBS -e /lustre0/scratch/ga254/stderr

# file=$1

module load Tools/Python/2.7.3
module load Libraries/GDAL/1.10.0
module load Libraries/OSGEO/1.10.0

filename=$(basename $file .tif)

INDIR=/lustre0/scratch/ga254/dem_bj/GFC2013/datamask/tif
OUTDIR=/lustre0/scratch/ga254/dem_bj/GFC2013/datamask/tif_1km

geo_string=$(getCorners4Gtranslate $file)
ulx=$(echo $geo_string | awk '{ print sprintf("%.0f", $1 )}') # round to integer corner coordinates
uly=$(echo $geo_string | awk '{ print sprintf("%.0f", $2 )}')
lrx=$(echo $geo_string | awk '{ print sprintf("%.0f", $3 )}')
lry=$(echo $geo_string | awk '{ print sprintf("%.0f", $4 )}')

# the southernmost tile is smaller
if [ ${filename:24:3} = '50S' ] ; then ysize=25200 ; else ysize=36000 ; fi

gdal_translate -srcwin 0 0 36000 $ysize -a_ullr $ulx $uly $lrx $lry -co COMPRESS=LZW -co ZLEVEL=9 $INDIR/$filename.tif $OUTDIR/tmp_$filename.tif
pkfilter -co COMPRESS=LZW -ot Float32 -class 1 -dx 30 -dy 30 -f density -d 30 -i $OUTDIR/tmp_$filename.tif -o $OUTDIR/1km_tmp_$filename.tif
gdal_calc.py -A $OUTDIR/1km_tmp_$filename.tif --calc="(A * 100 )" --co=COMPRESS=LZW --co=ZLEVEL=9 --overwrite --outfile $OUTDIR/1km_tmp2_$filename.tif
gdal_translate -ot UInt16 -co COMPRESS=LZW -co ZLEVEL=9 $OUTDIR/1km_tmp2_$filename.tif $OUTDIR/1km_$filename.tif
rm $OUTDIR/tmp_$filename.tif $OUTDIR/1km_tmp_$filename.tif $OUTDIR/1km_tmp2_$filename.tif

# checkjob -v $PBS_JOBID

terrain/procedures/dem_variables/GFC2013/sc2_merge_datamask.sh

# qsub /lustre0/scratch/ga254/scripts_bj/environmental-layers/terrain/procedures/dem_variables/GFC2013/sc2_merge_datamask.sh

#PBS -S /bin/bash
#PBS -q fas_normal
#PBS -l mem=4gb
#PBS -l walltime=1:00:00
#PBS -l nodes=1:ppn=1
#PBS -V
#PBS -o /lustre0/scratch/ga254/stdout
#PBS -e /lustre0/scratch/ga254/stderr

INDIR=/lustre0/scratch/ga254/dem_bj/GFC2013/datamask/tif_1km
OUTDIR=/lustre0/scratch/ga254/dem_bj/GFC2013/datamask

rm -f $OUTDIR/water_perc_NE.tif $OUTDIR/water_perc_SE.tif $OUTDIR/water_perc_NW.tif $OUTDIR/water_perc_SW.tif

gdal_merge.py -co COMPRESS=LZW -co ZLEVEL=9 $INDIR/1km_Hansen_GFC2013_datamask_??N_???E.tif -o $OUTDIR/water_perc_NE.tif
gdal_merge.py -co COMPRESS=LZW -co ZLEVEL=9 $INDIR/1km_Hansen_GFC2013_datamask_??S_???E.tif -o $OUTDIR/water_perc_SE.tif
gdal_merge.py -co COMPRESS=LZW -co ZLEVEL=9 $INDIR/1km_Hansen_GFC2013_datamask_??N_???W.tif -o $OUTDIR/water_perc_NW.tif
gdal_merge.py -co COMPRESS=LZW -co ZLEVEL=9 $INDIR/1km_Hansen_GFC2013_datamask_??S_???W.tif -o $OUTDIR/water_perc_SW.tif

gdal_merge.py -co COMPRESS=LZW -co ZLEVEL=9 $OUTDIR/water_perc_??.tif -o $OUTDIR/land_frequency_Hansen_GFC2013.tif
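# The four quadrant merges keep each gdal_merge.py call to a shorter file list before
# the global mosaic, presumably for memory and command-length reasons. A roughly
# equivalent single-pass alternative, kept commented out, is a virtual mosaic plus one
# translate (gdalbuildvrt is already used this way in the sc1 header comments; the
# /tmp path below is illustrative):
# gdalbuildvrt /tmp/datamask_1km.vrt $INDIR/1km_Hansen_GFC2013_datamask_*.tif
# gdal_translate -co COMPRESS=LZW -co ZLEVEL=9 /tmp/datamask_1km.vrt $OUTDIR/land_frequency_Hansen_GFC2013.tif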

gdal_edit.py -mo "Author=giuseppe.amatulli@gmail.com using pktools" \
-mo "Input dataset=Global Forest Change 2000-2012 (Hansen 2013)" \
-mo "Input layer=Data mask" \
-mo "Output=Land Frequency(%)" \
-mo "Offset=0" -mo "Scale=0.01" $OUTDIR/land_frequency_Hansen_GFC2013.tif

terrain/procedures/dem_variables/GFC2013/sc2_merge_lossyear.sh

# for YEAR in `seq 1 12` ; do qsub -v YEAR=$YEAR /lustre0/scratch/ga254/scripts_bj/environmental-layers/terrain/procedures/dem_variables/GFC2013/sc2_merge_lossyear.sh ; done
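# qsub -v YEAR=$YEAR injects YEAR into the job environment, which is how the bare
# ${YEAR} references further down get their value; there is no positional-argument
# parsing in this script. The check below is kept as a comment, because an executable
# line placed before the #PBS directives would stop PBS from reading them:
# echo "merging loss-year class ${YEAR} (calendar year $(( YEAR + 2000 )))"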

#PBS -S /bin/bash
#PBS -q fas_normal
#PBS -l mem=2gb
#PBS -l walltime=1:00:00
#PBS -l nodes=1:ppn=1
#PBS -V
#PBS -o /lustre0/scratch/ga254/stdout
#PBS -e /lustre0/scratch/ga254/stderr

INDIR=/lustre0/scratch/ga254/dem_bj/GFC2013/lossyear/tif_1km
OUTDIR=/lustre0/scratch/ga254/dem_bj/GFC2013/lossyear

# md stands for median
# mn stands for mean

rm -f /tmp/lossyear${YEAR}_NE.tif /tmp/lossyear${YEAR}_SE.tif /tmp/lossyear${YEAR}_NW.tif /tmp/lossyear${YEAR}_SW.tif /tmp/lossyear${YEAR}.tif

gdal_merge.py -co COMPRESS=LZW -co ZLEVEL=9 $INDIR/1km_y${YEAR}_Hansen_GFC2013_lossyear_??N_???E.tif -o /tmp/lossyear${YEAR}_NE.tif
gdal_merge.py -co COMPRESS=LZW -co ZLEVEL=9 $INDIR/1km_y${YEAR}_Hansen_GFC2013_lossyear_??S_???E.tif -o /tmp/lossyear${YEAR}_SE.tif
gdal_merge.py -co COMPRESS=LZW -co ZLEVEL=9 $INDIR/1km_y${YEAR}_Hansen_GFC2013_lossyear_??N_???W.tif -o /tmp/lossyear${YEAR}_NW.tif
gdal_merge.py -co COMPRESS=LZW -co ZLEVEL=9 $INDIR/1km_y${YEAR}_Hansen_GFC2013_lossyear_??S_???W.tif -o /tmp/lossyear${YEAR}_SW.tif

gdal_merge.py -co COMPRESS=LZW -co ZLEVEL=9 /tmp/lossyear${YEAR}_??.tif -o /tmp/lossyear${YEAR}.tif

gdal_translate -co COMPRESS=LZW -co ZLEVEL=9 /tmp/lossyear${YEAR}.tif $OUTDIR/lossyear${YEAR}_frequency_GFC2013.tif

rm -f /tmp/lossyear${YEAR}.tif /tmp/lossyear${YEAR}_NE.tif /tmp/lossyear${YEAR}_SE.tif /tmp/lossyear${YEAR}_NW.tif /tmp/lossyear${YEAR}_SW.tif

YEARL=`expr 2000 + $YEAR`

# rename from the class number (1-12) to the calendar year, so the gdal_edit call below finds the file
mv $OUTDIR/lossyear${YEAR}_frequency_GFC2013.tif $OUTDIR/lossyear${YEARL}_frequency_GFC2013.tif

gdal_edit.py -mo "Author=giuseppe.amatulli@gmail.com using pktools" \
-mo "Input dataset=Global Forest Change 2000-2012 (Hansen 2013)" \
-mo "Input layer=Year of gross forest cover loss event" \
-mo "Output=Gross Forest Cover Loss during $YEARL in Frequency(%)" \
-mo "Offset=0" -mo "Scale=0.01" $OUTDIR/lossyear${YEARL}_frequency_GFC2013.tif
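# Note that gdal_edit.py -mo writes these entries as plain dataset metadata tags, so
# GDAL tools will not rescale the pixel values automatically; apply the documented
# factor yourself when you need the values on the scale given in the "Output" tag.
# Readback example, kept commented out (2005 is just one of the years produced here):
# gdal_calc.py -A $OUTDIR/lossyear2005_frequency_GFC2013.tif --calc="A*0.01" --outfile=/tmp/lossyear2005_scaled.tif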

checkjob -v $PBS_JOBID

terrain/procedures/dem_variables/GFC2013/sc2_merge_treecover.sh

# for PAR in mn md ; do qsub -v PAR=$PAR /lustre0/scratch/ga254/scripts_bj/environmental-layers/terrain/procedures/dem_variables/GFC2013/sc2_merge_treecover.sh ; done

#PBS -S /bin/bash
#PBS -q fas_normal
#PBS -l mem=4gb
#PBS -l walltime=1:00:00
#PBS -l nodes=1:ppn=2
#PBS -V
#PBS -o /lustre0/scratch/ga254/stdout
#PBS -e /lustre0/scratch/ga254/stderr

INDIR=/lustre0/scratch/ga254/dem_bj/GFC2013/treecover2000/tif_1km
OUTDIR=/lustre0/scratch/ga254/dem_bj/GFC2013/treecover2000

# md stands for median
# mn stands for mean

rm -f /tmp/treecover_${PAR}_NE.tif /tmp/treecover_${PAR}_SE.tif /tmp/treecover_${PAR}_NW.tif /tmp/treecover_${PAR}_SW.tif

gdal_merge.py -co COMPRESS=LZW -co ZLEVEL=9 $INDIR/1km_${PAR}_Hansen_GFC2013_treecover2000_??N_???E.tif -o /tmp/treecover_${PAR}_NE.tif
gdal_merge.py -co COMPRESS=LZW -co ZLEVEL=9 $INDIR/1km_${PAR}_Hansen_GFC2013_treecover2000_??S_???E.tif -o /tmp/treecover_${PAR}_SE.tif
gdal_merge.py -co COMPRESS=LZW -co ZLEVEL=9 $INDIR/1km_${PAR}_Hansen_GFC2013_treecover2000_??N_???W.tif -o /tmp/treecover_${PAR}_NW.tif
gdal_merge.py -co COMPRESS=LZW -co ZLEVEL=9 $INDIR/1km_${PAR}_Hansen_GFC2013_treecover2000_??S_???W.tif -o /tmp/treecover_${PAR}_SW.tif

gdal_merge.py -co COMPRESS=LZW -co ZLEVEL=9 /tmp/treecover_${PAR}_??.tif -o /tmp/tree_${PAR}_frequency_GFC2013.tif

gdal_translate -co COMPRESS=LZW -co ZLEVEL=9 /tmp/tree_${PAR}_frequency_GFC2013.tif $OUTDIR/tree_${PAR}_frequency_GFC2013.tif

rm -f /tmp/tree_${PAR}_frequency_GFC2013.tif

if [ $PAR = 'mn' ] ; then PAR1=Mean ; fi
if [ $PAR = 'md' ] ; then PAR1=Median ; fi

gdal_edit.py -mo "Author=giuseppe.amatulli@gmail.com using pktools" \
-mo "Input dataset=Global Forest Change 2000-2012 (Hansen 2013)" \
-mo "Input layer=Tree canopy cover for year 2000 (%)" \
-mo "Output=$PAR1 of Tree cover (%)" \
-mo "Offset=0" -mo "Scale=0.01" $OUTDIR/tree_${PAR}_frequency_GFC2013.tif

pushing solar update and Hansen tree cover aggregation