#### Script to facilitate processing of MOD06 data

setwd("/nobackupp1/awilso10/mod06")

library(rgdal)
library(raster)
library(RSQLite)

verbose=T

## get MODLAND tile information
tb=read.table("http://landweb.nascom.nasa.gov/developers/sn_tiles/sn_bound_10deg.txt",skip=6,nrows=648,header=T)
tb$tile=paste("h",sprintf("%02d",tb$ih),"v",sprintf("%02d",tb$iv),sep="")
save(tb,file="modlandTiles.Rdata")
load("modlandTiles.Rdata")
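## tb now holds one row per MODLAND tile (ih/iv indices plus lat/lon bounds) with a
## "tile" label such as "h08v05"; these bounding boxes are subset into tile_bb below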

## delete temporary log file that can grow to GB
system("rm /nobackupp1/awilso10/software/heg/TOOLKIT_MTD/runtime/LogStatus")

tile="h11v08"  # Venezuela
#tile="h11v07" # Venezuela coast
#tile="h09v04" # Oregon
tile="h21v09"  # Kenya

### list of tiles to process
tiles=c("h11v08","h21v09","h08v04","h09v04","h08v05","h09v05","h20v11","h31v11")
tiles=tiles[c(5,7,8)]
tile_bb=tb[tb$tile%in%tiles,]
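## note: tiles[c(5,7,8)] above keeps only "h08v05", "h20v11", and "h31v11";
## tile_bb holds the corresponding lat/lon bounding boxes from tb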

### get list of files to process
datadir="/nobackupp4/datapool/modis/MOD06_L2.005/"
#datadir="/nobackupp1/awilso10/mod06/data" #for data downloaded from

outdir="daily/" #paste("daily/",tile,sep="")

## find swaths in region from sqlite database for the specified date/tile
## path to swath database
db="/nobackupp4/pvotava/DB/export/swath_geo.sql.sqlite3.db"
con=dbConnect("SQLite", dbname = db)
fs=do.call(rbind.data.frame,lapply(1:nrow(tile_bb),function(i){
  d=dbGetQuery(con,paste("SELECT * from swath_geo
            WHERE east>=",tile_bb$lon_min[i]," AND
                  west<=",tile_bb$lon_max[i]," AND
                  north>=",tile_bb$lat_min[i]," AND
                  south<=",tile_bb$lat_max[i])
  )
  d$tile=tile_bb$tile[i]
  print(paste("Finished tile",tile_bb$tile[i]))
  return(d)
}))
con=dbDisconnect(con)
fs$id=substr(fs$id,7,19)
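## fs now lists every MOD06_L2 swath whose bounding box overlaps one of the processing
## tiles, with fs$tile recording which tile it matched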

...
alldates=unique(fs$dateid[fs$exists])

#### Generate submission file
alldates=format(seq(as.Date("2000-03-01"),as.Date("2011-12-31"),1),"%Y%m%d")
proclist=expand.grid(date=alldates,tile=tiles)
proclist$year=substr(proclist$date,1,4)

## identify which have been completed
## output filenames are assumed to follow MOD06_<tile>_<YYYYMMDD>.nc, so the tile occupies
## characters 7-12 of the basename and the date characters 14-21
fdone=data.frame(path=list.files(outdir,pattern="nc$",recursive=T))
fdone$date=substr(basename(as.character(fdone$path)),14,21)
fdone$tile=substr(basename(as.character(fdone$path)),7,12)

## identify which date-tiles have already been run
proclist$done=paste(proclist$tile,proclist$date,sep="_")%in%substr(basename(as.character(fdone$path)),7,21)
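## optional sanity check: inspect a few completed files and count finished tile-days
# head(fdone)
# sum(proclist$done)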

### report on what has already been processed
print(paste("Overview of completed tile-days (",round(100*sum(proclist$done)/nrow(proclist),1),"%)"))
table(tile=proclist$tile[proclist$done],year=proclist$year[proclist$done])

#updatedone=F #update the "done" list using the table below
#if(updatedone&exists("fdly")){ #update using table from below
...

## Identify which date-tile combinations still need to be processed
## This table will be used to tell mpiexec which jobs to run
#notdone=alldates[!done]

script="/u/awilso10/environmental-layers/climate/procedures/MOD06_L2_process.r"
climatescript="/u/awilso10/environmental-layers/climate/procedures/MOD06_Climatology.r"

## write the table processed by mpiexec
write.table(paste("--verbose ",script," --date ",proclist$date[!proclist$done]," --verbose T --tile ",proclist$tile[!proclist$done],sep=""),
            file="notdone.txt",row.names=F,col.names=F,quote=F)
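## each line of notdone.txt should look something like (illustrative date/tile):
## --verbose /u/awilso10/environmental-layers/climate/procedures/MOD06_L2_process.r --date 20000301 --verbose T --tile h08v05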

### qsub script
cat(paste("
#PBS -S /bin/bash
#PBS -l select=50:ncpus=8:mpiprocs=8
##PBS -l select=50:ncpus=8:mpiprocs=8
##PBS -l select=2:ncpus=4:mpiprocs=4
#PBS -l walltime=5:00:00
#PBS -j n
#PBS -m be
#PBS -N mod06
#PBS -q normal
#PBS -V

CORES=400
...
source /u/awilso10/.bashrc
IDIR=/nobackupp1/awilso10/mod06/
##WORKLIST=$HDIR/var/run/pxrRgrs/work.txt
WORKLIST=$IDIR/notdone.txt
EXE=Rscript
LOGSTDOUT=$IDIR/log/swath_stdout
LOGSTDERR=$IDIR/log/swath_stderr
### use mpiexec to parallelize across days
mpiexec -np $CORES pxargs -a $WORKLIST -p $EXE -v -v -v --work-analyze 1> $LOGSTDOUT 2> $LOGSTDERR
",sep=""),file=paste("mod06_qsub",sep=""))
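## note on the job above: pxargs (an xargs-like MPI work distributor) is expected to hand each
## line of $WORKLIST to one of the MPI ranks started by mpiexec, running $EXE (Rscript) once
## per line with that line as its arguments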

### Check the files
system(paste("cat mod06_qsub",sep=""))
system(paste("cat notdone.txt | head",sep=""))
system(paste("cat notdone.txt | wc -l ",sep=""))

## Submit it
system(paste("qsub mod06_qsub",sep=""))

#######################################################
### Now submit the script to generate the climatologies

tiles
ctiles=tiles[c(1,3)] #subset to only some tiles (for example if some aren't finished yet)?
climatescript="/u/awilso10/environmental-layers/climate/procedures/MOD06_Climatology.r"

## write the table processed by mpiexec
write.table(paste("--verbose ",climatescript," --verbose T --tile ",ctiles,sep=""),
            file="notdone_climate.txt",row.names=F,col.names=F,quote=F)

### qsub script
cat(paste("
#PBS -S /bin/bash
#PBS -l select=1:ncpus=8:mpiprocs=8
##PBS -l select=2:ncpus=4:mpiprocs=4
#PBS -l walltime=5:00:00
#PBS -j n
#PBS -m be
#PBS -N mod06_climate
#PBS -q normal
#PBS -V

CORES=8
HDIR=/u/armichae/pr/
source $HDIR/etc/environ.sh
source /u/awilso10/.bashrc
IDIR=/nobackupp1/awilso10/mod06/
##WORKLIST=$HDIR/var/run/pxrRgrs/work.txt
WORKLIST=$IDIR/notdone_climate.txt
EXE=Rscript
LOGSTDOUT=$IDIR/log/climatology_stdout
LOGSTDERR=$IDIR/log/climatology_stderr
### use mpiexec to parallelize across tiles
mpiexec -np $CORES pxargs -a $WORKLIST -p $EXE -v -v -v --work-analyze 1> $LOGSTDOUT 2> $LOGSTDERR
",sep=""),file=paste("mod06_climatology_qsub",sep=""))
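## for reference, each line of notdone_climate.txt should look something like:
## --verbose /u/awilso10/environmental-layers/climate/procedures/MOD06_Climatology.r --verbose T --tile h08v05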

## check files
system(paste("cat mod06_climatology_qsub",sep="")) #qsub submission script
system(paste("cat notdone_climate.txt | head",sep="")) #top of job file
system(paste("cat notdone_climate.txt | wc -l ",sep="")) #number of jobs to be run

## Submit it
system(paste("qsub mod06_climatology_qsub",sep=""))

## check progress
system("qstat -u awilso10")

## start interactive job on compute node for debugging
# system("qsub -I -l walltime=2:00:00 -lselect=2:ncpus=16:model=san -q devel")

#################################################################
### copy the files back to Yale
summarydir="summary"

sumfiles=list.files(summarydir,pattern="^MOD06_.*[0-9][.]nc",full=T)

system(paste("scp ",paste(sumfiles,collapse=" ")," adamw@acrobates.eeb.yale.edu:/data/personal/adamw/projects/interp/data/modis/mod06/summary",sep=""))

#system(paste("scp ",tsdir,"/MOD06_",tile,"*.nc adamw@acrobates.eeb.yale.edu:/data/personal/adamw/projects/interp/data/modis/mod06/summary",sep=""))
#system(paste("scp ",paste(fs$path[40421:40422],collapse=" ")," adamw@acrobates.eeb.yale.edu:/data/personal/adamw/projects/interp/data/modis/mod06/swaths",sep=""))


Updated the submit script to process multiple tiles and days in the same batch. Also separated the daily processing from the climatologies.