#### Script to facilitate processing of MOD35 data

setwd("/nobackupp1/awilso10/mod35")

library(rgdal)
library(raster)
library(RSQLite)

verbose=T

## get MODLAND tile information
tb=read.table("http://landweb.nascom.nasa.gov/developers/sn_tiles/sn_bound_10deg.txt",skip=6,nrows=648,header=T)
tb$tile=paste("h",sprintf("%02d",tb$ih),"v",sprintf("%02d",tb$iv),sep="")
tb=tb[tb$lon_min!=-999,]
save(tb,file="modlandTiles.Rdata")
load("modlandTiles.Rdata")
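
## Optional sanity check (an addition, not part of the original workflow): confirm the
## tile table was parsed as expected before subsetting below.  Only columns already
## used elsewhere in this script are referenced.
if(verbose){
  print(dim(tb))
  print(head(tb[,c("ih","iv","tile","lon_min","lon_max","lat_min","lat_max")]))
}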

## Choose some tiles to process
### list of tiles to process
tiles=c("h10v08","h11v08","h12v08","h10v07","h11v07","h12v07") # South America
## a northern block of tiles
tiles=apply(expand.grid(paste("h",11:17,sep=""),v=c("v00","v01","v02","v03","v04")),1,function(x) paste(x,collapse="",sep=""))
## subset to MODLAND tiles
alltiles=system("ls -r MODTILES/ | grep tif$ | cut -c1-6 | sort | uniq - ",intern=T)

## subset to tiles in global region (not outside global boundary in sinusoidal projection)
tiles=tiles[tiles%in%alltiles]

## subset tile corner matrix to tiles selected above
tile_bb=tb[tb$tile%in%tiles,]
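
## Optional check (an addition, not in the original script): warn if any selected
## tile has no entry in the MODLAND corner table.
if(any(!tiles%in%tb$tile)) warning(paste("No corner coordinates found for:",paste(tiles[!tiles%in%tb$tile],collapse=", ")))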

### get list of files to process
datadir="/nobackupp4/datapool/modis/MOD35_L2.006/"

outdir="daily/" #paste("daily/",tile,sep="")

## find swaths in region from sqlite database for the specified date/tile
## this takes a while, about 30 minutes, so only rebuild if you need to update what's available...
rebuildswathtable=F
if(rebuildswathtable){
  ## path to swath database
  db="/nobackupp4/pvotava/DB/export/swath_geo.sql.sqlite3.db"
  con=dbConnect("SQLite", dbname = db)
  fs=do.call(rbind.data.frame,lapply(1:nrow(tile_bb),function(i){
    d=dbGetQuery(con,paste("SELECT * from swath_geo6
                            WHERE east>=",tile_bb$lon_min[i]," AND
                                  west<=",tile_bb$lon_max[i]," AND
                                  north>=",tile_bb$lat_min[i]," AND
                                  south<=",tile_bb$lat_max[i])
                 )
    d$tile=tile_bb$tile[i]
    print(paste("Finished tile",tile_bb$tile[i]))
    return(d)
  }))
  con=dbDisconnect(con)
  fs$id=substr(fs$id,7,19)

  ## Identify which swaths are available in the datapool
  swaths=data.frame(path=list.files(datadir,pattern="hdf$",recursive=T,full=T),stringsAsFactors=F)  # all swaths in data pool
  swaths$id=substr(basename(swaths$path),10,22)
  fs$exists=fs$id%in%swaths$id
  fs$path=swaths$path[match(fs$id,swaths$id)]

  ## write tile-swath list to disk
  save(fs,swaths,file="swathtile.Rdata")
}

load("swathtile.Rdata")

if(verbose) print(paste("###############",nrow(fs)," swath IDs received from database"))
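
## Optional summary (an added sketch, not in the original): tabulate swaths per tile
## and whether each was found in the datapool.
if(verbose) print(table(tile=fs$tile,in_datapool=fs$exists))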

## get all unique dates
fs$dateid=format(as.Date(paste(fs$year,fs$day,sep=""),"%Y%j"),"%Y%m%d")
#alldates=unique(fs$dateid[fs$exists])

#### Generate submission file
startdate="2000-03-01"
stopdate="2011-12-31"
## just 2009
startdate="2009-01-01"
stopdate="2009-12-31"

alldates=format(seq(as.Date(startdate),as.Date(stopdate),1),"%Y%m%d")

proclist=expand.grid(date=alldates,tile=tiles)
proclist$year=substr(proclist$date,1,4)

## identify tile-dates with no available swaths
avail=unique(cbind.data.frame(tile=fs$tile,date=fs$dateid)[fs$exists,])
proclist$avail=paste(proclist$tile,proclist$date,sep="_")%in%paste(avail$tile,avail$date,sep="_")

## identify which have been completed
#fdone=data.frame(path=system("ssh lou 'find MOD35/daily -name \"*.nc\"' ",intern=T))
fdone=data.frame(path=list.files(outdir,pattern="nc$",recursive=T))
fdone$date=substr(basename(as.character(fdone$path)),14,21)
fdone$tile=substr(basename(as.character(fdone$path)),7,12)
proclist$done=paste(proclist$tile,proclist$date,sep="_")%in%substr(basename(as.character(fdone$path)),7,21)

### report on what has already been processed
print(paste(sum(!proclist$done)," out of ",nrow(proclist)," (",round(100*sum(!proclist$done)/nrow(proclist),2),"%) remain"))
table(tile=proclist$tile[proclist$done],year=proclist$year[proclist$done])
table(table(tile=proclist$tile[!proclist$done],year=proclist$year[!proclist$done]))
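
## Optional per-tile completeness summary (an added sketch): percent of requested
## tile-dates already processed.
if(verbose) print(round(100*tapply(proclist$done,proclist$tile,mean),1))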

### explore tile counts
#x=table(tile=proclist$tile[proclist$done],year=proclist$year[proclist$done])
#x=x[order(rownames(x)),]

script="/u/awilso10/environmental-layers/climate/procedures/MOD35_L2_process.r"

## write the table processed by mpiexec
tp=T  # rerun everything
tp=((!proclist$done)&proclist$avail)  # date-tiles to process
table(Available=proclist$avail,Completed=proclist$done)

write.table(paste("--verbose ",script," --date ",proclist$date[tp]," --verbose T --tile ",proclist$tile[tp],sep=""),
            file=paste("notdone.txt",sep=""),row.names=F,col.names=F,quote=F)
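
## Optional preview of the job list from within R (an added convenience; the same
## check is done with `cat notdone.txt | head` below).
if(verbose) print(head(readLines("notdone.txt")))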

### qsub script
cat(paste("
#PBS -S /bin/bash
#PBS -l select=20:ncpus=8:mpiprocs=8
##PBS -l select=100:ncpus=8:mpiprocs=8
##PBS -l walltime=8:00:00
#PBS -l walltime=4:00:00
#PBS -j n
#PBS -m be
#PBS -N mod35
#PBS -q normal
##PBS -q devel
#PBS -V

#CORES=800
CORES=160

HDIR=/u/armichae/pr/
source $HDIR/etc/environ.sh
source /u/awilso10/environ.sh
source /u/awilso10/.bashrc
IDIR=/nobackupp1/awilso10/mod35/
##WORKLIST=$HDIR/var/run/pxrRgrs/work.txt
WORKLIST=$IDIR/notdone.txt
EXE=Rscript
LOGSTDOUT=$IDIR/log/mod35_stdout
LOGSTDERR=$IDIR/log/mod35_stderr
### use mpiexec to parallelize across days
mpiexec -np $CORES pxargs -a $WORKLIST -p $EXE -v -v -v --work-analyze 1> $LOGSTDOUT 2> $LOGSTDERR
",sep=""),file=paste("mod35_qsub",sep=""))

### Check the files
system(paste("cat mod35_qsub",sep=""))
system(paste("cat notdone.txt | head",sep=""))
system(paste("cat notdone.txt | wc -l ",sep=""))

## Submit it
system(paste("qsub mod35_qsub",sep=""))

system("qstat -u awilso10")
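
## Optional progress check (an added sketch; assumes the log/ directory referenced
## in the qsub script above exists under the current working directory):
# system("tail log/mod35_stdout")
# system("tail log/mod35_stderr")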

#######################################################
### Now submit the script to generate the climatologies

## report 'mostly' finished tiles
## this relies on proclist above, so be sure to update it before running
md=table(tile=proclist$tile[!proclist$done],year=proclist$year[!proclist$done])
mdt=rownames(md)[apply(md,1,function(x) all(x<10))]  # tiles with fewer than 10 missing dates in each year
tiles=mdt

tiles
ctiles=c("h10v08","h11v08","h12v08","h10v07","h11v07","h12v07") # South America

ctiles=tiles  #[c(1:3)] #subset to only some tiles (for example if some aren't finished yet)?
climatescript="/pleiades/u/awilso10/environmental-layers/climate/procedures/MOD35_Climatology.r"

## check which tiles have been processed and are on lou with a filename "MOD35_[tile].nc"
cdone=data.frame(path="",tile="")  # use this if you want to re-run everything
cdone=data.frame(path=sapply(strsplit(basename(
  system("ssh lou 'find MOD35/summary -name \"MOD35_h[0-9][0-9]v[0-9][0-9].nc\"' ",intern=T)),split="_"),function(x) x[2]))
cdone=data.frame(path=sapply(strsplit(basename(
  system("find summary -name \"MOD35_h[0-9][0-9]v[0-9][0-9].nc\"",intern=T)),split="_"),function(x) x[2]))
cdone$tile=substr(basename(as.character(cdone$path)),1,6)
print(paste(length(ctiles[!ctiles%in%cdone$tile]),"tiles still need to be processed"))

## write the table processed by mpiexec
write.table(paste("--verbose ",climatescript," --verbose T --tile ",ctiles[!ctiles%in%cdone$tile],sep=""),
            file=paste("notdone_climate.txt",sep=""),row.names=F,col.names=F,quote=F)

## delay start until previous jobs have finished?
delay=F
## check running jobs to get JobID of job you want to wait for
system("qstat -u awilso10")
## enter JobID here:
job="881394.pbspl1.nas.nasa.gov"

### qsub script
cat(paste("
#PBS -S /bin/bash
#PBS -l select=10:ncpus=8:mem=94
#PBS -l walltime=2:00:00
#PBS -j n
#PBS -m be
#PBS -N mod35_climate
#PBS -q devel
##PBS -q normal
##PBS -q ldan
#PBS -V
",if(delay) paste("#PBS -W depend=afterany:",job,sep=""),"

CORES=80
HDIR=/u/armichae/pr/
source $HDIR/etc/environ.sh
source /pleiades/u/awilso10/environ.sh
source /pleiades/u/awilso10/.bashrc
IDIR=/nobackupp1/awilso10/mod35/
##WORKLIST=$HDIR/var/run/pxrRgrs/work.txt
WORKLIST=$IDIR/notdone_climate.txt
EXE=Rscript
LOGSTDOUT=$IDIR/log/climatology_stdout
LOGSTDERR=$IDIR/log/climatology_stderr
### use mpiexec to parallelize across tiles
mpiexec -np $CORES pxargs -a $WORKLIST -p $EXE -v -v -v --work-analyze 1> $LOGSTDOUT 2> $LOGSTDERR
",sep=""),file=paste("mod35_climatology_qsub",sep=""))

## check files
system(paste("cat mod35_climatology_qsub",sep=""))        # qsub submission script
system(paste("cat notdone_climate.txt | head",sep=""))    # top of job file
system(paste("cat notdone_climate.txt | wc -l ",sep=""))  # number of jobs to be run

## Submit it
system(paste("qsub mod35_climatology_qsub",sep=""))

## check progress
system("qstat -u awilso10")
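
## Optional check once the climatology jobs finish (an added sketch; assumes the
## climatology script writes MOD35_[tile].nc files under summary/, as used below):
if(verbose) print(list.files("summary",pattern="^MOD35_h[0-9]{2}v[0-9]{2}\\.nc$"))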

## start interactive job on compute node for debugging
# system("qsub -I -l walltime=2:00:00 -lselect=2:ncpus=16:model=san -q devel")

#################################################################
### copy the files back to Yale

system("ssh lou")
#scp `find MOD35/summary -name "MOD35_h[0-9][0-9]v[0-9][0-9].nc"` adamw@acrobates.eeb.yale.edu:/data/personal/adamw/projects/interp/data/modis/mod35/summary/
system("rsync -cavv `find summary -name \"MOD35_h[0-9][0-9]v[0-9][0-9]_mean.nc\"` adamw@acrobates.eeb.yale.edu:/data/personal/adamw/projects/interp/data/modis/mod35/summary/")
system("rsync -cavv `find summary -name \"MOD35_h[0-9][0-9]v[0-9][0-9].nc\"` adamw@acrobates.eeb.yale.edu:/data/personal/adamw/projects/interp/data/modis/mod35/summary/")

system("gdalbuildvrt MOD35C6_2009.vrt summary/*2009mean.nc ")
system("gdal_translate -stats -co \"COMPRESS=LZW\" -of GTiff MOD35C6_2009.vrt MOD35C6_2009.tif ")
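
## Optional quick look at the mosaic (an added sketch using the raster package loaded
## above; assumes the gdal_translate step above succeeded):
# plot(raster("MOD35C6_2009.tif"))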
system("scp MOD35C6_2009.tif adamw@acrobates.eeb.24.177.10.190:/Users/adamw/Downloads/")
## exit  # if running the transfer commands above interactively on lou, exit the session here