#### Script to facilitate processing of MOD35 data
### This script is meant to be run iteratively, rather than unsupervised. There are several steps that require manual checking (such as choosing the number of cores, etc.)

## working directory
setwd("/nobackupp1/awilso10/mod35")

## load libraries
library(rgdal)
library(raster)
library(RSQLite)

## flag to increase verbosity of output
verbose=T

## get MODLAND tile information
tb=read.table("http://landweb.nascom.nasa.gov/developers/sn_tiles/sn_bound_10deg.txt",skip=6,nrows=648,header=T)
tb$tile=paste("h",sprintf("%02d",tb$ih),"v",sprintf("%02d",tb$iv),sep="")
tb=tb[tb$lon_min!=-999,]
save(tb,file="modlandTiles.Rdata")
load("modlandTiles.Rdata")

## Choose some tiles to process
### list of tiles to process
tiles=c("h10v08","h11v08","h12v08","h10v07","h11v07","h12v07")  # South America
## or a northern block of tiles
tiles=apply(expand.grid(paste("h",11:17,sep=""),v=c("v00","v01","v02","v03","v04")),1,function(x) paste(x,collapse="",sep=""))
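## (the expand.grid/apply above builds all combinations of h11-h17 and v00-v04,
##  i.e. 35 candidate tile ids such as "h11v00","h12v00",...,"h17v04")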
## subset to MODLAND tiles
alltiles=system("ls -r MODTILES/ | grep tif$ | cut -c1-6 | sort | uniq - ",intern=T)

## or run all tiles
#tiles=alltiles

## subset to tiles in global region (not outside global boundary in sinusoidal projection)
tiles=tiles[tiles%in%alltiles]

## subset tile corner matrix to tiles selected above
tile_bb=tb[tb$tile%in%tiles,]

### get list of files to process
datadir="/nobackupp4/datapool/modis/MOD35_L2.006/"

outdir="daily/" #paste("daily/",tile,sep="")

## find swaths in region from sqlite database for the selected tiles
## this takes a while, about 30 minutes, so only rebuild if you need to update what's available...
rebuildswathtable=F
if(rebuildswathtable){
  ## path to swath database
  db="/nobackupp4/pvotava/DB/export/swath_geo.sql.sqlite3.db"
  con=dbConnect("SQLite", dbname = db)
  fs=do.call(rbind.data.frame,lapply(1:nrow(tile_bb),function(i){
    d=dbGetQuery(con,paste("SELECT * from swath_geo6
            WHERE east>=",tile_bb$lon_min[i]," AND
                  west<=",tile_bb$lon_max[i]," AND
                  north>=",tile_bb$lat_min[i]," AND
                  south<=",tile_bb$lat_max[i])
      )
    d$tile=tile_bb$tile[i]
    print(paste("Finished tile",tile_bb$tile[i]))
    return(d)
  }))
  con=dbDisconnect(con)
  fs$id=substr(fs$id,7,19)

  ## Identify which swaths are available in the datapool
  swaths=data.frame(path=list.files(datadir,pattern=paste("hdf$"),recursive=T,full=T),stringsAsFactors=F)  #all swaths in data pool
  swaths$id=substr(basename(swaths$path),10,22)
  fs$exists=fs$id%in%swaths$id
  fs$path=swaths$path[match(fs$id,swaths$id)]
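  ## note: the substr() calls above are intended to reduce both ids to the same 13-character
  ## acquisition stamp embedded in the MOD35_L2 filenames (e.g. "A2009001.0650"),
  ## which is what the %in% and match() tests rely on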

  ## write tile-swath list to disk
  save(fs,swaths,file="swathtile.Rdata")
}

if(!exists("fs")) load("swathtile.Rdata")

if(verbose) print(paste("###############",nrow(fs)," swath IDs received from database"))

## get all unique dates
fs$dateid=format(as.Date(paste(fs$year,fs$day,sep=""),"%Y%j"),"%Y%m%d")
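## e.g. year 2009, julian day 1 becomes dateid "20090101"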
#alldates=unique(fs$dateid[fs$exists])

#### Generate submission file
startdate="2000-03-01"
stopdate="2011-12-31"
## or just 2009
startdate="2009-01-01"
stopdate="2009-12-31"

alldates=format(seq(as.Date(startdate),as.Date(stopdate),1),"%Y%m%d")

proclist=expand.grid(date=alldates,tile=tiles)
proclist$year=substr(proclist$date,1,4)

## flag tile-dates that have at least one available swath
avail=unique(cbind.data.frame(tile=fs$tile,date=fs$dateid)[fs$exists, ])
proclist$avail=paste(proclist$tile,proclist$date,sep="_")%in%paste(avail$tile,avail$date,sep="_")

## identify which have been completed
#fdone=data.frame(path=system("ssh lou 'find MOD35/daily -name \"*.nc\"' ",intern=T))
fdone=data.frame(path=list.files(outdir,pattern="nc$",recursive=T))
fdone$date=substr(basename(as.character(fdone$path)),14,21)
fdone$tile=substr(basename(as.character(fdone$path)),7,12)
proclist$done=paste(proclist$tile,proclist$date,sep="_")%in%substr(basename(as.character(fdone$path)),7,21)
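## (the substr() positions above appear to assume daily output files named like MOD35_hXXvYY_YYYYMMDD.nc)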

### report on what has already been processed
print(paste(sum(!proclist$done)," out of ",nrow(proclist)," (",round(100*sum(!proclist$done)/nrow(proclist),2),"%) remain"))
stem(table(tile=proclist$tile[proclist$done],year=proclist$year[proclist$done]))
#table(tile=proclist$tile[proclist$done],year=proclist$year[proclist$done])
table(table(tile=proclist$tile[!proclist$done],year=proclist$year[!proclist$done]))
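## (the nested table() above gives the frequency of each missing-date count across tile-years)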

### explore tile counts
#x=table(tile=proclist$tile[proclist$done],year=proclist$year[proclist$done])
#x=x[order(rownames(x)),]

script="/u/awilso10/environmental-layers/climate/procedures/MOD35_L2_process.r"

## write the table processed by mpiexec
tp=T  # rerun everything
tp=((!proclist$done)&proclist$avail)  #date-tiles to process
table(Available=proclist$avail,Completed=proclist$done)
table(tp)

write.table(paste("--verbose ",script," --date ",proclist$date[tp]," --verbose T --profile F --tile ",proclist$tile[tp],sep=""),
            file=paste("notdone.txt",sep=""),row.names=F,col.names=F,quote=F)
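## each line of notdone.txt is one task for the worklist, e.g. (values illustrative):
## --verbose /u/awilso10/environmental-layers/climate/procedures/MOD35_L2_process.r --date 20090101 --verbose T --profile F --tile h11v00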

## try running it once for a single tile-date to get estimate of time/tile-day
test=F
if(test){
  i=2
  time1=system.time(system(paste("Rscript --verbose ",script," --date ",proclist$date[i]," --profile T --verbose T --tile ",proclist$tile[i],sep="")))
  hours=round(length(proclist$date[tp])*142/60/60); hours
  hours=round(length(proclist$date[tp])*time1[3]/60/60,1); hours
  nodes=100
  threads=nodes*8
  writeLines(paste(" ################### \n Hours per date-tile:",round(time1[3]/60/60,2),"\n Date-tiles to process:",sum(tp)," \n Estimated CPU time: ",hours,"hours \n  With ",threads,"threads:",round(hours/threads,2),"hours \n ###################"))
}
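## illustrative arithmetic (assumed numbers): at ~142 s per tile-date, 10000 tile-dates
## require 142*10000/3600 ~= 394 CPU-hours, or about 0.5 wall-clock hours spread over
## 100 nodes * 8 = 800 threads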

### Set up submission script
queue="devel"
queue="normal" #"devel"
nodes=50
walltime=2

### write qsub script to disk
cat(paste("
#PBS -S /bin/bash
#PBS -l select=",nodes,":ncpus=8:mpiprocs=8
#PBS -l walltime=",walltime,":00:00
#PBS -j n
#PBS -m be
#PBS -N mod35
#PBS -q ",queue,"
#PBS -V

CORES=",nodes*8,"
HDIR=/u/armichae/pr/
  source $HDIR/etc/environ.sh
  source /u/awilso10/environ.sh
  source /u/awilso10/.bashrc
IDIR=/nobackupp1/awilso10/mod35/
WORKLIST=$IDIR/notdone.txt
EXE=Rscript
LOGSTDOUT=$IDIR/log/mod35_stdout
LOGSTDERR=$IDIR/log/mod35_stderr
### use mpiexec to parallelize across days
mpiexec -np $CORES pxargs -a $WORKLIST -p $EXE -v -v -v --work-analyze 1> $LOGSTDOUT 2> $LOGSTDERR
",sep=""),file=paste("mod35_qsub",sep=""))

### Check the files
system(paste("cat mod35_qsub",sep=""))
system(paste("cat notdone.txt | head -n 4",sep=""))
system(paste("cat notdone.txt | wc -l ",sep=""))

## start interactive job on compute node for debugging
# system("qsub -I -l walltime=2:00:00 -lselect=2:ncpus=16:model=san -q devel")

## Submit it
system(paste("qsub mod35_qsub",sep=""))

system("qstat -u awilso10")

#######################################################
### Now submit the script to generate the climatologies

## report 'mostly' finished tiles (those with fewer than 10 missing tile-dates)
## this relies on proclist above, so be sure to update it before running
md=table(tile=proclist$tile[!proclist$done],year=proclist$year[!proclist$done])
mdt=names(md[md<10,])
tiles=mdt

tiles
ctiles=c("h10v08","h11v08","h12v08","h10v07","h11v07","h12v07")  # South America

ctiles=tiles #[c(1:3)]  #subset to only some tiles (for example if some aren't finished yet)?
climatescript="/pleiades/u/awilso10/environmental-layers/climate/procedures/MOD35_Climatology.r"

## check which tiles have been processed and are on lou with a filename "MOD35_[tile].nc"
cdone=data.frame(path="",tile="")  #use this if you want to re-run everything
cdone=data.frame(path=sapply(strsplit(basename(
                   system("ssh lou 'find MOD35/summary -name \"MOD35_h[0-9][0-9]v[0-9][0-9].nc\"' ",intern=T)),split="_"),function(x) x[2]))
cdone=data.frame(path=sapply(strsplit(basename(
                   system("find summary -name \"MOD35_h[0-9][0-9]v[0-9][0-9].nc\"",intern=T)),split="_"),function(x) x[2]))
cdone$tile=substr(basename(as.character(cdone$path)),1,6)
print(paste(length(ctiles[!ctiles%in%cdone$tile]),"tiles still need to be processed"))

## write the table processed by mpiexec
write.table(paste("--verbose ",climatescript," --verbose T --tile ",ctiles[!ctiles%in%cdone$tile],sep=""),
            file=paste("notdone_climate.txt",sep=""),row.names=F,col.names=F,quote=F)
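## each line of notdone_climate.txt is one climatology task, e.g. (tile illustrative):
## --verbose /pleiades/u/awilso10/environmental-layers/climate/procedures/MOD35_Climatology.r --verbose T --tile h11v08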

## delay start until previous jobs have finished?
delay=F
## check running jobs to get JobID of job you want to wait for
system("qstat -u awilso10",intern=T)
## enter JobID here:
job="2031668.pbspl1.nas.nasa.gov"

### qsub script
cat(paste("
#PBS -S /bin/bash
#PBS -l select=4:ncpus=8:mem=94
#PBS -l walltime=2:00:00
#PBS -j n
#PBS -m be
#PBS -N mod35_climate
#PBS -q devel
##PBS -q normal
##PBS -q ldan
#PBS -V
",if(delay) paste("#PBS -W depend=afterany:",job,sep="")," 

CORES=32
HDIR=/u/armichae/pr/
  source $HDIR/etc/environ.sh
  source /pleiades/u/awilso10/environ.sh
  source /pleiades/u/awilso10/.bashrc
IDIR=/nobackupp1/awilso10/mod35/
##WORKLIST=$HDIR/var/run/pxrRgrs/work.txt
WORKLIST=$IDIR/notdone_climate.txt
EXE=Rscript
LOGSTDOUT=$IDIR/log/climatology_stdout
LOGSTDERR=$IDIR/log/climatology_stderr
### use mpiexec to parallelize across tiles
mpiexec -np $CORES pxargs -a $WORKLIST -p $EXE -v -v -v --work-analyze 1> $LOGSTDOUT 2> $LOGSTDERR
",sep=""),file=paste("mod35_climatology_qsub",sep=""))

## check files
system(paste("cat mod35_climatology_qsub",sep=""))        #qsub submission script
system(paste("cat notdone_climate.txt | head",sep=""))    #top of job file
system(paste("cat notdone_climate.txt | wc -l ",sep=""))  #number of jobs to be run

## Submit it
system(paste("qsub mod35_climatology_qsub",sep=""))

## check progress
system("qstat -u awilso10")

#################################################################
### copy the files back to Yale

#system("ssh lou")
#scp `find MOD35/summary -name "MOD35_h[0-9][0-9]v[0-9][0-9].nc"` adamw@acrobates.eeb.yale.edu:/data/personal/adamw/projects/interp/data/modis/mod35/summary/
system("rsync -cavv `find summary -name \"MOD35_h[0-9][0-9]v[0-9][0-9]_mean.nc\"` adamw@acrobates.eeb.yale.edu:/data/personal/adamw/projects/interp/data/modis/mod35/summary/")
system("rsync -cavv `find summary -name \"MOD35_h[0-9][0-9]v[0-9][0-9].nc\"` adamw@acrobates.eeb.yale.edu:/data/personal/adamw/projects/interp/data/modis/mod35/summary/")

system("gdalbuildvrt MOD35C6_2009.vrt summary/*2009mean.nc ")
system("gdal_translate -stats -co \"COMPRESS=LZW\" -of GTiff MOD35C6_2009.vrt MOD35C6_2009.tif ")
system("scp MOD35C6_2009.tif adamw@acrobates.eeb.24.177.10.190:/Users/adamw/Downloads/")
q()