#### Script to facilitate processing of MOD35 data
### This script is meant to be run iteratively rather than unsupervised; several steps require manual checking (such as choosing the queue, number of nodes, and tiles to process).

## working directory
setwd("/nobackupp1/awilso10/mod35")

## load libraries
library(rgdal)
library(raster)
library(RSQLite)
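## (optional) if any of these packages are missing from the local R library, they may
## need to be installed first, e.g.: install.packages(c("rgdal","raster","RSQLite"))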

## flag to increase verbosity of output
verbose=T

## get MODLAND tile information
tb=read.table("http://landweb.nascom.nasa.gov/developers/sn_tiles/sn_bound_10deg.txt",skip=6,nrows=648,header=T)
tb$tile=paste("h",sprintf("%02d",tb$ih),"v",sprintf("%02d",tb$iv),sep="")
tb=tb[tb$lon_min!=-999,]
save(tb,file="modlandTiles.Rdata")
load("modlandTiles.Rdata")
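## optional sanity check: tb should now hold one row per MODLAND land tile, with corner
## coordinates (lon_min/lon_max/lat_min/lat_max) and a tile id such as "h11v08"
# str(tb)
# head(tb[,c("tile","lon_min","lon_max","lat_min","lat_max")])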

## Choose some tiles to process (the last assignment of 'tiles' below is the one used)
### list of tiles to process
tiles=c("h10v08","h11v08","h12v08","h10v07","h11v07","h12v07")  # South America
## or a northern block of tiles
tiles=apply(expand.grid(paste("h",11:17,sep=""),v=c("v00","v01","v02","v03","v04")),1,function(x) paste(x,collapse="",sep=""))
## list of all MODLAND tiles available in MODTILES/ (used for subsetting below)
alltiles=system("ls -r MODTILES/ | grep tif$ | cut -c1-6 | sort | uniq - ",intern=T)

## or run all tiles
#tiles=alltiles

## subset to tiles in global region (not outside global boundary in sinusoidal projection)
tiles=tiles[tiles%in%alltiles]

## subset tile corner matrix to tiles selected above
tile_bb=tb[tb$tile%in%tiles,]
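## optional: report how many of the requested tiles were retained after the subsetting above
# if(verbose) print(paste(length(tiles),"tiles selected;",nrow(tile_bb),"rows in the tile corner table"))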

### get list of files to process
datadir="/nobackupp4/datapool/modis/MOD35_L2.006/"

outdir="daily/" #paste("daily/",tile,sep="")

## find swaths in the region from the sqlite database for the specified tiles
## this takes a while, about 30 minutes, so only rebuild if you need to update what's available...
rebuildswathtable=F
if(rebuildswathtable){
  ## path to swath database
  db="/nobackupp4/pvotava/DB/export/swath_geo.sql.sqlite3.db"
  con=dbConnect(SQLite(), dbname = db)
  fs=do.call(rbind.data.frame,lapply(1:nrow(tile_bb),function(i){
    d=dbGetQuery(con,paste("SELECT * from swath_geo6
            WHERE east>=",tile_bb$lon_min[i]," AND
                  west<=",tile_bb$lon_max[i]," AND
                  north>=",tile_bb$lat_min[i]," AND
                  south<=",tile_bb$lat_max[i])
      )
    d$tile=tile_bb$tile[i]
    print(paste("Finished tile",tile_bb$tile[i]))
    return(d)
  }))
  dbDisconnect(con)
  fs$id=substr(fs$id,7,19)

  ## Identify which swaths are available in the datapool
  swaths=data.frame(path=list.files(datadir,pattern=paste("hdf$"),recursive=T,full=T),stringsAsFactors=F)  #all swaths in data pool
  swaths$id=substr(basename(swaths$path),10,22)
  fs$exists=fs$id%in%swaths$id
  fs$path=swaths$path[match(fs$id,swaths$id)]

  ## write tile-swath list to disk
  save(fs,swaths,file="swathtile.Rdata")
}
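## note: the substr() calls above assume the standard MOD35_L2 file naming convention
## (e.g. MOD35_L2.A2009001.0000.006.<production>.hdf), so that characters 10-22 of the file
## name give the "A<year><doy>.<time>" granule id; adjust the indices if that convention differs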

if(!exists("fs")) load("swathtile.Rdata")

if(verbose) print(paste("###############",nrow(fs)," swath IDs received from database"))

## get all unique dates
fs$dateid=format(as.Date(paste(fs$year,fs$day,sep=""),"%Y%j"),"%Y%m%d")
#alldates=unique(fs$dateid[fs$exists])
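## note: the "%Y%j" conversion above assumes fs$day is zero-padded to three digits (e.g. "009");
## if the database returns it as an integer, pad it first, e.g.
## fs$dateid=format(as.Date(paste(fs$year,sprintf("%03d",as.numeric(fs$day)),sep=""),"%Y%j"),"%Y%m%d")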

#### Generate submission file
startdate="2000-03-01"
stopdate="2011-12-31"
## or just 2009
startdate="2009-01-01"
stopdate="2009-12-31"

alldates=format(seq(as.Date(startdate),as.Date(stopdate),1),"%Y%m%d")

proclist=expand.grid(date=alldates,tile=tiles)
proclist$year=substr(proclist$date,1,4)

## identify which tile-dates have available swaths
avail=unique(cbind.data.frame(tile=fs$tile,date=fs$dateid)[fs$exists, ])
proclist$avail=paste(proclist$tile,proclist$date,sep="_")%in%paste(avail$tile,avail$date,sep="_")
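## proclist$avail is TRUE for tile-date combinations with at least one intersecting swath available in the datapool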

## identify which tile-dates have already been completed
#fdone=data.frame(path=system("ssh lou 'find MOD35/daily -name \"*.nc\"' ",intern=T))
fdone=data.frame(path=list.files(outdir,pattern="nc$",recursive=T))
fdone$date=substr(basename(as.character(fdone$path)),14,21)
fdone$tile=substr(basename(as.character(fdone$path)),7,12)
proclist$done=paste(proclist$tile,proclist$date,sep="_")%in%substr(basename(as.character(fdone$path)),7,21)
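## note: the substr() indices above assume daily output files named like MOD35_h11v08_20090101.nc
## (characters 7-12 = tile, 14-21 = date); adjust them if the naming convention changes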

### report on what has already been processed
print(paste(sum(!proclist$done)," out of ",nrow(proclist)," (",round(100*sum(!proclist$done)/nrow(proclist),2),"%) remain"))
stem(table(tile=proclist$tile[proclist$done],year=proclist$year[proclist$done]))
#table(tile=proclist$tile[proclist$done],year=proclist$year[proclist$done])
table(table(tile=proclist$tile[!proclist$done],year=proclist$year[!proclist$done]))

### explore tile counts
#x=table(tile=proclist$tile[proclist$done],year=proclist$year[proclist$done])
#x=x[order(rownames(x)),]

## path to the script run for each tile-date
script="/u/awilso10/environmental-layers/climate/procedures/MOD35_L2_process.r"

## write the table processed by mpiexec
tp=T  # rerun everything
tp=((!proclist$done)&proclist$avail)  #date-tiles to process
table(Available=proclist$avail,Completed=proclist$done)
table(tp)

write.table(paste("--verbose ",script," --date ",proclist$date[tp]," --verbose T --profile F --tile ",proclist$tile[tp],sep=""),
            file=paste("notdone.txt",sep=""),row.names=F,col.names=F,quote=F)
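## each row of notdone.txt becomes one pxargs task handed to Rscript (see the qsub script below), e.g.:
## --verbose /u/awilso10/environmental-layers/climate/procedures/MOD35_L2_process.r --date 20090101 --verbose T --profile F --tile h11v08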

## try running it once for a single tile-date to get an estimate of time per tile-day
test=F
if(test){
  i=2
  time1=system.time(system(paste("Rscript --verbose ",script," --date ",proclist$date[i]," --profile T --verbose T --tile ",proclist$tile[i],sep="")))
  hours=round(length(proclist$date[tp])*142/60/60); hours         # rough estimate assuming ~142 seconds per tile-date
  hours=round(length(proclist$date[tp])*time1[3]/60/60,1); hours  # estimate using the elapsed time of the test run
  nodes=100
  threads=nodes*8
  writeLines(paste(" ################### \n Hours per date-tile:",round(time1[3]/60/60,2),"\n Date-tiles to process:",sum(tp)," \n Estimated CPU time: ",hours,"hours \n  With ",threads,"threads:",round(hours/threads,2),"hours \n ###################"))
  summaryRprof("/nobackupp1/awilso10/mod35/log/profile.out")
}

### Set up submission script
## pick a queue (the last assignment wins)
queue="devel"
queue="normal" #"devel"
queue="long" #"devel"
nodes=120
walltime=24

### write qsub script to disk
cat(paste("
#PBS -S /bin/bash
#PBS -l select=",nodes,":ncpus=8:mpiprocs=8
#PBS -l walltime=",walltime,":00:00
#PBS -j n
#PBS -m be
#PBS -N mod35
#PBS -q ",queue,"
#PBS -V

CORES=",nodes*8,"
HDIR=/u/armichae/pr/
  source $HDIR/etc/environ.sh
  source /u/awilso10/environ.sh
  source /u/awilso10/.bashrc
IDIR=/nobackupp1/awilso10/mod35/
WORKLIST=$IDIR/notdone.txt
EXE=Rscript
LOGSTDOUT=$IDIR/log/mod35_stdout
LOGSTDERR=$IDIR/log/mod35_stderr
### use mpiexec to parallelize across days
mpiexec -np $CORES pxargs -a $WORKLIST -p $EXE -v -v -v --work-analyze 1> $LOGSTDOUT 2> $LOGSTDERR
",sep=""),file=paste("mod35_qsub",sep=""))

### Check the files
system(paste("cat mod35_qsub",sep=""))              #qsub submission script
system(paste("cat notdone.txt | head -n 4",sep="")) #top of job file
system(paste("cat notdone.txt | wc -l ",sep=""))    #number of jobs to be run

## start an interactive job on a compute node for debugging
# system("qsub -I -l walltime=2:00:00 -lselect=2:ncpus=16:model=san -q devel")

## Submit it
system(paste("qsub mod35_qsub",sep=""))

## check the queue
system("qstat -u awilso10")

#######################################################
### Now submit the script to generate the climatologies

## report 'mostly' finished tiles
## this relies on proclist above so be sure to update above before running
md=table(tile=proclist$tile[!proclist$done],year=proclist$year[!proclist$done])
mdt=rownames(md)[rowSums(md)<10]  # tiles with fewer than 10 missing date-tiles in total
tiles=mdt

tiles
ctiles=c("h10v08","h11v08","h12v08","h10v07","h11v07","h12v07")  # South America

ctiles=tiles#[c(1:3)]  #subset to only some tiles (for example if some aren't finished yet)?
climatescript="/pleiades/u/awilso10/environmental-layers/climate/procedures/MOD35_Climatology.r"

## check which tiles have been processed and are on lou with a filename "MOD35_[tile].nc"
cdone=data.frame(path="",tile="")  #use this if you want to re-run everything
#cdone=data.frame(path=sapply(strsplit(basename(
#                   system("ssh lou 'find MOD35/summary -name \"MOD35_h[0-9][0-9]v[0-9][0-9].nc\"' ",intern=T)),split="_"),function(x) x[2]))
cdone=data.frame(path=sapply(strsplit(basename(
                   system("find summary -name \"MOD35_h[0-9][0-9]v[0-9][0-9].nc\"",intern=T)),split="_"),function(x) x[2]))
cdone$tile=substr(basename(as.character(cdone$path)),1,6)
print(paste(length(ctiles[!ctiles%in%cdone$tile]),"Tiles still need to be processed"))
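## note: the strsplit/substr above assume climatology files named like MOD35_h11v08.nc,
## i.e. splitting on "_" yields "h11v08.nc" and its first 6 characters give the tile id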

## write the table processed by mpiexec
write.table(paste("--verbose ",climatescript," --verbose T --tile ",ctiles[!ctiles%in%cdone$tile],sep=""),
            file=paste("notdone_climate.txt",sep=""),row.names=F,col.names=F,quote=F)

## delay start until previous jobs have finished?
delay=F
## check running jobs to get the JobID of the job you want to wait for
system("qstat -u awilso10",intern=T)
## enter the JobID here:
job="2031668.pbspl1.nas.nasa.gov"

queue="devel"
nodes=50
walltime=2

### qsub script
## note: "mem=94" below has no unit suffix (PBS reads a bare number as bytes); it was likely meant to be e.g. mem=94GB
cat(paste("
#PBS -S /bin/bash
#PBS -l select=",nodes,":ncpus=8:mem=94
#PBS -l walltime=",walltime,":00:00
#PBS -j n
#PBS -m be
#PBS -N mod35_climate
#PBS -q ",queue,"
#PBS -V
",if(delay) paste("#PBS -W depend=afterany:",job,sep="")," 

CORES=",nodes*8,"
HDIR=/u/armichae/pr/
  source $HDIR/etc/environ.sh
  source /pleiades/u/awilso10/environ.sh
  source /pleiades/u/awilso10/.bashrc
IDIR=/nobackupp1/awilso10/mod35/
##WORKLIST=$HDIR/var/run/pxrRgrs/work.txt
WORKLIST=$IDIR/notdone_climate.txt
EXE=Rscript
LOGSTDOUT=$IDIR/log/climatology_stdout
LOGSTDERR=$IDIR/log/climatology_stderr
### use mpiexec to parallelize across tiles
mpiexec -np $CORES pxargs -a $WORKLIST -p $EXE -v -v -v --work-analyze 1> $LOGSTDOUT 2> $LOGSTDERR
",sep=""),file=paste("mod35_climatology_qsub",sep=""))

## check files
system(paste("cat mod35_climatology_qsub",sep=""))        #qsub submission script
system(paste("cat notdone_climate.txt | head",sep=""))    #top of job file
system(paste("cat notdone_climate.txt | wc -l ",sep=""))  #number of jobs to be run

## Submit it
system(paste("qsub mod35_climatology_qsub",sep=""))

## check progress
system("qstat -u awilso10")

#################################################################
### copy the files back to Yale

#system("ssh lou")
#scp `find MOD35/summary -name "MOD35_h[0-9][0-9]v[0-9][0-9].nc"` adamw@acrobates.eeb.yale.edu:/data/personal/adamw/projects/interp/data/modis/mod35/summary/
system("rsync -cavv `find summary -name \"MOD35_h[0-9][0-9]v[0-9][0-9]_mean.nc\"` adamw@acrobates.eeb.yale.edu:/data/personal/adamw/projects/interp/data/modis/mod35/summary/")
system("rsync -cavv `find summary -name \"MOD35_h[0-9][0-9]v[0-9][0-9].nc\"` adamw@acrobates.eeb.yale.edu:/data/personal/adamw/projects/interp/data/modis/mod35/summary/")

## build a VRT of the 2009 mean summaries and export a LZW-compressed GeoTIFF
system("gdalbuildvrt MOD35C6_2009.vrt summary/*2009mean.nc ")
system("gdal_translate -stats -co \"COMPRESS=LZW\" -of GTiff MOD35C6_2009.vrt MOD35C6_2009.tif ")
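## note: reading the .nc summaries with gdalbuildvrt/gdal_translate requires a GDAL build that includes the netCDF driver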
system("scp MOD35C6_2009.tif adamw@acrobates.eeb.24.177.10.190:/Users/adamw/Downloads/")

#exit