#### Script to facilitate processing of MOD35 data

setwd("/nobackupp1/awilso10/mod35")

library(rgdal)
library(raster)
library(RSQLite)


verbose=T

## get MODLAND tile information
tb=read.table("http://landweb.nascom.nasa.gov/developers/sn_tiles/sn_bound_10deg.txt",skip=6,nrows=648,header=T)
tb$tile=paste("h",sprintf("%02d",tb$ih),"v",sprintf("%02d",tb$iv),sep="")
tb=tb[tb$lon_min!=-999,]
save(tb,file="modlandTiles.Rdata")
load("modlandTiles.Rdata")
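## optional sanity check: tb should now hold one row per valid MODLAND tile, with the corner
## coordinates (lon_min, lon_max, lat_min, lat_max) used below to query the swath database
# str(tb)
# head(tb[,c("tile","lon_min","lon_max","lat_min","lat_max")])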

## delete temporary log file that can grow to GB
system("rm /nobackupp1/awilso10/software/heg/TOOLKIT_MTD/runtime/LogStatus")

### list of tiles to process
tiles=c("h10v08","h11v08","h12v08","h10v07","h11v07","h12v07")  # South America

## subset to MODLAND tiles
modlandtiles=system("ls -r /nobackupp4/datapool/modis/MOD11A1.005/2010* | grep hdf$ | cut -c18-23 | sort | uniq - ",intern=T)
tb$land=tb$tile%in%modlandtiles
tiles=tb$tile[tb$land]
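## note: the MOD11A1 (land surface temperature) datapool listing above is used as a proxy for
## which tiles contain land; `cut -c18-23` pulls the hXXvXX tile id out of each granule file name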

## subset tile corner matrix to tiles selected above
tile_bb=tb[tb$tile%in%tiles,]

### get list of files to process
datadir="/nobackupp4/datapool/modis/MOD35_L2.006/"

outdir="daily/" #paste("daily/",tile,sep="")

## find swaths in region from sqlite database for the specified date/tile
## this takes a while, about 30 minutes, so only rebuild if you need to update what's available...
rebuildswathtable=F
if(rebuildswathtable){
  ## path to swath database
  db="/nobackupp4/pvotava/DB/export/swath_geo.sql.sqlite3.db"
  con=dbConnect("SQLite", dbname = db)
  fs=do.call(rbind.data.frame,lapply(1:nrow(tile_bb),function(i){
    d=dbGetQuery(con,paste("SELECT * from swath_geo6
            WHERE east>=",tile_bb$lon_min[i]," AND
                  west<=",tile_bb$lon_max[i]," AND
                  north>=",tile_bb$lat_min[i]," AND
                  south<=",tile_bb$lat_max[i])
      )
    d$tile=tile_bb$tile[i]
    print(paste("Finished tile",tile_bb$tile[i]))
    return(d)
  }))
  con=dbDisconnect(con)
  fs$id=substr(fs$id,7,19)
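  ## note: this assumes the database id carries a 6-character prefix followed by the 13-character
  ## acquisition id (e.g. "A2009001.0000"), which is what gets matched to the datapool file names below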

  ## Identify which swaths are available in the datapool
  swaths=data.frame(path=list.files(datadir,pattern=paste("hdf$"),recursive=T,full=T),stringsAsFactors=F)  #all swaths in data pool
  swaths$id=substr(basename(swaths$path),10,22)
  fs$exists=fs$id%in%swaths$id
  fs$path=swaths$path[match(fs$id,swaths$id)]
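  ## optional: check how many of the queried swaths were actually found in the datapool
  # table(fs$exists)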

  ## write tile-swath list to disk
  save(fs,swaths,file="swathtile.Rdata")
}

load("swathtile.Rdata")

if(verbose) print(paste("###############",nrow(fs)," swath IDs received from database"))

## get all unique dates
fs$dateid=format(as.Date(paste(fs$year,fs$day,sep=""),"%Y%j"),"%Y%m%d")
#alldates=unique(fs$dateid[fs$exists])
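## dateid is the acquisition date reformatted as YYYYMMDD (built from the year and day-of-year columns)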

#### Generate submission file
startdate="2000-03-01"
stopdate="2011-12-31"
## just 2009
startdate="2009-01-01"
stopdate="2009-12-31"

alldates=format(seq(as.Date(startdate),as.Date(stopdate),1),"%Y%m%d")
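## e.g. for 2009 this gives "20090101", "20090102", ..., "20091231" (365 dates)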

proclist=expand.grid(date=alldates,tile=tiles)
proclist$year=substr(proclist$date,1,4)

## flag tile-dates that have at least one available swath
avail=unique(cbind.data.frame(tile=fs$tile,date=fs$dateid)[fs$exists, ])
proclist$avail=paste(proclist$tile,proclist$date,sep="_")%in%paste(avail$tile,avail$date,sep="_")

## identify which have been completed
#fdone=data.frame(path=system("ssh lou 'find MOD35/daily -name \"*.nc\"' ",intern=T))
fdone=data.frame(path=list.files(outdir,pattern="nc$",recursive=T))
fdone$date=substr(basename(as.character(fdone$path)),14,21)
fdone$tile=substr(basename(as.character(fdone$path)),7,12)
proclist$done=paste(proclist$tile,proclist$date,sep="_")%in%substr(basename(as.character(fdone$path)),7,21)
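## note: the substr() positions above assume daily output files named like MOD35_<tile>_<date>.nc
## (e.g. MOD35_h10v08_20090101.nc), so characters 7-12 give the tile and 14-21 give the date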

### report on what has already been processed
print(paste(sum(!proclist$done)," out of ",nrow(proclist)," (",round(100*sum(!proclist$done)/nrow(proclist),2),"%) remain"))
table(tile=proclist$tile[proclist$done],year=proclist$year[proclist$done])
table(table(tile=proclist$tile[!proclist$done],year=proclist$year[!proclist$done]))

### explore tile counts
#x=table(tile=proclist$tile[proclist$done],year=proclist$year[proclist$done])
#x=x[order(rownames(x)),]

script="/u/awilso10/environmental-layers/climate/procedures/MOD35_L2_process.r"

## write the table processed by mpiexec
tp=((!proclist$done)&proclist$avail)  #date-tiles to process
table(Available=proclist$avail,Completed=proclist$done)

write.table(paste("--verbose ",script," --date ",proclist$date[tp]," --verbose T --tile ",proclist$tile[tp],sep=""),
file=paste("notdone.txt",sep=""),row.names=F,col.names=F,quote=F)
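## each line of notdone.txt becomes one pxargs task, e.g.:
## --verbose /u/awilso10/environmental-layers/climate/procedures/MOD35_L2_process.r --date 20090101 --verbose T --tile h10v08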

### qsub script
cat(paste("
#PBS -S /bin/bash
##PBS -l select=100:ncpus=8:mpiprocs=8
#PBS -l select=10:ncpus=8:mpiprocs=8
##PBS -l walltime=8:00:00
#PBS -l walltime=2:00:00
#PBS -j n
#PBS -m be
#PBS -N mod35
##PBS -q normal
#PBS -q devel
#PBS -V

CORES=80
#CORES=160

HDIR=/u/armichae/pr/
#  source $HDIR/etc/environ.sh
  source /u/awilso10/environ.sh
  source /u/awilso10/.bashrc
IDIR=/nobackupp1/awilso10/mod35/
##WORKLIST=$HDIR/var/run/pxrRgrs/work.txt
WORKLIST=$IDIR/notdone.txt
EXE=Rscript
LOGSTDOUT=$IDIR/log/mod35_stdout
LOGSTDERR=$IDIR/log/mod35_stderr
### use mpiexec to parallelize across days
mpiexec -np $CORES pxargs -a $WORKLIST -p $EXE -v -v -v --work-analyze 1> $LOGSTDOUT 2> $LOGSTDERR
",sep=""),file=paste("mod35_qsub",sep=""))


### Check the files
system(paste("cat mod35_qsub",sep=""))
system(paste("cat notdone.txt | head",sep=""))
system(paste("cat notdone.txt | wc -l ",sep=""))

## Submit it
system(paste("qsub mod35_qsub",sep=""))

system("qstat -u awilso10")

#######################################################
### Now submit the script to generate the climatologies

tiles
ctiles=c("h10v08","h11v08","h12v08","h10v07","h11v07","h12v07")  # South America

ctiles=tiles#[c(1:3)]  #subset to only some tiles (for example if some aren't finished yet)?
climatescript="/pleiades/u/awilso10/environmental-layers/climate/procedures/MOD35_Climatology.r"

## check which tiles have been processed and are on lou with a filename "MOD35_[tile].nc"
cdone=data.frame(path="",tile="")  #use this if you want to re-run everything
cdone=data.frame(path=sapply(strsplit(basename(
                   system("ssh lou 'find MOD35/summary -name \"MOD35_h[0-9][0-9]v[0-9][0-9].nc\"' ",intern=T)),split="_"),function(x) x[2]))
cdone$tile=substr(basename(as.character(cdone$path)),1,6)
print(paste(length(ctiles[!ctiles%in%cdone$tile]),"Tiles still need to be processed"))
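## note: this assumes summary files on lou are named MOD35_<tile>.nc, so splitting the basename
## on "_" and taking the second piece (e.g. "h10v08.nc") lets substr(...,1,6) recover the tile id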

## write the table processed by mpiexec
write.table(paste("--verbose ",climatescript," --verbose T --tile ",ctiles[!ctiles%in%cdone$tile],sep=""),
file=paste("notdone_climate.txt",sep=""),row.names=F,col.names=F,quote=F)
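## each line of notdone_climate.txt is one tile to summarize, e.g.:
## --verbose /pleiades/u/awilso10/environmental-layers/climate/procedures/MOD35_Climatology.r --verbose T --tile h10v08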

## delay start until previous jobs have finished?
delay=F
## check running jobs to get JobID of job you want to wait for
system("qstat -u awilso10")
## enter JobID here:
job="881394.pbspl1.nas.nasa.gov"

### qsub script
cat(paste("
#PBS -S /bin/bash
#PBS -l select=20:ncpus=8:mem=94
#PBS -l walltime=3:00:00
#PBS -j n
#PBS -m be
#PBS -N mod35_climate
#PBS -q normal
##PBS -q ldan
#PBS -V
",if(delay) paste("#PBS -W depend=afterany:",job,sep="")," 

CORES=160
HDIR=/u/armichae/pr/
  source $HDIR/etc/environ.sh
  source /pleiades/u/awilso10/environ.sh
  source /pleiades/u/awilso10/.bashrc
IDIR=/nobackupp1/awilso10/mod35/
##WORKLIST=$HDIR/var/run/pxrRgrs/work.txt
WORKLIST=$IDIR/notdone_climate.txt
EXE=Rscript
LOGSTDOUT=$IDIR/log/climatology_stdout
LOGSTDERR=$IDIR/log/climatology_stderr
### use mpiexec to parallelize across tiles
mpiexec -np $CORES pxargs -a $WORKLIST -p $EXE -v -v -v --work-analyze 1> $LOGSTDOUT 2> $LOGSTDERR
",sep=""),file=paste("mod35_climatology_qsub",sep=""))

## check files
system(paste("cat mod35_climatology_qsub",sep=""))        #qsub submission script
system(paste("cat notdone_climate.txt | head",sep=""))    #top of job file
system(paste("cat notdone_climate.txt | wc -l ",sep=""))  #number of jobs to be run

## Submit it
system(paste("qsub mod35_climatology_qsub",sep=""))

## check progress
system("qstat -u awilso10")

## start interactive job on compute node for debugging
# system("qsub -I -l walltime=2:00:00 -lselect=2:ncpus=16:model=san -q devel")


#################################################################
### copy the files back to Yale
## note: these commands are meant to be run interactively: ssh to lou, run the transfer there, then exit
system("ssh lou")
#scp `find MOD35/summary -name "MOD35_h[0-9][0-9]v[0-9][0-9].nc"` adamw@acrobates.eeb.yale.edu:/data/personal/adamw/projects/interp/data/modis/mod35/summary/
system("rsync -cavv `find summary -name \"MOD35_h[0-9][0-9]v[0-9][0-9]_mean.nc\"` adamw@acrobates.eeb.yale.edu:/data/personal/adamw/projects/interp/data/modis/mod35/summary/")
#exit  # shell command, not valid R
