Revision c33d3b68
Added by Adam M. Wilson about 12 years ago
climate/procedures/MOD06_L2_data_compile.r | ||
---|---|---|
162 | 162 |
Sys.setenv(GRASS_OVERWRITE=1) |
163 | 163 |
Sys.setenv(DEBUG=0) |
164 | 164 |
|
165 |
initGRASS(gisBase="/usr/lib/grass64",SG=td,gisDbase=gisDbase,location=gisLocation,mapset="PERMANENT",override=T,pid=Sys.getpid()) |
|
166 |
getLocationProj() |
|
167 |
system(paste("g.proj -c proj4=\"+proj=sinu +lon_0=0 +x_0=0 +y_0=0 +a=6371007.181 +b=6371007.181 +datum=WGS84 +units=m +no_defs\"",sep="")) |
|
168 |
|
|
169 |
#system("g.mapset PERMANENT") |
|
170 |
execGRASS("r.in.gdal",input=paste("HDF4_EOS:EOS_GRID:\"",outdir,"/",fs$file[1],"\":mod06:Cloud_Mask_1km_0",sep=""), |
|
171 |
output="modisgrid",flags=c("quiet","overwrite","o")) |
|
172 |
system("g.region rast=modisgrid save=roi --overwrite") |
|
173 |
system("g.region roi") |
|
174 |
system("g.region -p") |
|
175 |
getLocationProj() |
|
176 |
|
|
177 | 165 |
## temporary objects to test function below |
178 | 166 |
i=1 |
179 | 167 |
file=paste(outdir,"/",fs$file[1],sep="") |
... | ... | |
182 | 170 |
|
183 | 171 |
### Function to extract various SDSs from a single gridded HDF file and use QA data to throw out 'bad' observations |
184 | 172 |
loadcloud<-function(date,fs){ |
185 |
## Identify which files to process |
|
173 |
### set up grass session |
|
174 |
tf=paste(tempdir(),"/grass", Sys.getpid(),"/", sep="") |
|
175 |
|
|
176 |
## set up tempfile for this PID |
|
177 |
initGRASS(gisBase="/usr/lib/grass64",gisDbase=tf,SG=td,override=T,location="mod06",mapset="PERMANENT",home=tf,pid=Sys.getpid()) |
|
178 |
system(paste("g.proj -c proj4=\"+proj=sinu +lon_0=0 +x_0=0 +y_0=0 +a=6371007.181 +b=6371007.181 +datum=WGS84 +units=m +no_defs\"",sep="")) |
|
179 |
|
|
180 |
#system("g.mapset PERMANENT") |
|
181 |
execGRASS("r.in.gdal",input=paste("HDF4_EOS:EOS_GRID:\"",outdir,"/",fs$file[1],"\":mod06:Cloud_Mask_1km_0",sep=""), |
|
182 |
output="modisgrid",flags=c("quiet","overwrite","o")) |
|
183 |
system("g.region rast=modisgrid save=roi --overwrite") |
|
184 |
system("g.region roi") |
|
185 |
system("g.region -p") |
|
186 |
# getLocationProj() |
|
187 |
|
|
188 |
|
|
189 |
## Identify which files to process |
|
186 | 190 |
tfs=fs$file[fs$date==date] |
187 | 191 |
nfs=length(tfs) |
188 |
unlink_.gislock() |
|
189 |
## set new PID for this grass process (running within a spawned R session if using multicore) |
|
190 |
set.GIS_LOCK(Sys.getpid()) |
|
191 |
## create new mapset to hold all data for this day |
|
192 |
system(paste("g.mapset -c mapset=",gisMapset,"_",format(date,"%Y%m%d"),sep="")) |
|
193 |
# file.copy(paste(gisDbase,"/",gisLocation,"/PERMANENT/DEFAULT_WIND",sep=""),paste(gisDbase,"/",gisLocation,"/",gisMapset,"_",format(date,"%Y%m%d"),"/WIND",sep="")) |
|
194 |
system("g.region roi@PERMANENT") |
|
192 |
|
|
193 |
### print some summary info |
|
195 | 194 |
print(date) |
196 |
print(gmeta6()) |
|
197 |
print(Sys.getpid()) |
|
198 | 195 |
## loop through scenes and process QA flags |
199 | 196 |
for(i in 1:nfs){ |
200 | 197 |
file=paste(outdir,"/",tfs[i],sep="") |
... | ... | |
217 | 214 |
QA_COT3_",i,"= ((QA_",i," / 2^3) % 2^2 )==0 |
218 | 215 |
QA_CER_",i,"= ((QA_",i," / 2^5) % 2^1 )==1 |
219 | 216 |
QA_CER2_",i,"= ((QA_",i," / 2^6) % 2^2 )==3 |
220 |
QA_CWP_",i,"= ((QA_",i," / 2^8) % 2^1 )==1 |
|
221 |
QA_CWP2_",i,"= ((QA_",i," / 2^9) % 2^2 )==3 |
|
222 | 217 |
EOF",sep="")) |
218 |
# QA_CWP_",i,"= ((QA_",i," / 2^8) % 2^1 )==1 |
|
219 |
# QA_CWP2_",i,"= ((QA_",i," / 2^9) % 2^2 )==3 |
|
223 | 220 |
|
224 | 221 |
## Optical Thickness |
225 | 222 |
execGRASS("r.in.gdal",input=paste("HDF4_EOS:EOS_GRID:\"",file,"\":mod06:Cloud_Optical_Thickness",sep=""), |
... | ... | |
244 | 241 |
system(paste("r.mapcalc \"CER2_",i,"=if(CM_clear_",i,"==0,CER_",i,",0)\"",sep="")) |
245 | 242 |
|
246 | 243 |
## Cloud Water Path |
247 |
execGRASS("r.in.gdal",input=paste("HDF4_EOS:EOS_GRID:\"",file,"\":mod06:Cloud_Water_Path",sep=""), |
|
248 |
output=paste("CWP_",i,sep=""),title="cloud_water_path", |
|
249 |
flags=c("overwrite","o")) ; print("") |
|
250 |
execGRASS("r.null",map=paste("CWP_",i,sep=""),setnull="-9999") |
|
251 |
## keep only positive CWP values where quality is 'useful' and 'very good' & scale to real units |
|
252 |
# system(paste("r.mapcalc \"CWP=if(QA_CWP&&QA_CWP2,CWP,null())\"")) |
|
244 |
# execGRASS("r.in.gdal",input=paste("HDF4_EOS:EOS_GRID:\"",file,"\":mod06:Cloud_Water_Path",sep=""), |
|
245 |
# output=paste("CWP_",i,sep=""),title="cloud_water_path", |
|
246 |
# flags=c("overwrite","o")) ; print("") |
|
247 |
# execGRASS("r.null",map=paste("CWP_",i,sep=""),setnull="-9999") |
|
253 | 248 |
## keep only positive CER values where quality is 'useful' and 'very good' & scale to real units |
254 |
system(paste("r.mapcalc \"CWP_",i,"=if(QA_CWP_",i,"&&QA_CWP2_",i,"&&CWP_",i,">=0,CWP_",i,"*0.009999999776482582,null())\"",sep="")) |
|
249 |
# system(paste("r.mapcalc \"CWP_",i,"=if(QA_CWP_",i,"&&QA_CWP2_",i,"&&CWP_",i,">=0,CWP_",i,"*0.009999999776482582,null())\"",sep=""))
|
|
255 | 250 |
## set CER to 0 in clear-sky pixels |
256 |
system(paste("r.mapcalc \"CWP2_",i,"=if(CM_clear_",i,"==0,CWP_",i,",0)\"",sep="")) |
|
251 |
# system(paste("r.mapcalc \"CWP2_",i,"=if(CM_clear_",i,"==0,CWP_",i,",0)\"",sep=""))
|
|
257 | 252 |
|
258 | 253 |
|
259 | 254 |
} #end loop through sub daily files |
... | ... | |
271 | 266 |
EOF",sep="")) |
272 | 267 |
|
273 | 268 |
#### Write the file to a geotiff |
274 |
execGRASS("r.out.gdal",input="CER_daily",output=paste(tifdir,"/CER_",format(date,"%Y%m%d"),".tif",sep=""),nodata=-999) |
|
275 |
execGRASS("r.out.gdal",input="COT_daily",output=paste(tifdir,"/COT_",format(date,"%Y%m%d"),".tif",sep=""),nodata=-999) |
|
276 |
execGRASS("r.out.gdal",input="CLD_daily",output=paste(tifdir,"/CLD_",format(date,"%Y%m%d"),".tif",sep=""),nodata=-999)
|
|
269 |
execGRASS("r.out.gdal",input="CER_daily",output=paste(tifdir,"/CER_",format(date,"%Y%m%d"),".tif",sep=""),nodata=-999,flags=c("quiet"))
|
|
270 |
execGRASS("r.out.gdal",input="COT_daily",output=paste(tifdir,"/COT_",format(date,"%Y%m%d"),".tif",sep=""),nodata=-999,flags=c("quiet"))
|
|
271 |
execGRASS("r.out.gdal",input="CLD_daily",output=paste(tifdir,"/CLD_",format(date,"%Y%m%d"),".tif",sep=""),nodata=99,flags=c("quiet"))
|
|
277 | 272 |
|
278 | 273 |
### delete the temporary files |
279 | 274 |
unlink_.gislock() |
280 | 275 |
system("/usr/lib/grass64/etc/clean_temp") |
281 |
# system(paste("rm -R ",gmeta6()$GISDBASE,"/",gmeta6()$LOCATION_NAME,"/",gmeta6()$MAPSET,sep="")) |
|
282 |
|
|
276 |
system(paste("rm -R ",tf,sep="")) |
|
277 |
### print update |
|
278 |
print(paste(" ################################################################### Finished ",date," |
|
279 |
################################################################")) |
|
283 | 280 |
} |
284 | 281 |
|
285 | 282 |
|
... | ... | |
287 | 284 |
### Now run it |
288 | 285 |
|
289 | 286 |
tdates=sort(unique(fs$date)) |
290 |
done=tdates%in%as.Date(substr(list.files("data/modis/MOD06_L2_tif"),5,12),"%Y%m%d")
|
|
287 |
done=tdates%in%as.Date(substr(list.files(tifdir),5,12),"%Y%m%d")
|
|
291 | 288 |
table(done) |
292 | 289 |
tdates=tdates[!done] |
293 | 290 |
|
294 |
lapply(tdates,function(date) loadcloud(date,fs=fs)) |
|
291 |
mclapply(tdates,function(date) loadcloud(date,fs=fs))
|
|
295 | 292 |
|
296 | 293 |
|
297 |
## unlock the grass database |
|
298 |
unlink_.gislock() |
|
299 |
|
|
300 |
|
|
301 | 294 |
|
302 | 295 |
#######################################################################################33 |
303 | 296 |
### Produce the monthly averages |
... | ... | |
328 | 321 |
mclapply(1:nrow(vs),function(i){ |
329 | 322 |
print(paste("Starting ",vs$type[i]," for month ",vs$month[i])) |
330 | 323 |
td=stack(fs2$path[which(fs2$month==vs$month[i]&fs2$type==vs$type[i])]) |
324 |
print(paste("Processing Metric ",vs$type[i]," for month ",vs$month[i])) |
|
331 | 325 |
calc(td,mean,na.rm=T, |
332 | 326 |
filename=paste(summarydatadir,"/",vs$type[i],"_mean_",vs$month[i],".tif",sep=""), |
333 | 327 |
format="GTiff") |
Also available in: Unified diff
Updated MOD06 compile script to process the tiles in parallel using GRASS. This brings processing time for the 10-year archive for Oregon from 48 hours to ~2 hours on 24 cores. Also added several exploratory analyses to the data_summary script.