Revision f9d40a14
Added by Adam Wilson almost 12 years ago
climate/procedures/MOD06_Climatology.r | ||
---|---|---|
### Process a folder of daily MOD06 HDF files to produce a climatology

## Import command-line arguments
library(getopt)

## Parse options:
##   -t / --tile    : MODIS tile identifier, e.g. "h11v08" (character, required)
##   -v / --verbose : print extensive information for debugging (logical)
opta <- getopt(matrix(c(
  "tile",    "t", 1, "character",
  "verbose", "v", 1, "logical"
), ncol = 4, byrow = TRUE))

tile <- opta$tile  # tile="h11v08"

## getopt() returns NULL for options that were not supplied on the command
## line; default verbose to FALSE so later `if (verbose)` checks do not fail
## with an "argument is of length zero" error.
verbose <- if (is.null(opta$verbose)) FALSE else opta$verbose
### Directory containing the separate per-day files for this tile
outdir <- paste("daily/", tile, "/", sep = "")
if (!file.exists(outdir)) {
  dir.create(outdir)
}

### Directory that will hold the combined daily file and summarized files
outdir2 <- "summary"
if (!file.exists(outdir2)) {
  dir.create(outdir2)
}

### Path to the NCO binaries (ncrcat, ncatted, ncks, ncrename, ncpdq)
ncopath <- "/nasa/sles11/nco/4.0.8/gcc/mpt/bin/"

### Variables that must be present in a file or it will be deleted.
### Formatted as the output of system(paste("cdo -s showvar ", fdly$path[i]), intern = T)
finalvars <- " CER COT CLD"
################################################################################
## Get list of all daily files
if (verbose) print("Checking daily output in preparation for generating climatology")

## One row per daily NetCDF file found in outdir; the date fields are parsed
## from the fixed-width file name (MOD06_<tile>_<yyyymmdd>.nc, chars 14-21).
fdly <- data.frame(
  path = list.files(outdir, pattern = "nc$", full.names = TRUE),
  file = list.files(outdir, pattern = "nc$"))
fdly$dateid <- substr(fdly$file, 14, 21)
fdly$date   <- as.Date(substr(fdly$file, 14, 21), "%Y%m%d")
fdly$month  <- format(fdly$date, "%m")
fdly$year   <- format(fdly$date, "%Y")
nrow(fdly)

## Check validity (via npar and ntime) of the nc files by querying cdo.
## seq_len() (rather than 1:nrow(fdly)) is safe when no daily files exist.
for (i in seq_len(nrow(fdly))) {
  fdly$ntime[i]  <- as.numeric(system(paste("cdo -s ntime ", fdly$path[i]), intern = TRUE))
  fdly$npar[i]   <- as.numeric(system(paste("cdo -s npar ", fdly$path[i]), intern = TRUE))
  fdly$fyear[i]  <- as.numeric(system(paste("cdo -s showyear ", fdly$path[i]), intern = TRUE))
  fdly$fmonth[i] <- as.numeric(system(paste("cdo -s showmon ", fdly$path[i]), intern = TRUE))
  fdly$fvar[i]   <- system(paste("cdo -s showvar ", fdly$path[i]), intern = TRUE)
  print(paste(i, " out of ", nrow(fdly), " for year ", fdly$fyear[i]))
}
## Print some summaries of the per-file metadata gathered above
if (verbose) print("Summary of available daily files")
print(table(fdly$fyear))
print(table(fdly$fmonth))
print(table(fdly$fvar))

## Delete files that fail the validity check?
delete <- TRUE
if (delete) {
  ## A file is dropped when cdo could not read it (npar is NA) or when it
  ## does not contain exactly the expected set of variables.
  fdly$drop <- is.na(fdly$npar) | fdly$fvar != finalvars
  ## If fvar itself is NA the comparison above yields NA; treat that as a
  ## failed check rather than letting NA propagate into file.remove() and
  ## the row subsetting below.
  fdly$drop[is.na(fdly$drop)] <- TRUE
  print(paste(sum(fdly$drop), "files will be deleted"))
  file.remove(as.character(fdly$path[fdly$drop]))
  fdly <- fdly[!fdly$drop, ]
}
#################################################################################
## Combine the year-by-year files into a single daily file in the summary directory (for archiving)
if (verbose) print("Merging daily files into single file output")

## Temporary directory for intermediate files (deleted when R quits)
tsdir <- paste(tempdir(), "/summary", sep = "")
if (!file.exists(tsdir)) {
  dir.create(tsdir, recursive = TRUE)
}

## Concatenate every daily NetCDF into a single file holding all dates
dailync <- paste(outdir2, "/MOD06_", tile, "_daily.nc", sep = "")
system(paste(ncopath, "ncrcat -O ", outdir, "/*nc ", dailync, sep = ""))

## Update the time units and the global metadata attributes of the merged file
system(paste(ncopath, "ncatted ",
             " -a units,time,o,c,\"days since 2000-1-1 0:0:0\" ",
             " -a title,global,o,c,\"MODIS Cloud Product (MOD06) Daily Timeseries\" ",
             " -a institution,global,o,c,\"Yale University\" ",
             " -a source,global,o,c,\"MODIS Cloud Product (MOD06)\" ",
             " -a comment,global,o,c,\"Compiled by Adam M. Wilson (adam.wilson@yale.edu)\" ",
             dailync, sep = ""))

### produce a monthly timeseries?
#system(paste("cdo -O monmean ",outdir2,"/MOD06_",tile,"_daily.nc ",tsdir,"/MOD06_",tile,"_monmean.nc",sep=""))
#############################
## Generate the Climatologies
if (verbose) print("Generate monthly climatologies")

## This year will be used in all dates of the monthly climatologies (and day will = 15)
myear <- as.integer(max(fdly$year))

## Merged daily input file and the output-name stem shared by every step below
indaily <- paste(outdir2, "/MOD06_", tile, "_daily.nc", sep = "")
stem <- paste(tsdir, "/MOD06_", tile, sep = "")

## Monthly means
system(paste("cdo -O sorttimestamp -setyear,", myear, " -setday,15 -ymonmean ",
             indaily, " ", stem, "_ymonmean.nc", sep = ""), wait = TRUE)

## Monthly standard deviation
system(paste("cdo -O sorttimestamp -setyear,", myear, " -setday,15 -ymonstd ",
             indaily, " ", stem, "_ymonstd.nc", sep = ""))
system(paste(ncopath, "ncrename -v CER,CER_sd -v CLD,CLD_sd -v COT,COT_sd ",
             stem, "_ymonstd.nc", sep = ""))
system(paste(ncopath, "ncatted ",
             " -a long_name,CER_sd,o,c,\"Cloud Particle Effective Radius (standard deviation of daily observations)\" ",
             " -a long_name,CLD_sd,o,c,\"Cloud Mask (standard deviation of daily observations)\" ",
             " -a long_name,COT_sd,o,c,\"Cloud Optical Thickness (standard deviation of daily observations)\" ",
             stem, "_ymonstd.nc", sep = ""))

## Proportion of days with cer > 20
system(paste("cdo -O sorttimestamp -setyear,", myear, " -setday,15 -ymonmean -gtc,20 -selvar,CER ",
             indaily, " ", stem, "_ymoncer20.nc", sep = ""))
system(paste(ncopath, "ncrename -v CER,CER20 ", stem, "_ymoncer20.nc", sep = ""))
system(paste(ncopath, "ncatted ",
             " -a long_name,CER20,o,c,\"Proportion of Days with Cloud Particle Effective Radius > 20um\" ",
             " -a units,CER20,o,c,\"Proportion\" ",
             stem, "_ymoncer20.nc", sep = ""))

## Proportion of missing observations
system(paste("cdo -O sorttimestamp -setyear,", myear, " -setday,15 -nint -mulc,100 -ymonmean -eqc,9999 -setmisstoc,9999 -selvar,CER,CLD ",
             indaily, " ", stem, "_ymonmiss.nc", sep = ""))
system(paste(ncopath, "ncrename -v CER,CER_pmiss -v CLD,CLD_pmiss ", stem, "_ymonmiss.nc", sep = ""))
system(paste(ncopath, "ncatted ",
             " -a long_name,CER_pmiss,o,c,\"Proportion of Days with missing data for CER\" ",
             " -a long_name,CLD_pmiss,o,c,\"Proportion of Days with missing data for CLD\" ",
             " -a scale_factor,CER_pmiss,o,d,0.01 ",
             " -a units,CER_pmiss,o,c,\"Proportion\" ",
             " -a scale_factor,CLD_pmiss,o,d,0.01 ",
             " -a units,CLD_pmiss,o,c,\"Proportion\" ",
             stem, "_ymonmiss.nc", sep = ""))

## Append all per-statistic climatologies into one file
if (verbose) print("Append all monthly climatologies into a single file")
system(paste(ncopath, "ncks -O ", stem, "_ymonmean.nc ", stem, "_ymon.nc", sep = ""))
system(paste(ncopath, "ncks -A ", stem, "_ymonstd.nc ", stem, "_ymon.nc", sep = ""))
system(paste(ncopath, "ncks -A ", stem, "_ymoncer20.nc ", stem, "_ymon.nc", sep = ""))
system(paste(ncopath, "ncks -A ", stem, "_ymonmiss.nc ", stem, "_ymon.nc", sep = ""))
## append sinusoidal grid from one of input files as CDO doesn't transfer all attributes
if (verbose) print("Clean up file (update attributes, flip latitudes, add grid description")

#system(paste(ncopath,"ncea -d time,0,1 -v sinusoidal ",list.files(outdir,full=T,pattern="[.]nc$")[1]," ",tsdir,"/sinusoidal.nc",sep=""))
#system(paste(ncopath,"ncks -A -d time,0,1 -v sinusoidal ",list.files(outdir,full=T,pattern="[.]nc$")[1]," ",tsdir,"/MOD06_",tile,"_ymon.nc",sep=""))

## Invert latitude so it plays nicely with gdal
system(paste(ncopath, "ncpdq -O -a -y ",
             tsdir, "/MOD06_", tile, "_ymon.nc ",
             outdir2, "/MOD06_", tile, ".nc", sep = ""))

## Update global metadata attributes on the final climatology file
system(paste(ncopath, "ncatted ",
             #" -a standard_parallel,sinusoidal,o,c,\"0\" ",
             #" -a longitude_of_central_meridian,sinusoidal,o,c,\"0\" ",
             #" -a latitude_of_central_meridian,sinusoidal,o,c,\"0\" ",
             " -a units,time,o,c,\"days since 2000-1-1 0:0:0\" ",
             " -a title,global,o,c,\"MODIS Cloud Product (MOD06) Climatology\" ",
             " -a institution,global,o,c,\"Yale University\" ",
             " -a source,global,o,c,\"MODIS Cloud Product (MOD06)\" ",
             " -a comment,global,o,c,\"Compiled by Adam M. Wilson (adam.wilson@yale.edu)\" ",
             outdir2, "/MOD06_", tile, ".nc", sep = ""))

## Report what was processed for this tile
print(paste("############################### Processed ", nrow(fdly), "days for tile:", tile, " ###################################################"))
print("Years:")
print(table(fdly$fyear))
print("Months:")
print(table(fdly$fmonth))

## Quit R without saving the workspace
q("no")
climate/procedures/MOD06_L2_process.r | ||
---|---|---|
31 | 31 |
verbose=opta$verbose #print out extensive information for debugging? |
32 | 32 |
outdir=paste("daily/",tile,"/",sep="") #directory for separate daily files |
33 | 33 |
|
34 |
## permanent storage on lou:
|
|
35 |
ldir=paste("lfe1.nas.nasa.gov:/u/awilso10/mod06/daily/",tile,sep="")
|
|
34 |
## location of MOD06 files
|
|
35 |
datadir="/nobackupp4/datapool/modis/MOD06_L2.005/"
|
|
36 | 36 |
|
37 |
### print some status messages |
|
37 | 38 |
print(paste("Processing tile",tile," for date",date)) |
38 | 39 |
|
39 | 40 |
## load libraries |
40 | 41 |
require(reshape) |
41 | 42 |
require(geosphere) |
42 | 43 |
require(raster) |
43 |
library(rgdal)
|
|
44 |
require(rgdal)
|
|
44 | 45 |
require(spgrass6) |
45 | 46 |
require(RSQLite) |
46 | 47 |
|
48 |
## path to swath database |
|
49 |
db="/nobackupp4/pvotava/DB/export/swath_geo.sql.sqlite3.db" |
|
50 |
## path to NCO |
|
51 |
ncopath="/nasa/sles11/nco/4.0.8/gcc/mpt/bin/" |
|
47 | 52 |
|
48 |
## specify some working directories
|
|
53 |
## specify working directory
|
|
49 | 54 |
setwd("/nobackupp1/awilso10/mod06") |
50 | 55 |
|
51 |
## load ancillary data |
|
52 |
load(file="allfiles.Rdata") |
|
53 |
|
|
54 | 56 |
## load tile information |
55 | 57 |
load(file="modlandTiles.Rdata") |
56 | 58 |
|
59 |
## path to MOD11A1 file for this tile to align grid/extent |
|
60 |
gridfile=list.files("/nobackupp4/datapool/modis/MOD11A1.005/2006.01.27",pattern=paste(tile,".*[.]hdf$",sep=""),recursive=T,full=T)[1] |
|
61 |
td=readGDAL(paste("HDF4_EOS:EOS_GRID:\"",gridfile,"\":MODIS_Grid_Daily_1km_LST:Night_view_angl",sep="")) |
|
62 |
projection(td)="+proj=sinu +lon_0=0 +x_0=0 +y_0=0 +a=6371007.181 +b=6371007.181 +units=m +no_defs +datum=WGS84 +ellps=WGS84 " |
|
63 |
|
|
57 | 64 |
## vars to process |
58 | 65 |
vars=as.data.frame(matrix(c( |
59 | 66 |
"Cloud_Effective_Radius", "CER", |
... | ... | |
68 | 75 |
"Quality_Assurance_1km", "QA"), |
69 | 76 |
byrow=T,ncol=2,dimnames=list(1:10,c("variable","varid"))),stringsAsFactors=F) |
70 | 77 |
|
78 |
## vector of variables expected to be in final netcdf file. If these are not present, the file will be deleted at the end. |
|
79 |
finalvars=c("CER","COT","CLD") |
|
71 | 80 |
|
72 | 81 |
############################################################################ |
73 | 82 |
############################################################################ |
74 | 83 |
### Define functions to process a particular date-tile |
75 | 84 |
|
76 |
#### get swaths |
|
77 |
#getswaths<-function(tile,db="/nobackupp4/pvotava/DB/export/swath_geo.sql.sqlite3.db"){ |
|
78 |
# db=dbConnect( |
|
79 |
# } |
|
80 |
|
|
81 | 85 |
swath2grid=function(file,vars,upleft,lowright){ |
82 | 86 |
## Function to generate hegtool parameter file for multi-band HDF-EOS file |
83 | 87 |
print(paste("Starting file",basename(file))) |
... | ... | |
105 | 109 |
END |
106 | 110 |
|
107 | 111 |
",sep="") |
108 |
|
|
109 | 112 |
## if any remnants from previous runs remain, delete them |
110 | 113 |
if(length(list.files(tempdir(),pattern=basename(file)))>0) |
111 | 114 |
file.remove(list.files(tempdir(),pattern=basename(file),full=T)) |
... | ... | |
139 | 142 |
Sys.setenv(GRASS_GUI="txt") |
140 | 143 |
|
141 | 144 |
### Function to extract various SDSs from a single gridded HDF file and use QA data to throw out 'bad' observations |
142 |
loadcloud<-function(date,fs,ncfile){ |
|
143 |
tf=paste(tempdir(),"/grass", Sys.getpid(),"/", sep="") |
|
144 |
if(!file.exists(tf))dir.create(tf) |
|
145 |
|
|
146 |
print(paste("Set up temporary grass session in",tf)) |
|
147 |
|
|
145 |
loadcloud<-function(date,swaths,ncfile){ |
|
146 |
## make temporary working directory |
|
147 |
tf=paste(tempdir(),"/grass", Sys.getpid(),"/", sep="") #temporary |
|
148 |
if(!file.exists(tf)) dir.create(tf) |
|
149 |
## create output directory if needed |
|
150 |
if(!file.exists(dirname(ncfile))) dir.create(dirname(ncfile)) |
|
151 |
|
|
148 | 152 |
## set up temporary grass instance for this PID |
153 |
if(verbose) print(paste("Set up temporary grass session in",tf)) |
|
149 | 154 |
initGRASS(gisBase="/u/armichae/pr/grass-6.4.2/",gisDbase=tf,SG=td,override=T,location="mod06",mapset="PERMANENT",home=tf,pid=Sys.getpid()) |
150 | 155 |
system(paste("g.proj -c proj4=\"",projection(td),"\"",sep=""),ignore.stdout=T,ignore.stderr=T) |
151 | 156 |
|
... | ... | |
156 | 161 |
system("g.region rast=modisgrid save=roi --overwrite",ignore.stdout=T,ignore.stderr=T) |
157 | 162 |
|
158 | 163 |
## Identify which files to process |
159 |
tfs=fs$file[fs$dateid==date]
|
|
164 |
tfs=basename(swaths)
|
|
160 | 165 |
## drop swaths that did not produce an output file (typically due to not overlapping the ROI) |
161 | 166 |
tfs=tfs[tfs%in%list.files(tempdir())] |
162 | 167 |
nfs=length(tfs) |
... | ... | |
167 | 172 |
## Cloud Mask |
168 | 173 |
execGRASS("r.in.gdal",input=paste("HDF4_EOS:EOS_GRID:\"",file,"\":mod06:Cloud_Mask_1km_0",sep=""), |
169 | 174 |
output=paste("CM1_",i,sep=""),flags=c("overwrite","o")) ; print("") |
170 |
## extract cloudy and 'confidently clear' pixels |
|
175 |
## extract cloudy and 'probably/confidently clear' pixels
|
|
171 | 176 |
system(paste("r.mapcalc <<EOF |
172 | 177 |
CM_cloud_",i," = ((CM1_",i," / 2^0) % 2) == 1 && ((CM1_",i," / 2^1) % 2^2) == 0 |
173 |
CM_clear_",i," = ((CM1_",i," / 2^0) % 2) == 1 && ((CM1_",i," / 2^1) % 2^2) == 3
|
|
178 |
CM_clear_",i," = ((CM1_",i," / 2^0) % 2) == 1 && ((CM1_",i," / 2^1) % 2^2) > 2
|
|
174 | 179 |
EOF",sep="")) |
175 | 180 |
## QA |
176 | 181 |
execGRASS("r.in.gdal",input=paste("HDF4_EOS:EOS_GRID:\"",file,"\":mod06:Quality_Assurance_1km_0",sep=""), |
... | ... | |
178 | 183 |
## QA_CER |
179 | 184 |
system(paste("r.mapcalc <<EOF |
180 | 185 |
QA_COT_",i,"= ((QA_",i," / 2^0) % 2^1 )==1 |
181 |
QA_COT2_",i,"= ((QA_",i," / 2^1) % 2^2 )==3
|
|
186 |
QA_COT2_",i,"= ((QA_",i," / 2^1) % 2^2 )>=2
|
|
182 | 187 |
QA_COT3_",i,"= ((QA_",i," / 2^3) % 2^2 )==0 |
183 | 188 |
QA_CER_",i,"= ((QA_",i," / 2^5) % 2^1 )==1 |
184 |
QA_CER2_",i,"= ((QA_",i," / 2^6) % 2^2 )==3
|
|
189 |
QA_CER2_",i,"= ((QA_",i," / 2^6) % 2^2 )>=2
|
|
185 | 190 |
EOF",sep="")) |
186 | 191 |
# QA_CWP_",i,"= ((QA_",i," / 2^8) % 2^1 )==1 |
187 | 192 |
# QA_CWP2_",i,"= ((QA_",i," / 2^9) % 2^2 )==3 |
... | ... | |
192 | 197 |
title="cloud_effective_radius", |
193 | 198 |
flags=c("overwrite","o")) ; print("") |
194 | 199 |
execGRASS("r.null",map=paste("COT_",i,sep=""),setnull="-9999") |
195 |
## keep only positive COT values where quality is 'useful' and 'very good' & scale to real units
|
|
200 |
## keep only positive COT values where quality is 'useful' and '>= good' & scale to real units
|
|
196 | 201 |
system(paste("r.mapcalc \"COT_",i,"=if(QA_COT_",i,"&&QA_COT2_",i,"&&QA_COT3_",i,"&&COT_",i,">=0,COT_",i,"*0.009999999776482582,null())\"",sep="")) |
197 | 202 |
## set COT to 0 in clear-sky pixels |
198 | 203 |
system(paste("r.mapcalc \"COT2_",i,"=if(CM_clear_",i,"==0,COT_",i,",0)\"",sep="")) |
... | ... | |
203 | 208 |
title="cloud_effective_radius", |
204 | 209 |
flags=c("overwrite","o")) ; print("") |
205 | 210 |
execGRASS("r.null",map=paste("CER_",i,sep=""),setnull="-9999") |
206 |
## keep only positive CER values where quality is 'useful' and 'very good' & scale to real units
|
|
211 |
## keep only positive CER values where quality is 'useful' and '>= good' & scale to real units
|
|
207 | 212 |
system(paste("r.mapcalc \"CER_",i,"=if(QA_CER_",i,"&&QA_CER2_",i,"&&CER_",i,">=0,CER_",i,"*0.009999999776482582,null())\"",sep="")) |
208 | 213 |
## set CER to 0 in clear-sky pixels |
209 | 214 |
system(paste("r.mapcalc \"CER2_",i,"=if(CM_clear_",i,"==0,CER_",i,",0)\"",sep="")) |
... | ... | |
242 | 247 |
# createopt=c("FORMAT=NC4","ZLEVEL=5","COMPRESS=DEFLATE","WRITE_GDAL_TAGS=YES","WRITE_LONLAT=NO"),format="netCDF") #for compressed netcdf |
243 | 248 |
createopt=c("FORMAT=NC","WRITE_GDAL_TAGS=YES","WRITE_LONLAT=NO"),format="netCDF") |
244 | 249 |
|
245 |
ncopath="/nasa/sles11/nco/4.0.8/gcc/mpt/bin/" |
|
246 | 250 |
system(paste(ncopath,"ncecat -O -u time ",ncfile," ",ncfile,sep="")) |
247 | 251 |
## create temporary nc file with time information to append to MOD06 data |
248 | 252 |
cat(paste(" |
... | ... | |
255 | 259 |
time:calendar = \"gregorian\"; |
256 | 260 |
time:long_name = \"time of observation\"; |
257 | 261 |
data: |
258 |
time=",as.integer(fs$date[fs$dateid==date][1]-as.Date("2000-01-01")),";
|
|
262 |
time=",as.integer(as.Date(date,"%Y%m%d")-as.Date("2000-01-01")),";
|
|
259 | 263 |
}"),file=paste(tempdir(),"/time.cdl",sep="")) |
260 | 264 |
system(paste("ncgen -o ",tempdir(),"/time.nc ",tempdir(),"/time.cdl",sep="")) |
261 | 265 |
system(paste(ncopath,"ncks -A ",tempdir(),"/time.nc ",ncfile,sep="")) |
... | ... | |
268 | 272 |
|
269 | 273 |
### delete the temporary files |
270 | 274 |
unlink_.gislock() |
271 |
system("clean_temp") |
|
272 | 275 |
system(paste("rm -frR ",tf,sep="")) |
273 | 276 |
} |
274 | 277 |
|
... | ... | |
282 | 285 |
## Run the gridding procedure |
283 | 286 |
tile_bb=tb[tb$tile==tile,] ## identify tile of interest |
284 | 287 |
|
285 |
# if(venezuela) tile_bb$lat_max=11.0780999176 #increase northern boundary for KT |
|
288 |
##find swaths in region from sqlite database for the specified date/tile |
|
289 |
if(verbose) print("Accessing swath ID's from database") |
|
290 |
con=dbConnect("SQLite", dbname = db) |
|
291 |
fs=dbGetQuery(con,paste("SELECT * from swath_geo |
|
292 |
WHERE east>=",tile_bb$lon_min," AND |
|
293 |
west<=",tile_bb$lon_max," AND |
|
294 |
north>=",tile_bb$lat_min," AND |
|
295 |
south<=",tile_bb$lat_max," AND |
|
296 |
year==",format(as.Date(date,"%Y%m%d"),"%Y")," AND |
|
297 |
day==",as.numeric(format(as.Date(date,"%Y%m%d"),"%j")) |
|
298 |
)) |
|
299 |
con=dbDisconnect(con) |
|
300 |
fs$id=substr(fs$id,7,19) |
|
301 |
if(verbose) print(paste("###############",nrow(fs)," swath IDs recieved from database")) |
|
302 |
|
|
303 |
## find the swaths on disk (using datadir) |
|
304 |
swaths=list.files(datadir,pattern=paste(fs$id,collapse="|"),recursive=T,full=T) |
|
286 | 305 |
|
287 |
lapply(fs$path[fs$dateid==date],swath2grid,vars=vars,upleft=paste(tile_bb$lat_max,tile_bb$lon_min),lowright=paste(tile_bb$lat_min,tile_bb$lon_max)) |
|
306 |
## Run the gridding procedure |
|
307 |
lapply(swaths,swath2grid,vars=vars,upleft=paste(tile_bb$lat_max,tile_bb$lon_min),lowright=paste(tile_bb$lat_min,tile_bb$lon_max)) |
|
288 | 308 |
|
289 | 309 |
## confirm at least one file for this date is present |
290 |
outfiles=paste(tempdir(),"/",basename(fs$path[fs$dateid==date]),sep="")
|
|
310 |
outfiles=paste(tempdir(),"/",basename(swaths),sep="")
|
|
291 | 311 |
if(!any(file.exists(outfiles))) { |
292 | 312 |
print(paste("######################################## No gridded files for region exist for tile",tile," on date",date)) |
293 | 313 |
q("no",status=0) |
294 | 314 |
} |
295 | 315 |
|
296 |
#####################################################
|
|
316 |
##################################################### |
|
297 | 317 |
## Process the gridded files |
298 |
|
|
299 |
## run themod06 processing for this date |
|
318 |
|
|
319 |
## run the mod06 processing for this date
|
|
300 | 320 |
ncfile=paste(outdir,"/MOD06_",tile,"_",date,".nc",sep="") #this is the 'final' daily output file |
301 |
loadcloud(date,fs=fs,ncfile=ncfile)
|
|
302 |
|
|
321 |
loadcloud(date,swaths=swaths,ncfile=ncfile)
|
|
322 |
|
|
303 | 323 |
## Confirm that the file has the correct attributes, otherwise delete it |
304 | 324 |
ntime=as.numeric(system(paste("cdo -s ntime ",ncfile),intern=T)) |
305 |
npar=as.numeric(system(paste("cdo -s npar ",ncfile),intern=T)) |
|
306 |
if(!ntime==1&npar>0) { |
|
307 |
print(paste("File for tile ",tile," and date ",date," was not outputted correctly, deleting... ")) |
|
308 |
file.remove(ncfile) |
|
309 |
} |
|
310 |
|
|
311 |
## push file to lou and then delete it from pfe |
|
312 |
# system(paste("scp ",ncfile," ",ldir,sep="") ) |
|
313 |
# file.remove(ncfile) |
|
314 |
|
|
325 |
## confirm it has all 'final' variables as specified above |
|
326 |
fvar=all(finalvars%in%do.call(c,strsplit(system(paste("cdo -s showvar ",ncfile),intern=T)," "))) |
|
327 |
|
|
328 |
if(!ntime==1&fvar) { |
|
329 |
print(paste("FILE ERROR: tile ",tile," and date ",date," was not outputted correctly, deleting... ")) |
|
330 |
file.remove(ncfile) |
|
331 |
} |
|
332 |
|
|
315 | 333 |
## print out some info |
316 | 334 |
print(paste(" ################################################################### Finished ",date," |
317 | 335 |
################################################################")) |
... | ... | |
327 | 345 |
#foreach(i=1:20) %dopar% print(i) |
328 | 346 |
|
329 | 347 |
## delete old files |
330 |
#system("cleartemp")
|
|
348 |
system("cleartemp") |
|
331 | 349 |
|
332 | 350 |
q("no",status=0) |
climate/procedures/Pleiades.R | ||
---|---|---|
14 | 14 |
system("rm /nobackupp1/awilso10/software/heg/TOOLKIT_MTD/runtime/LogStatus") |
15 | 15 |
|
16 | 16 |
tile="h11v08" # Venezuela |
17 |
tile="h11v07" # Venezuela coast |
|
18 |
tile="h09v04" # Oregon |
|
19 |
|
|
20 |
outdir="2_daily" #directory for separate daily files |
|
21 |
outdir=paste("daily/",tile,"/",sep="") #directory for separate daily files |
|
22 |
if(!file.exists(outdir)) dir.create(outdir) |
|
23 |
outdir2="summary" #directory for combined daily files and summarized files |
|
24 |
if(!file.exists(outdir2)) dir.create(outdir2) |
|
25 |
|
|
26 |
## load a MOD11A1 file to define grid |
|
27 |
gridfile=list.files("/nobackupp4/datapool/modis/MOD11A1.005/2006.01.27/",pattern=paste(tile,".*hdf$",sep=""),full=T)[1] |
|
28 |
td=readGDAL(paste("HDF4_EOS:EOS_GRID:\"",gridfile,"\":MODIS_Grid_Daily_1km_LST:Night_view_angl",sep="")) |
|
29 |
projection(td)="+proj=sinu +lon_0=0 +x_0=0 +y_0=0 +a=6371007.181 +b=6371007.181 +units=m +no_defs +datum=WGS84 +ellps=WGS84 " |
|
30 |
|
|
17 |
#tile="h11v07" # Venezuela coast |
|
18 |
#tile="h09v04" # Oregon |
|
19 |
tile="h21v09" #Kenya |
|
31 | 20 |
|
32 | 21 |
### get list of files to process |
33 | 22 |
datadir="/nobackupp4/datapool/modis/MOD06_L2.005/" |
34 |
datadir="/nobackupp1/awilso10/mod06/data" #for data downloaded from |
|
35 |
|
|
36 |
fs=data.frame(path=list.files(datadir,full=T,recursive=T,pattern="hdf"),stringsAsFactors=F) |
|
37 |
fs$file=basename(fs$path) |
|
38 |
fs$date=as.Date(substr(fs$file,11,17),"%Y%j") |
|
39 |
fs$month=format(fs$date,"%m") |
|
40 |
fs$year=format(fs$date,"%Y") |
|
41 |
fs$time=substr(fs$file,19,22) |
|
42 |
fs$datetime=as.POSIXct(strptime(paste(substr(fs$file,11,17),substr(fs$file,19,22)), '%Y%j %H%M')) |
|
43 |
fs$dateid=format(fs$date,"%Y%m%d") |
|
44 |
fs$path=as.character(fs$path) |
|
45 |
fs$file=as.character(fs$file) |
|
23 |
#datadir="/nobackupp1/awilso10/mod06/data" #for data downloaded from |
|
24 |
|
|
25 |
outdir=paste("daily/",tile,sep="") |
|
26 |
|
|
27 |
##find swaths in region from sqlite database for the specified date/tile |
|
28 |
## path to swath database |
|
29 |
db="/nobackupp4/pvotava/DB/export/swath_geo.sql.sqlite3.db" |
|
30 |
con=dbConnect("SQLite", dbname = db) |
|
31 |
fs=dbGetQuery(con,paste("SELECT * from swath_geo |
|
32 |
WHERE east>=",tile_bb$lon_min," AND |
|
33 |
west<=",tile_bb$lon_max," AND |
|
34 |
north>=",tile_bb$lat_min," AND |
|
35 |
south<=",tile_bb$lat_max) |
|
36 |
) |
|
37 |
con=dbDisconnect(con) |
|
38 |
fs$id=substr(fs$id,7,19) |
|
39 |
|
|
40 |
### Identify which swaths are available in the datapool |
|
41 |
swaths=data.frame(path=list.files(datadir,pattern=paste("hdf$"),recursive=T,full=T),stringsAsFactors=F) #all swaths in data pool |
|
42 |
swaths$id=substr(basename(swaths$path),10,22) |
|
43 |
fs$exists=fs$id%in%swaths$id |
|
44 |
fs$path=swaths$path[match(fs$id,swaths$id)] |
|
45 |
|
|
46 |
if(verbose) print(paste("###############",nrow(fs)," swath IDs recieved from database")) |
|
46 | 47 |
|
47 |
## get all unique dates |
|
48 |
alldates=unique(fs$dateid) |
|
49 | 48 |
|
49 |
## get all unique dates |
|
50 |
fs$dateid=format(as.Date(paste(fs$year,fs$day,sep=""),"%Y%j"),"%Y%m%d") |
|
51 |
alldates=unique(fs$dateid[fs$exists]) |
|
50 | 52 |
|
51 | 53 |
#### Generate submission file |
52 | 54 |
## identify which have been completed |
53 |
#fdone=system(paste("ssh -q lou2 \"ls ",pdir," | grep \"nc$\"\"",sep=""),intern=T) |
|
54 | 55 |
fdone=list.files(outdir,pattern="nc$") |
55 | 56 |
done=alldates%in%substr(fdone,14,21) |
56 | 57 |
|
57 |
if(exists("fdly")){ #update using table from below |
|
58 |
done[alldates%in%fdly$dateid[is.na(fdly$npar)]]=F |
|
59 |
} |
|
58 |
### report on what has already been processed |
|
59 |
print(paste(table(done)[2]," out of",length(alldates), |
|
60 |
"(",round(100*table(done)[2]/length(alldates),1), |
|
61 |
"%) dates for tile",tile, |
|
62 |
"have been processed. Breakdown by year of completed days:")) |
|
63 |
print(table(substr(alldates[done],1,4))) |
|
64 |
|
|
65 |
#updatedone=F #update the "done" list using the |
|
66 |
#if(updatedone&exists("fdly")){ #update using table from below |
|
67 |
# done[alldates%in%fdly$dateid[fdly$drop]]=F |
|
68 |
#} |
|
60 | 69 |
|
61 |
table(done) |
|
62 |
notdone=alldates[!done] #these are the dates that still need to be processed |
|
70 |
## Identify which dates still need to be processed |
|
71 |
## This vector will be used to tell mpiexec which days to include |
|
72 |
notdone=alldates[!done] |
|
63 | 73 |
|
64 | 74 |
script="/u/awilso10/environmental-layers/climate/procedures/MOD06_L2_process.r" |
75 |
climatescript="/u/awilso10/environmental-layers/climate/procedures/MOD06_Climatology.r" |
|
65 | 76 |
|
77 |
## write the table processed by mpiexec |
|
66 | 78 |
write.table(paste("--verbose ",script," --date ",notdone," --verbose T --tile \"",tile,"\"",sep=""),file=paste(tile,"_notdone.txt",sep=""),row.names=F,col.names=F,quote=F) |
67 | 79 |
|
68 |
save(fs,alldates,gridfile,td,file="allfiles.Rdata") |
|
69 |
|
|
70 | 80 |
### qsub script |
71 | 81 |
cat(paste(" |
72 | 82 |
#PBS -S /bin/bash |
73 |
#PBS -l select=40:ncpus=8:mpiprocs=8
|
|
83 |
#PBS -l select=50:ncpus=8:mpiprocs=8
|
|
74 | 84 |
##PBS -l select=2:ncpus=4:mpiprocs=4 |
75 | 85 |
#PBS -l walltime=2:00:00 |
76 | 86 |
#PBS -j n |
... | ... | |
79 | 89 |
#PBS -q devel |
80 | 90 |
#PBS -V |
81 | 91 |
|
82 |
CORES=320
|
|
92 |
CORES=400
|
|
83 | 93 |
HDIR=/u/armichae/pr/ |
84 | 94 |
source $HDIR/etc/environ.sh |
85 | 95 |
source /u/awilso10/.bashrc |
... | ... | |
87 | 97 |
##WORKLIST=$HDIR/var/run/pxrRgrs/work.txt |
88 | 98 |
WORKLIST=$IDIR/",tile,"_notdone.txt |
89 | 99 |
EXE=Rscript |
90 |
LOGSTDOUT=$IDIR/log/log.stdout |
|
91 |
LOGSTDERR=$IDIR/log/log.stderr |
|
100 |
LOGSTDOUT=$IDIR/log/",tile,"_stdout |
|
101 |
LOGSTDERR=$IDIR/log/",tile,"_stderr |
|
102 |
### use mpiexec to parallelize across days |
|
92 | 103 |
mpiexec -np $CORES pxargs -a $WORKLIST -p $EXE -v -v -v --work-analyze 1> $LOGSTDOUT 2> $LOGSTDERR |
104 |
### Now process the climatologies |
|
105 |
Rscript --verbose ",climatescript," --verbose T --tile \"",tile,"\" |
|
93 | 106 |
",sep=""),file=paste(tile,"_mod06_qsub",sep="")) |
94 | 107 |
|
95 | 108 |
|
96 | 109 |
### Check the file |
97 | 110 |
system(paste("cat ",tile,"_mod06_qsub",sep="")) |
98 |
#system("cat ~/environmental-layers/climate/procedures/MOD06_L2_process.r") |
|
99 |
|
|
100 |
## check queue status |
|
101 |
system("/u/scicon/tools/bin/node_stats.sh") |
|
102 |
system("/u/scicon/tools/bin/qtop.pl 492352") |
|
103 | 111 |
|
104 | 112 |
## Submit it (and keep the pid)! |
105 | 113 |
system(paste("qsub ",tile,"_mod06_qsub",sep="")) |
... | ... | |
110 | 118 |
|
111 | 119 |
## check progress |
112 | 120 |
system("qstat -u awilso10") |
113 |
system(paste("/u/scicon/tools/bin/qps ",568835)) |
|
114 |
system(paste("qstat -t -x",pid)) |
|
115 |
|
|
116 |
|
|
117 |
#################################### |
|
118 |
|
|
119 |
## move to permanent storage on lou? |
|
120 |
# system(paste("scp -r ",ncfile," ",pdir,sep="")) |
|
121 |
file.remove( |
|
122 |
list.files(pattern="filetable[.]temp|GetAttrtemp|core[.]|MCFWrite[.]temp") |
|
123 |
) |
|
124 |
|
|
125 |
################################################################################ |
|
126 |
## now generate the climatologies |
|
127 |
fdly=data.frame( |
|
128 |
path=list.files(outdir,pattern="nc$",full=T), |
|
129 |
file=list.files(outdir,pattern="nc$")) |
|
130 |
fdly$dateid=substr(fdly$file,14,21) |
|
131 |
fdly$date=as.Date(substr(fdly$file,14,21),"%Y%m%d") |
|
132 |
fdly$month=format(fdly$date,"%m") |
|
133 |
fdly$year=format(fdly$date,"%Y") |
|
134 |
nrow(fdly) |
|
135 |
|
|
136 |
## check validity (via npar and ntime) of nc files |
|
137 |
for(i in 1:nrow(fdly)){ |
|
138 |
fdly$ntime[i]<-as.numeric(system(paste("cdo -s ntime ",fdly$path[i]),intern=T)) |
|
139 |
fdly$npar[i]<-as.numeric(system(paste("cdo -s npar ",fdly$path[i]),intern=T)) |
|
140 |
fdly$fyear[i]<-as.numeric(system(paste("cdo -s showyear ",fdly$path[i]),intern=T)) |
|
141 |
fdly$fmonth[i]<-as.numeric(system(paste("cdo -s showmon ",fdly$path[i]),intern=T)) |
|
142 |
fdly$fvar[i]<-system(paste("cdo -s showvar ",fdly$path[i]),intern=T) |
|
143 |
print(paste(i," out of ",nrow(fdly)," for year ", fdly$fyear[i])) |
|
144 |
} |
|
145 |
|
|
146 |
### table of problematic files |
|
147 |
table(is.na(fdly$npar)) |
|
148 |
table(fdly$fmonth) |
|
149 |
table(fdly$fvar) |
|
150 |
|
|
151 |
fdly[is.na(fdly$npar),] |
|
152 |
|
|
153 |
## delete files that fail check? |
|
154 |
delete=T |
|
155 |
if(delete) { |
|
156 |
drop=is.na(fdly$npar)|fdly$fvar!=" CER COT CLD" |
|
157 |
file.remove(as.character(fdly$path[drop])) |
|
158 |
fdly=fdly[!drop,] |
|
159 |
} |
|
160 |
|
|
161 |
## Combine all days within years into a single file (can't mergetime all days at once because this opens too many files) |
|
162 |
ncopath="/nasa/sles11/nco/4.0.8/gcc/mpt/bin/" |
|
163 |
|
|
164 |
library(multicore) |
|
165 |
|
|
166 |
## create temporary directory to put intermediate files (will be deleted when R quits) |
|
167 |
tsdir=paste(tempdir(),"/summary",sep="") |
|
168 |
dir.create(tsdir) |
|
169 |
|
|
170 |
## create a file of daily scenes for each year |
|
171 |
#mclapply(unique(fdly$year),function(y){ |
|
172 |
# system(paste("cdo -O mergetime ",paste(fdly$path[!is.na(fdly$npar)&fdly$year==y],collapse=" ")," ",tsdir,"/MOD06_",tile,"_",y,"_daily.nc",sep="")) |
|
173 |
# print(paste("Finished merging daily files for year",y)) |
|
174 |
#},mc.cores=5) |
|
175 |
|
|
176 |
|
|
177 |
## merge all daily files to create a single file with all dates |
|
178 |
system(paste(ncopath,"ncrcat -O ",outdir,"/*nc ",outdir2,"/MOD06_",tile,"_daily.nc",sep="")) |
|
179 |
|
|
180 |
##############################
## Combine the year-by-year files into a single daily file in the summary directory (for archiving)
#system(paste("cdo -O mergetime ",paste(list.files(tsdir,full=T,pattern="daily[.]nc$"),collapse=" ")," ",outdir2,"/MOD06_",tile,"_daily.nc",sep=""))

## Stamp the merged daily timeseries with time units and global metadata.
daily_atts_cmd <- paste(ncopath, "ncatted ",
  " -a units,time,o,c,\"days since 2000-1-1 0:0:0\" ",
  " -a title,global,o,c,\"MODIS Cloud Product (MOD06) Daily Timeseries\" ",
  " -a institution,global,o,c,\"Yale University\" ",
  " -a source,global,o,c,\"MODIS Cloud Product (MOD06)\" ",
  " -a comment,global,o,c,\"Compiled by Adam M. Wilson (adam.wilson@yale.edu)\" ",
  outdir2, "/MOD06_", tile, "_daily.nc", sep = "")
system(daily_atts_cmd)

#system(paste("cdo -O monmean ",outdir2,"/MOD06_",tile,"_daily.nc ",tsdir,"/MOD06_",tile,"_monmean.nc",sep=""))
|
192 |
|
|
193 |
#############################
## Generate the Climatologies

## All monthly climatology timestamps are stamped with this year (and day = 15).
myear <- as.integer(max(fdly$year))

## Monthly means
mean_cmd <- paste("cdo -O sorttimestamp -setyear,", myear, " -setday,15 -ymonmean ",
  outdir2, "/MOD06_", tile, "_daily.nc ",
  tsdir, "/MOD06_", tile, "_ymonmean.nc", sep = "")
system(mean_cmd)

## Monthly standard deviation
std_cmd <- paste("cdo -O sorttimestamp -setyear,", myear, " -setday,15 -ymonstd ",
  outdir2, "/MOD06_", tile, "_daily.nc ",
  tsdir, "/MOD06_", tile, "_ymonstd.nc", sep = "")
system(std_cmd)
## rename so these can be appended next to the means without a name clash
system(paste(ncopath, "ncrename -v CER,CER_sd -v CLD,CLD_sd -v COT,COT_sd ", tsdir, "/MOD06_", tile, "_ymonstd.nc", sep = ""))
## describe the renamed variables
system(paste(ncopath, "ncatted ",
  " -a long_name,CER_sd,o,c,\"Cloud Particle Effective Radius (standard deviation of daily observations)\" ",
  " -a long_name,CLD_sd,o,c,\"Cloud Mask (standard deviation of daily observations)\" ",
  " -a long_name,COT_sd,o,c,\"Cloud Optical Thickness (standard deviation of daily observations)\" ",
  tsdir, "/MOD06_", tile, "_ymonstd.nc", sep = ""))
|
208 |
|
|
209 |
## cer > 20
## Proportion of days with CER above 20um: -gtc,20 turns CER into a 0/1
## indicator, then -ymonmean averages that indicator within each month.
cer20_cmd <- paste("cdo -O sorttimestamp -setyear,", myear,
  " -setday,15 -ymonmean -gtc,20 -selvar,CER ",
  outdir2, "/MOD06_", tile, "_daily.nc ",
  tsdir, "/MOD06_", tile, "_ymoncer20.nc", sep = "")
system(cer20_cmd)
system(paste(ncopath, "ncrename -v CER,CER20 ", tsdir, "/MOD06_", tile, "_ymoncer20.nc", sep = ""))
system(paste(ncopath, "ncatted ",
  " -a long_name,CER20,o,c,\"Proportion of Days with Cloud Particle Effective Radius > 20um\" ",
  " -a units,CER20,o,c,\"Proportion\" ",
  tsdir, "/MOD06_", tile, "_ymoncer20.nc", sep = ""))
|
216 |
|
|
217 |
## number of observations
## Percent of days with missing data per month: missing cells are set to
## 9999 (-setmisstoc), converted to a 0/1 indicator (-eqc,9999), averaged
## per calendar month (-ymonmean), scaled to percent (-mulc,100) and
## rounded to the nearest integer (-nint). The 0.01 scale_factor below
## converts the stored percent back to a proportion.
miss_cmd <- paste("cdo -O sorttimestamp -setyear,", myear,
  " -setday,15 -nint -mulc,100 -ymonmean -eqc,9999 -setmisstoc,9999 -selvar,CER,CLD ",
  outdir2, "/MOD06_", tile, "_daily.nc ",
  tsdir, "/MOD06_", tile, "_ymonmiss.nc", sep = "")
system(miss_cmd)
system(paste(ncopath, "ncrename -v CER,CER_pmiss -v CLD,CLD_pmiss ", tsdir, "/MOD06_", tile, "_ymonmiss.nc", sep = ""))
system(paste(ncopath, "ncatted ",
  " -a long_name,CER_pmiss,o,c,\"Proportion of Days with missing data for CER\" ",
  " -a long_name,CLD_pmiss,o,c,\"Proportion of Days with missing data for CLD\" ",
  " -a scale_factor,CER_pmiss,o,d,0.01 ",
  " -a units,CER_pmiss,o,c,\"Proportion\" ",
  " -a scale_factor,CLD_pmiss,o,d,0.01 ",
  " -a units,CLD_pmiss,o,c,\"Proportion\" ",
  tsdir, "/MOD06_", tile, "_ymonmiss.nc", sep = ""))
|
228 |
|
|
229 |
## append variables to a single file
system(paste(ncopath, "ncks -O ", tsdir, "/MOD06_", tile, "_ymonmean.nc ", tsdir, "/MOD06_", tile, "_ymon.nc", sep = ""))
system(paste(ncopath, "ncks -A ", tsdir, "/MOD06_", tile, "_ymonstd.nc ", tsdir, "/MOD06_", tile, "_ymon.nc", sep = ""))
system(paste(ncopath, "ncks -A ", tsdir, "/MOD06_", tile, "_ymoncer20.nc ", tsdir, "/MOD06_", tile, "_ymon.nc", sep = ""))
system(paste(ncopath, "ncks -A ", tsdir, "/MOD06_", tile, "_ymonmiss.nc ", tsdir, "/MOD06_", tile, "_ymon.nc", sep = ""))

## append sinusoidal grid from one of input files as CDO doesn't transfer all attributes
## FIX: the previous code took list.files(tsdir, pattern="daily[.]nc$")[1],
## but the per-year daily files in tsdir are only created by the
## (commented-out) mergetime step above, so list.files() returned nothing
## and ncks was invoked with an NA path. The merged daily file in outdir2
## is always created by the ncrcat step and carries the same grid variable.
system(paste(ncopath, "ncks -A -v sinusoidal ", outdir2, "/MOD06_", tile, "_daily.nc ", tsdir, "/MOD06_", tile, "_ymon.nc", sep = ""))

## invert latitude so it plays nicely with gdal
system(paste(ncopath, "ncpdq -O -a -y ", tsdir, "/MOD06_", tile, "_ymon.nc ", outdir2, "/MOD06_", tile, ".nc", sep = ""))
|
240 |
|
|
241 |
## update attributes
## Stamp the final climatology with time units and global metadata.
final_atts_cmd <- paste(ncopath, "ncatted ",
  #" -a standard_parallel,sinusoidal,o,c,\"0\" ",
  #" -a longitude_of_central_meridian,sinusoidal,o,c,\"0\" ",
  #" -a latitude_of_central_meridian,sinusoidal,o,c,\"0\" ",
  " -a units,time,o,c,\"days since 2000-1-1 0:0:0\" ",
  " -a title,global,o,c,\"MODIS Cloud Product (MOD06) Climatology\" ",
  " -a institution,global,o,c,\"Yale University\" ",
  " -a source,global,o,c,\"MODIS Cloud Product (MOD06)\" ",
  " -a comment,global,o,c,\"Compiled by Adam M. Wilson (adam.wilson@yale.edu)\" ",
  outdir2, "/MOD06_", tile, ".nc", sep = "")
system(final_atts_cmd)


print("Finished! %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%")
## quit R
#q("no")
|
257 |
|
|
258 | 121 |
|
259 |
#################################################################
### copy the files back to Yale
system(paste("scp ", outdir2, "/MOD06_", tile, ".nc adamw@acrobates.eeb.yale.edu:/data/personal/adamw/projects/interp/data/modis/mod06/summary", sep = ""))

## NOTE(review): the two transfers below reference a session-specific R
## tempdir (/tmp/Rtmp6I6tFn) and a relative path (2_daily/) that only exist
## on the machine/session where those files were produced -- confirm these
## ad-hoc one-off copies are still wanted before running non-interactively.
system("scp /tmp/Rtmp6I6tFn/MOD06_L2.A2000061.1615.051.2010273184629.hdf adamw@acrobates.eeb.yale.edu:/data/personal/adamw/projects/interp/data/modis/Venezuela")
system("scp 2_daily/MOD06_20000410.nc adamw@acrobates.eeb.yale.edu:/data/personal/adamw/projects/interp/data/modis/Venezuela")
Also available in: Unified diff
Adding Climatology script to process daily files to climatologies