Revision af411ccf
Added by Adam Wilson about 11 years ago

updating postprocessing to produce netcdf rather than geotif
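The revision reworks ee_compile.R so that each warped monthly cloud-frequency GeoTIFF is translated into a compressed netCDF-4 file, with a time dimension added and the two bands renamed to CF and CFsd via the NCO tools, rather than being written back out as GeoTIFF. A minimal sketch of that flow, with illustrative file names, assuming the GDAL and NCO command-line utilities are installed:

```r
## Sketch only: condensed from the new ee_compile.R loop below; paths are illustrative
tif <- "data/mod09cloud2/modcf_01.tif"   # a warped monthly cloud-frequency GeoTIFF
nc  <- "data/mod09cloud2/modcf_01.nc"    # compressed netCDF-4 output

## translate the GeoTIFF to netCDF-4 classic with DEFLATE compression
system(paste("gdal_translate -of netCDF -co COMPRESS=DEFLATE -co FORMAT=NC4C -co ZLEVEL=9", tif, nc))

## add a record (time) dimension and rename the bands
system(paste0("ncecat -O -u time ", nc, " ", nc))
system(paste0("ncrename -v Band1,CF ", nc))
system(paste0("ncrename -v Band2,CFsd ", nc))

## attach basic variable metadata
system(paste0("ncatted -a units,CF,o,c,\"%\" -a _FillValue,CF,o,b,\"255\" ", nc))
```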
climate/research/cloud/MOD09/MOD09_CloudFigures.R
---|---|---
30 | 30 | #cld$month2=factor(cld$month,labels=month.name)
31 | 31 | cldm$month2=factor(cldm$month,labels=month.name)
32 | 32 |
| 33 | ### Drop validation station-months with fewer than 20 years of data for the full record or fewer than 10 years for the MODIS-era record
| 34 | cldm$cld_all[cldm$cldn_all<20]=NA
| 35 | cldm$cldsd_all[cldm$cldn_all<20]=NA
| 36 |
| 37 | cldm$cld[cldm$cldn<10]=NA
| 38 | cldm$cldsd[cldm$cldn<10]=NA
| 39 |
33 | 40 | coordinates(st)=c("lon","lat")
34 | 41 | projection(st)=CRS("+proj=longlat +ellps=WGS84 +datum=WGS84 +no_defs")
35 | 42 |
... | ... |
50 | 57 | mod09metrics=stack(mod09a,mod09min,mod09max,mod09sd)
51 | 58 | names(mod09metrics)=c("Mean","Minimum","Maximum","Standard Deviation")
52 | 59 |
| 60 | mod09inter=brick("data/cloud_std_inter.nc",varname="CF_sd")
| 61 | mod09intra=brick("data/cloud_std_intra.nc",varname="CFsd")
| 62 |
| 63 | ## get stratified sample of points from biomes for illustration
| 64 | if(!file.exists("output/biomesamplepoints.csv")){
| 65 | biome=readOGR("data/teow/","biomes")
| 66 | n_biomesamples=1000
| 67 | library(multicore)
| 68 | biomesample=do.call(rbind.data.frame,mclapply(1:length(biome),function(i)
| 69 | data.frame(code=biome$code[i],coordinates(spsample(biome[i,],n=n_biomesamples,type="stratified",nsig=2)))))
| 70 | colnames(biomesample)[2:3]=c("lon","lat")
| 71 | biomesample[,c("biome","realm")]=biome@data[match(biomesample$code,biome$code),c("biome","realm")]
| 72 | write.csv(biomesample,"output/biomesamplepoints.csv",row.names=F)
| 73 | }
| 74 |
| 75 | ## Extract data for points
| 76 | if(!file.exists("output/biomesamplepoints_cloud.csv")){
| 77 | biomesample=read.csv("output/biomesamplepoints.csv")
| 78 | biomep=raster::extract(mod09c,biomesample,sp=T)
| 79 | biomep$lon=biomesample$lon
| 80 | biomep@data[,c("lon","lat")]=coordinates(biomep)
| 81 | write.csv(biomep@data,"output/biomesamplepoints_cloud.csv",row.names=F)
| 82 | }
| 83 | biomep=read.csv("output/biomesamplepoints_cloud.csv")
| 84 | coordinates(biomep)=c("lon","lat")
| 85 | projection(biomep)="+proj=longlat +ellps=WGS84 +datum=WGS84 +no_defs"
| 86 |
| 87 |
| 88 | ## get stratified sample of points from biomes for illustration
| 89 | if(!file.exists("output/biomesamplepoints.csv")){
| 90 | biome=readOGR("data/teow/","biomes")
| 91 | n_biomesamples=1000
| 92 | library(multicore)
| 93 | biomesample=do.call(rbind.data.frame,mclapply(1:length(biome),function(i)
| 94 | data.frame(code=biome$code[i],coordinates(spsample(biome[i,],n=n_biomesamples,type="stratified",nsig=2)))))
| 95 | colnames(biomesample)[2:3]=c("lon","lat")
| 96 | biomesample[,c("biome","realm")]=biome@data[match(biomesample$code,biome$code),c("biome","realm")]
| 97 | write.csv(biomesample,"output/biomesamplepoints.csv",row.names=F)
| 98 | }
| 99 |
| 100 | ## Extract data for points
| 101 | if(!file.exists("output/biomesamplepoints_cloud.csv")){
| 102 | biomesample=read.csv("output/biomesamplepoints.csv")
| 103 | biomep=raster::extract(mod09c,biomesample,sp=T)
| 104 | biomep$lon=biomesample$lon
| 105 | biomep@data[,c("lon","lat")]=coordinates(biomep)
| 106 | write.csv(biomep@data,"output/biomesamplepoints_cloud.csv",row.names=F)
| 107 | }
| 108 | biomep=read.csv("output/biomesamplepoints_cloud.csv")
| 109 | coordinates(biomep)=c("lon","lat")
| 110 | projection(biomep)="+proj=longlat +ellps=WGS84 +datum=WGS84 +no_defs"
| 111 |
| 112 |
53 | 113 | #plot(mod09a,layers=1,margin=F,maxpixels=100)
54 | 114 |
55 | 115 | ## calculated differences
... | ... |
111 | 171 | #pdf("output/Figures.pdf",width=11,height=8.5)
112 | 172 | png("output/CF_Figures_%03d.png",width=5000,height=4000,res=600,pointsize=42,bg="white")
113 | 173 |
114 | | res=1e4
| 174 | ## set plotting parameters
| 175 | my.theme = trellis.par.get()
| 176 | my.theme$strip.background=list(col="transparent")
| 177 | trellis.par.set(my.theme)
| 178 |
| 179 | res=1e6
115 | 180 | greg=list(ylim=c(-60,84),xlim=c(-180,180))
116 | 181 |
117 | 182 | ## Figure 1: 4-panel summaries
118 | 183 | #- Annual average
119 | | levelplot(mod09a,col.regions=colr(n),cuts=100,at=seq(0,100,len=100),colorkey=list(title="Cloud Frequency (%)",space="bottom",adj=1),
| 184 | levelplot(mod09a,col.regions=colr(n),cuts=100,at=seq(0,100,len=100),colorkey=list(space="bottom",adj=1),
120 | 185 | margin=F,maxpixels=res,ylab="",xlab="",useRaster=T,ylim=greg$ylim)+
121 | | layer(sp.lines(coast,col="black"),under=F)
| 186 | layer(panel.polygon(x=c(-180,-180,180,180),y=c(-90,90,90,-90),col="black"),under=T)+
| 187 | layer(sp.lines(coast,col="black"),under=F)
122 | 188 | ## Mean annual with validation stations
123 | 189 | levelplot(mod09a,col.regions=colr(n),cuts=100,at=seq(0,100,len=100),colorkey=list(title="Cloud Frequency (%)",space="bottom",adj=1),
124 | 190 | margin=F,maxpixels=res,ylab="",xlab="",useRaster=T,ylim=greg$ylim)+
125 | | layer(panel.xyplot(lon,lat,pch=16,cex=.3,col="black"),data=data.frame(coordinates(st)))+
| 191 | layer(panel.polygon(x=c(-180,-180,180,180),y=c(-90,90,90,-90),col="black"),under=T)+
| 192 | layer(panel.xyplot(lon,lat,pch=16,cex=.3,col="black"),data=data.frame(coordinates(st)))+
126 | 193 | layer(sp.lines(coast,col="black"),under=F)
127 | 194 |
128 | 195 | ## Seasonal Means
129 | 196 | levelplot(mod09s,col.regions=colr(n),cuts=100,at=seq(0,100,len=100),colorkey=list(title="Cloud Frequency (%)", space="bottom",adj=2),
130 | 197 | margin=F,maxpixels=res,ylab="",xlab="",useRaster=T,ylim=greg$ylim)+
131 | | layer(sp.lines(coast,col="black"),under=F)
| 198 | layer(panel.polygon(x=c(-180,-180,180,180),y=c(-90,90,90,-90),col="black"),under=T)+
| 199 | layer(sp.lines(coast,col="black"),under=F)
132 | 200 |
133 | | ## four metrics
134 | | levelplot(mod09metrics,col.regions=colr(n),cuts=100,at=seq(0,100,len=100),colorkey=list(title="Cloud Frequency (%)", space="bottom",adj=2),
135 | | margin=F,maxpixels=res,ylab="",xlab="",useRaster=T,ylim=greg$ylim)+
136 | | layer(sp.lines(coast,col="black"),under=F)
137 | 201 |
138 | 202 | ## Monthly Means
139 | 203 | levelplot(mod09c,col.regions=colr(n),cuts=100,at=seq(0,100,len=100),colorkey=list(title="Cloud Frequency (%)", space="bottom",adj=1),
140 | 204 | margin=F,maxpixels=res,ylab="Latitude",xlab="Longitude",useRaster=T,ylim=greg$ylim)+
141 | | layer(sp.lines(coast,col="black"),under=F)
| 205 | layer(panel.polygon(x=c(-180,-180,180,180),y=c(-90,90,90,-90),col="black"),under=T)+
| 206 | layer(sp.lines(coast,col="black"),under=F)
142 | 207 |
143 | 208 | #- Monthly minimum
144 | 209 | #- Monthly maximum
145 | 210 | #- STDEV or Min-Max
146 | | p_mac=levelplot(mod09a,col.regions=colr(n),cuts=99,at=seq(0,100,length=100),margin=F,maxpixels=res/10,colorkey=list(title="Cloud Frequency (%)", space="bottom",height=.75),xlab="",ylab="",useRaster=T)+
147 | | layer(sp.lines(coast,col="black"),under=F)
148 | | p_min=levelplot(mod09min,col.regions=colr(n),cuts=99,margin=F,maxpixels=res/10,colorkey=list(title="Cloud Frequency (%)", space="bottom",height=.75),useRaster=T)+
149 | | layer(sp.lines(coast,col="black"),under=F)
150 | | p_max=levelplot(mod09max,col.regions=colr(n),cuts=99,margin=F,maxpixels=res/10,colorkey=list(space="bottom",height=.75),useRaster=T)+
151 | | layer(sp.lines(coast,col="black"),under=F)
152 | | p_sd=levelplot(mod09sd,col.regions=colr(n),cuts=99,at=seq(0,100,length=100),margin=F,maxpixels=res/10,colorkey=list(space="bottom",height=.75),useRaster=T)+
153 | | layer(sp.lines(coast,col="black"),under=F)
154 | | p3=c("Mean Cloud Frequency (%)"=p_mac,"Max Cloud Frequency (%)"=p_max,"Min Cloud Frequency (%)"=p_min,"Cloud Frequency Variability (SD)"=p_sd,x.same=T,y.same=T,merge.legends=T,layout=c(2,2))
| 211 | p_mac=levelplot(mod09a,col.regions=colr(n),cuts=99,at=seq(0,100,length=100),margin=F,
| 212 | ylim=greg$ylim,maxpixels=res/10,colorkey=list(title="Cloud Frequency (%)", space="top",height=.75),xlab="",ylab="",useRaster=T)+
| 213 | layer(panel.polygon(x=c(-180,-180,180,180),y=c(-90,90,90,-90),col="black"),under=T)+
| 214 | layer(sp.lines(coast,col="black"),under=F)
| 215 | p_max=levelplot(mod09max,col.regions=colr(n),cuts=99,at=seq(0,100,length=100),margin=F,maxpixels=res/10,
| 216 | ylim=greg$ylim,colorkey=list(space="bottom",height=.75),useRaster=T)+
| 217 | layer(panel.polygon(x=c(-180,-180,180,180),y=c(-90,90,90,-90),col="black"),under=T)+
| 218 | layer(sp.lines(coast,col="black"),under=F)
| 219 | p_intra=levelplot(mod09intra,col.regions=colr(n),cuts=99,at=seq(0,40,length=100),margin=F,maxpixels=res/100,
| 220 | ylim=greg$ylim,colorkey=list(space="bottom",height=.75),useRaster=T)+
| 221 | layer(panel.polygon(x=c(-180,-180,180,180),y=c(-90,90,90,-90),col="black"),under=T)+
| 222 | layer(sp.lines(coast,col="black"),under=F)
| 223 | p_inter=levelplot(mod09inter,col.regions=colr(n),cuts=99,at=seq(0,15,length=100),margin=F,maxpixels=res/100,
| 224 | ylim=greg$ylim,colorkey=list(title="Cloud Frequency (%)", space="top",height=.75),useRaster=T)+
| 225 | layer(panel.polygon(x=c(-180,-180,180,180),y=c(-90,90,90,-90),col="black"),under=T)+
| 226 | layer(sp.lines(coast,col="black"),under=F)
| 227 |
| 228 | p3=c("Mean Cloud Frequency (%)"=p_mac,"Max Cloud Frequency (%)"=p_max,"Interannual Variability (sd)"=p_inter,"Intraannual Variability (sd)"=p_intra,x.same=T,y.same=F,merge.legends=T,layout=c(2,2))
155 | 229 | print(p3)
156 | 230 |
157 | 231 | bgr=function(x,n=100,br=0,c1=c("darkblue","blue","grey"),c2=c("grey","red","purple")){
... | ... |
161 | 235 | return(list(at=at,col=c(bg(sum(at<br)),gr(sum(at>=br)))))
162 | 236 | }
163 | 237 |
164 | | colat=bgr(cldm$difm)
165 | | phist=histogram(cldm$difm,breaks=colat$at,border=NA,col=colat$col,xlim=c(-50,40),type="count",xlab="Difference (MOD09-NDP026D)")#,seq(0,1,len=6),na.rm=T)
166 | | pmap=xyplot(lat~lon|month2,data=cldm,groups=cut(cldm$difm,rev(colat$at)),
| 238 | cldm$resid=NA
| 239 | # get residuals of simple linear model
| 240 | cldm$resid[!is.na(cldm$cld_all)&!is.na(cldm$mod09)]=residuals(lm(mod09~cld_all,data=cldm))
| 241 | colat=bgr(cldm$resid)
| 242 | phist=histogram(cldm$resid,breaks=colat$at,border=NA,col=colat$col,xlim=c(-30,30),type="count",xlab="MODCF Residuals")#,seq(0,1,len=6),na.rm=T)
| 243 | pmap=xyplot(lat~lon|month2,data=cldm,groups=cut(cldm$resid,rev(colat$at)),
167 | 244 | par.settings=list(superpose.symbol=list(col=colat$col)),pch=16,cex=.25,
168 | 245 | auto.key=F,#list(space="right",title="Difference\n(MOD09-NDP026D)",cex.title=1),asp=1,
169 | 246 | ylab="Latitude",xlab="Longitude")+
... | ... |
174 | 251 | ### heatmap of mod09 vs. NDP for all months
175 | 252 | hmcols=colorRampPalette(c("grey","blue","red","purple"))
176 | 253 | #hmcols=colorRampPalette(c(grey(.8),grey(.3),grey(.2)))
177 | | tr=c(0,80)
| 254 | tr=c(0,66)
178 | 255 | colkey <- draw.colorkey(list(col = hmcols(tr[2]), at = tr[1]:tr[2],height=.25))
179 | 256 |
180 | 257 | xyplot(cld_all~mod09|month2,data=cldm,panel=function(x,y,subscripts){
... | ... |
190 | 267 | # panel.abline(0,1,col="black",lwd=2)
191 | 268 | panel.abline(lm(y ~ x),col="black",lwd=2)
192 | 269 | # panel.ablineq(lm(y ~ x), r.sq = TRUE,at = 0.6,pos=1, offset=0,digits=2,col="blue")
193 | | panel.text(70,10,bquote(paste(R^2,"=",.(round(summary(lm(y ~ x))$r.squared,2)))),cex=1.2)
| 270 | panel.text(70,10,bquote(paste(R^2,"=",.(round(summary(lm(y ~ x))$r.squared,2)))),cex=1)
194 | 271 | },asp=1,scales=list(at=seq(0,100,len=6),useRaster=T,colorkey=list(width=.5,title="Number of Stations")),
195 | | ylab="NDP Mean Cloud Amount (%)",xlab="MOD09 Cloud Frequency (%)",
| 272 | ylab="NDP Mean Cloud Amount (%)",xlab="MODCF Cloud Frequency (%)",
196 | 273 | legend= list(right = list(fun = colkey)))#+ layer(panel.abline(0,1,col="black",lwd=2))
197 | 274 |
198 | 275 |
199 | | ## Monthly Climatologies
200 | | ## for(i in 1:2){
201 | | ## p1=xyplot(cld~mod09|month2,data=cldm,cex=.2,pch=16,subscripts=T,ylab="NDP Mean Cloud Amount",xlab="MOD09 Cloud Frequency (%)")+
202 | | ## layer(panel.lines(1:100,predict(lm(y~x),newdata=data.frame(x=1:100)),col="green"))+
203 | | ## layer(panel.lines(1:100,predict(lm(y~x+I(x^2)),newdata=data.frame(x=1:100)),col="blue"))+
204 | | ## layer(panel.abline(0,1,col="red"))
205 | | ## if(i==2){
206 | | ## p1=p1+layer(panel.segments(mod09[subscripts],cld[subscripts]-cldsd[subscripts],mod09[subscripts],cld[subscripts]+cldsd[subscripts],subscripts=subscripts,col="grey"),data=cldm,under=T,magicdots=T)
207 | | ## p1=p1+layer(panel.segments(mod09[subscripts]-mod09sd[subscripts],cld[subscripts],mod09[subscripts]+mod09sd[subscripts],cld[subscripts],subscripts=subscripts,col="grey"),data=cldm,under=T,magicdots=T)
208 | | ## }
209 | | ## print(p1)
210 | | ## }
211 | |
212 | 276 | bwplot(lulcc~difm,data=cldm,horiz=T,xlab="Difference (MOD09-Observed)",varwidth=T,notch=T)+layer(panel.abline(v=0))
213 | | bwplot(biome~difm,data=cldm,horiz=T,xlab="Difference (MOD09-Observed)",varwidth=T,notch=T)+layer(panel.abline(v=0))
214 | |
215 | | #library(BayesFactor)
216 | | #coplot(difm~month2|lulcc,data=cldm[!is.na(cldm$dif)&!is.na(cldm$lulcc),],panel = panel.smooth)
217 | |
218 | 277 |
219 | 278 | dev.off()
220 | 279 |
... | ... |
223 | 282 | ## Compare with worldclim and NPP
224 | 283 | #wc=stack(as.list(paste("/mnt/data/jetzlab/Data/environ/global/worldclim/prec_",1:12,".bil",sep="")))
225 | 284 | wc_map=stack(as.list(paste("/mnt/data/jetzlab/Data/environ/global/worldclim/bio_12.bil",sep="")))
| 285 | wc_dem=stack(as.list(paste("/mnt/data/jetzlab/Data/environ/global/worldclim/alt.bil",sep="")))
226 | 286 |
227 | 287 | regs=list(
228 | 288 | Cascades=extent(c(-122.8,-118,44.9,47)),
... | ... |
235 | 295 | )
236 | 296 |
237 | 297 |
238 | | #pdf("output/mod09_worldclim.pdf",width=11,height=8.5)
239 | | #for(r in 1:length(regs)){
240 | | #tmap=crop(wc_map,regs[[r]])
241 | | #p_map=levelplot(tmap,col.regions=grey(seq(0,1,len=100)),cuts=100,at=seq(tmap@data@min,tmap@data@max,len=100),margin=F,maxpixels=1e5,colorkey=list(space="bottom",height=.75),xlab="",ylab="",main=names(regs)[r],useRaster=T)
242 | | #tmac=crop(mod09a,regs[[r]])
243 | | #p_mac=levelplot(tmac,col.regions=grey(seq(0,1,len=100)),cuts=100,at=seq(tmac@data@min,tmac@data@max,len=100),margin=F,maxpixels=1e5,colorkey=list(space="bottom",height=.75),useRaster=T)
244 | | #p_npp=levelplot(crop(npp,regs[[r]]),col.regions=grey(seq(0,1,len=100)),cuts=99,margin=F,maxpixels=1e5,colorkey=list(space="bottom",height=.5),zscaleLog=T,useRaster=T) #"NPP"=p_npp,
245 | | #p3=c("MOD09 Cloud Frequency (%)"=p_mac,"WorldClim Mean Annual Precip (mm)"=p_map,x.same=T,y.same=T,merge.legends=T,layout=c(2,1))
246 | | #print(p3)
247 | | #}
248 | | #dev.off()
249 | |
250 | |
251 | | ## reduced resolution
252 | |
253 | 298 | ## read in GEWEX 1-degree data
254 | 299 | gewex=mean(brick("data/gewex/CA_PATMOSX_NOAA.nc",varname="a_CA"))
255 | 300 | names(gewex)="PATMOS-x GEWEX AVHRR"
... | ... |
258 | 303 | #MOD_gewex=gewex
259 | 304 | #MOD_gewex@data@values=1:length(MOD_gewex@data@values)
260 | 305 | #MOD_gewex2=zonal(mod09a,MOD_gewex,fun='mean')
261 | | png("output/Resolution_Figures_%03d.png",width=5500,height=2000,res=600,pointsize=36,bg="white")
| 306 | png("output/Resolution_Figures_%03d.png",width=5500,height=4000,res=600,pointsize=36,bg="white")
262 | 307 | trellis.par.set(my.theme)
263 | 308 | #pdf("output/mod09_resolution.pdf",width=11,height=8.5)
264 | 309 |
265 | 310 | r="Venezuela"
266 | 311 | # ylab.right = "Cloud Frequency (%)",par.settings = list(layout.widths = list(axis.key.padding = 0.1,axis.left=0.6,ylab.right = 3,right.padding=2)),
| 312 | pars=list(layout.heights=list(key.bottom=2,key.top=1),layout.widths = list(axis.key.padding = 3,axis.left=0.6))
267 | 313 | p1=levelplot(crop(mod09a,regs[[r]]),col.regions=grey(seq(0,1,len=100)),at=seq(45,100,len=99),
268 | | colorkey=list(space="bottom",width=1,height=1,labels=list(labels=c(50,75,100),at=c(50,75,100))),
269 | | cuts=99,margin=F,max.pixels=1e5,par.settings = list(layout.heights=list(key.bottom=4),layout.widths = list(axis.key.padding = 0,axis.left=0.6)))
270 | | p2=levelplot(crop(gewex,regs[[r]]),col.regions=grey(seq(0,1,len=100)),at=seq(.45,1,len=99),cuts=99,margin=F,max.pixels=1e5,colorkey=F)
| 314 | colorkey=list(space="top",width=1,height=.75,labels=list(labels=c(50,75,100),at=c(50,75,100))),
| 315 | cuts=99,margin=F,max.pixels=1e6,par.settings = pars)
| 316 | p2=levelplot(crop(gewex,regs[[r]]),col.regions=grey(seq(0,1,len=100)),at=seq(.45,1,len=99),cuts=99,margin=F,max.pixels=1e6,
| 317 | colorkey=list(space="top",width=1,height=.75,labels=list(labels=c(50,75,100),at=c(.50,.75,1))),
| 318 | par.settings = pars)
271 | 319 | tmap=crop(wc_map,regs[[r]])
272 | | p3=levelplot(tmap,col.regions=grey(seq(0,1,len=100)),cuts=100,at=seq(tmap@data@min,tmap@data@max,len=100),margin=F,maxpixels=1e5,
273 | | colorkey=list(space="bottom",height=.33,width=1,labels=list(labels=c(1000,4000),at=c(1000,4000))),xlab="",ylab="",main=names(regs)[r],useRaster=T)
274 | | print(c("MODCF (%)"=p1,"PATMOS-x GEWEX (%)"=p2,"WorldClim Precip (mm)"=p3,x.same=T,y.same=T,merge.legends=T,layout=c(3,1)))
| 320 | p3=levelplot(tmap,col.regions=grey(seq(0,1,len=100)),cuts=100,at=seq(tmap@data@min,tmap@data@max,len=100),margin=F,maxpixels=1e6,
| 321 | colorkey=list(space="bottom",height=.75,width=1),xlab="",ylab="",main=names(regs)[r],useRaster=T,
| 322 | par.settings = pars)
| 323 | p4=levelplot(crop(wc_dem,regs[[r]]),col.regions=grey(seq(0,1,len=100)),cuts=99,margin=F,max.pixels=1e6,
| 324 | colorkey=list(space="bottom",height=.75,width=1),
| 325 | par.settings = pars)#,labels=list(labels=c(1000,4000),at=c(1000,4000))))
| 326 | print(c("MODCF (%)"=p1,"PATMOS-x GEWEX (%)"=p2,"WorldClim Precip (mm)"=p3,"Elevation (m)"=p4,x.same=T,y.same=T,merge.legends=T,layout=c(2,2)))
| 327 |
275 | 328 |
276 | 329 | dev.off()
277 | 330 |
... | ... |
279 | 332 | #########################################
280 | 333 | ### Some stats for paper
281 | 334 |
| 335 | ## number of stations retained
| 336 | length(unique(cldm$StaID[!is.na(cldm$cld_all)]))
| 337 | length(unique(cldm$StaID[!is.na(cldm$cld)]))
| 338 |
282 | 339 | # approximate size of mod09ga archive - get total size for one day from the USGS website
283 | 340 | size=scan("http://e4ftl01.cr.usgs.gov/MOLT/MOD09GA.005/2000.04.30/",what="char")
284 | 341 | ## extract all filesizes in MB (all the HDFs), sum them, and convert to TB for the length of the full record
... | ... |
292 | 349 |
293 | 350 | summary(lm(cld_all~mod09+lat,data=cldm))
294 | 351 |
295 | |
296 | |
297 | |
298 | | ## assess latitude bias
299 | | cldm$abslat=abs(cldm$lat)
300 | | cldm$absdif=abs(cldm$difm)
| 352 |
301 | 353 |
302 | 354 | ###################################################################
303 | 355 | ### summary by biome
... | ... |
392 | 444 |
393 | 445 |
394 | 446 |
395 | |
396 | |
397 | |
398 | |
399 | |
| 447 | ### Compare time periods
400 | 448 | library(texreg)
401 | 449 | extract.lm <- function(model) {
402 | 450 | s <- summary(model)
... | ... |
405 | 453 | se <- s$coef[, 2]
406 | 454 | pval <- s$coef[, 4]
407 | 455 | rs <- s$r.squared
408 | | n <- nobs(model)
| 456 | n <- as.integer(nobs(model))
409 | 457 | rmse=sqrt(mean((residuals(s)^2)))
410 | 458 | gof <- c(rs, rmse, n)
411 | 459 | gof.names <- c("R-Squared","RMSE","n")
... | ... |
420 | 468 |
421 | 469 |
422 | 470 | ### Compare two time periods
423 | | lm_all1=lm(cld_all~mod09,data=cldm[!is.na(cldm$cld),])
424 | | lm_mod=lm(cld~mod09,data=cldm)
425 | | mods=list("1970-2000 complete"=lm_all1,"2000-2009"=lm_mod)
| 471 | lm_all1=lm(cld_all~mod09,data=cldm[!is.na(cldm$cld)&cldm$cldn_all>=10,])
| 472 |
| 473 | lm_mod=lm(cld~mod09,data=cldm[cldm$cldn==10,])
| 474 | mods=list("1970-2009"=lm_all1,"2000-2009"=lm_mod)
| 475 |
| 476 | screenreg(mods,digits=2,single.row=T,custom.model.names=names(mods),custom.coef.names = c("Intercept", "MODCF"))
426 | 477 |
427 | | screenreg(mods,digits=2,single.row=T,custom.model.names=names(mods))
428 | 478 | htmlreg(mods,file = "output/tempstab.doc",
429 | 479 | custom.model.names = names(mods),
430 | 480 | single.row = T, inline.css = FALSE,doctype = TRUE, html.tag = TRUE, head.tag = TRUE, body.tag = TRUE)
431 | 481 |
432 | 482 |
| 483 | ## assess latitude bias
| 484 | cldm$abslat=abs(cldm$lat)
| 485 | cldm$absdif=abs(cldm$difm)
| 486 | abslm=lm(absdif~abslat*I(abslat^2),data=cldm[cldm$cldn_all>30,])
433 | 487 |
434 | | abslm=lm(absdif~abslat:I(abslat^2),data=cldm)
| 488 | xyplot(absdif~abslat|month2,type=c("p","smooth"),data=cldm,cex=.25,pch=16)
435 | 489 |
436 | | plot(absdif~abslat,data=cldm,cex=.25,pch=16)
| 490 | plot(absdif~abslat,data=cldm[cldm$cldn_all>30,],cex=.25,pch=16)
437 | 491 | lines(0:90,predict(abslm,newdata=data.frame(abslat=0:90),type="response"),col="red")
438 | 492 |
439 | 493 | bf=anovaBF(dif~lulcc+month2,data=cldm[!is.na(cldm$dif)&!is.na(cldm$lulcc),])
climate/research/cloud/MOD09/NDP-026D.R
---|---|---
139 | 139 |
140 | 140 |
141 | 141 | ### Add biome data
142 | | if(!file.exists("../teow/biomes.shp")){
143 | | teow=readOGR("/mnt/data/jetzlab/Data/environ/global/teow/official/","wwf_terr_ecos")
144 | | teow=teow[teow$BIOME<90,]
145 | | biome=unionSpatialPolygons(teow,paste(teow$REALM,teow$BIOME,sep="_"), threshold=5)
146 | | biomeid=read.csv("/mnt/data/jetzlab/Data/environ/global/teow/official/biome.csv",stringsAsFactors=F)
147 | | realmid=read.csv("/mnt/data/jetzlab/Data/environ/global/teow/official/realm.csv",stringsAsFactors=F,na.strings = "TTTT")
148 | | dt=data.frame(code=row.names(biome),stringsAsFactors=F)
149 | | dt[,c("realmid","biomeid")]=do.call(rbind,strsplit(sub(" ","",dt$code),"_"))
150 | | dt$realm=realmid$realm[match(dt$realmid,realmid$realmid)]
151 | | dt$biome=biomeid$BiomeID[match(dt$biomeid,biomeid$Biome)]
152 | | row.names(dt)=row.names(biome)
153 | | biome=SpatialPolygonsDataFrame(biome,data=dt)
154 | | writeOGR(biome,"../teow","biomes",driver="ESRI Shapefile",overwrite=T)
155 | | }
156 | 142 | biome=readOGR("../teow/","biomes")
157 | 143 | projection(biome)=projection(st)
158 | 144 | #st$biome=over(st,biome,returnList=F)$BiomeID
climate/research/cloud/MOD09/biomes.R
---|---|---
| 1 | ##
| 2 | ## Summarize clouds by biome
| 3 |
| 4 | setwd("~/acrobates/adamw/projects/cloud/")
| 5 |
| 6 |
| 7 | library(rgdal)
| 8 | library(raster)
| 9 | library(geosphere)
| 10 | library(rgeos)
| 11 | library(maptools)
| 12 | library(multicore)
| 13 |
| 14 | ## create Biome shapefile
| 15 | if(!file.exists("data/teow/biomes.shp")){
| 16 | teow=readOGR("/mnt/data/jetzlab/Data/environ/global/teow/official/","wwf_terr_ecos")
| 17 | biome=unionSpatialPolygons(teow,paste(teow$REALM,teow$BIOME,sep="_"), threshold=5)
| 18 | biomeid=read.csv("/mnt/data/jetzlab/Data/environ/global/teow/biome.csv",stringsAsFactors=F)
| 19 | realmid=read.csv("/mnt/data/jetzlab/Data/environ/global/teow/realm.csv",stringsAsFactors=F,na.strings = "TTTT")
| 20 | dt=data.frame(code=row.names(biome),stringsAsFactors=F)
| 21 | dt[,c("realmid","biomeid")]=do.call(rbind,strsplit(sub(" ","",dt$code),"_"))
| 22 | dt$realm=realmid$realm[match(dt$realmid,realmid$realmid)]
| 23 | dt$biome=biomeid$BiomeID[match(dt$biomeid,biomeid$Biome)]
| 24 | row.names(dt)=row.names(biome)
| 25 | biome=SpatialPolygonsDataFrame(biome,data=dt)
| 26 | ## add area and centroid to each polygon
| 27 | biome$areakm=do.call(c,mclapply(1:length(biome),function(i) {print(i); return(areaPolygon(biome[i,])/1000000)}))
| 28 | biome@data[,c("lon","lat")]=coordinates(gCentroid(biome,byid=T))
| 29 | writeOGR(biome,"data/teow","biomes",driver="ESRI Shapefile",overwrite=T)
| 30 | }
| 31 |
| 32 |
| 33 | biome=readOGR("data/teow/","biomes")
| 34 | projection(biome)="+proj=longlat +ellps=WGS84 +datum=WGS84 +no_defs"
| 35 |
| 36 |
| 37 | ### load cloud data
| 38 | #mod09s=brick("data/cloud_yseasmean.nc",varname="CF");names(mod09s)=c("DJF","MAM","JJA","SON")
| 39 | #mod09c=brick("data/cloud_ymonmean.nc",varname="CF");names(mod09c)=month.name
| 40 | #mod09s=brick("data/cloud_ymonstd.nc",varname="CFsd");names(mod09s)=month.name
| 41 |
| 42 | mod09a=brick("data/cloud_mean.nc",varname="CF");names(mod09a)="Mean Annual Cloud Frequency"
| 43 | mod09min=brick("data/cloud_min.nc",varname="CF");names(mod09min)="Min Cloud Frequency"
| 44 | mod09max=brick("data/cloud_max.nc",varname="CF");names(mod09max)="Max Cloud Frequency"
| 45 |
| 46 | mod09inter=brick("data/cloud_std_inter.nc",varname="CF_sd");names(mod09inter)="Inter-annual SD"
| 47 | mod09intra=brick("data/cloud_std_intra.nc",varname="CFsd");names(mod09intra)="Intra-annual SD"
| 48 |
| 49 |
| 50 | ## Extract biome-level summaries
| 51 |
| 52 | fsum=function(x,na.rm=T) paste(mean(x,na.rm=T),sd(x,na.rm=T),min(x,na.rm=T),max(x,na.rm=T),sep="_")
| 53 | fs=list(min=mod09min,max=mod09max,intersd=mod09inter,intrasd=mod09intra,mean=mod09a)
| 54 | fs=brick(list(min=mod09min,max=mod09max,intersd=mod09inter,intrasd=mod09intra,mean=mod09a))
| 55 |
| 56 | biomed=do.call(rbind.data.frame,
| 57 | mclapply(1:length(fs),function(i) {
| 58 | print(i)
| 59 | td=extract(fs[[i]],biome,fun=fsum,df=F,sp=F)
| 60 | td2=do.call(rbind.data.frame,strsplit(td,"_"))
| 61 | td3=as.data.frame(apply(td2,2,as.numeric))
| 62 | colnames(td3)=c("mean","sd","min","max")
| 63 | td3$metric=names(fs)[i]
| 64 | td3$biome=biome$code
| 65 | return(td3)
| 66 | }))
| 67 | write.csv(biomed,file="output/biomesummary.csv",row.names=F)
| 68 |
| 69 | ################################################################
| 70 | ## get stratified sample of points from biomes for illustration
| 71 | if(!file.exists("output/biomesamplepoints.csv")){
| 72 | biome=readOGR("data/teow/","biomes")
| 73 | n_biomesamples=1000
| 74 | library(multicore)
| 75 | biomesample=do.call(rbind.data.frame,mclapply(1:length(biome),function(i)
| 76 | data.frame(code=biome$code[i],coordinates(spsample(biome[i,],n=n_biomesamples,type="stratified",nsig=2)))))
| 77 | colnames(biomesample)[2:3]=c("lon","lat")
| 78 | biomesample[,c("biome","realm")]=biome@data[match(biomesample$code,biome$code),c("biome","realm")]
| 79 | write.csv(biomesample,"output/biomesamplepoints.csv",row.names=F)
| 80 | }
| 81 |
| 82 | ## Extract data for points
| 83 | if(!file.exists("output/biomesamplepoints_cloud.csv")){
| 84 | biomesample=read.csv("output/biomesamplepoints.csv")
| 85 | biomep=raster::extract(mod09c,biomesample,sp=T)
| 86 | biomep$lon=biomesample$lon
| 87 | biomep@data[,c("lon","lat")]=coordinates(biomep)
| 88 | write.csv(biomep@data,"output/biomesamplepoints_cloud.csv",row.names=F)
| 89 | }
| 90 | biomep=read.csv("output/biomesamplepoints_cloud.csv")
| 91 | coordinates(biomep)=c("lon","lat")
| 92 | projection(biomep)="+proj=longlat +ellps=WGS84 +datum=WGS84 +no_defs"
climate/research/cloud/MOD09/ee/ee_compile.R
---|---|---
4 | 4 | library(doMC)
5 | 5 | library(multicore)
6 | 6 | library(foreach)
7 | | #library(doMPI)
8 | 7 | registerDoMC(4)
9 | | #beginCluster(4)
10 | 8 |
11 | | wd="~/acrobates/adamw/projects/cloud"
12 | | setwd(wd)
13 | |
14 | | tempdir="tmp"
15 | | if(!file.exists(tempdir)) dir.create(tempdir)
| 9 | setwd("~/acrobates/adamw/projects/cloud")
16 | 10 |
17 | 11 | ## Get list of available files
18 | | df=data.frame(path=list.files("/mnt/data2/projects/cloud/mod09",pattern="*.tif$",full=T,recur=T),stringsAsFactors=F)
19 | | df[,c("region","year","month")]=do.call(rbind,strsplit(basename(df$path),"_|[.]"))[,c(1,2,3)]
20 | | df$date=as.Date(paste(df$year,"_",df$month,"_15",sep=""),"%Y_%m_%d")
21 | |
22 | | ## add stats to test for missing data
23 | | addstats=F
24 | | if(addstats){
25 | | df[,c("max","min","mean","sd")]=do.call(rbind.data.frame,mclapply(1:nrow(df),function(i) as.numeric(sub("^.*[=]","",grep("STATISTICS",system(paste("gdalinfo -stats",df$path[i]),inter=T),value=T)))))
26 | | table(df$sd==0)
| 12 | df=data.frame(path=list.files("data/mod09cloud",pattern="*.tif$",full=T,recur=T),stringsAsFactors=F)
| 13 | df[,c("month")]=do.call(rbind,strsplit(basename(df$path),"_|[.]|-"))[,c(6)]
| 14 | df$date=as.Date(paste(2013,"_",df$month,"_15",sep=""),"%Y_%m_%d")
| 15 |
| 16 | ## use ramdisk?
| 17 | tmpfs=tempdir()
| 18 |
| 19 | ramdisk=T
| 20 | if(ramdisk) {
| 21 | system("sudo mkdir -p /mnt/ram")
| 22 | system("sudo mount -t ramfs -o size=30g ramfs /mnt/ram")
| 23 | system("sudo chmod a+w /mnt/ram")
| 24 | tmpfs="/mnt/ram"
| 25 | }
| 26 |
| 27 | #i=2
| 28 | for(i in 1:nrow(df)){
| 29 | ## Define output and check if it already exists
| 30 | temptffile=paste(tmpfs,"/modcf_",df$month[i],".tif",sep="")
| 31 | tffile=paste("data/mod09cloud2/modcf_",df$month[i],".tif",sep="")
| 32 | ncfile=paste("data/mod09cloud2/modcf_",df$month[i],".nc",sep="")
| 33 |
| 34 | # if(file.exists(tffile)) next
| 35 | ## warp to wgs84
| 36 | ops=paste("-t_srs 'EPSG:4326' -multi -r bilinear -te -180 -90 180 90 -tr 0.008333333333333 -0.008333333333333",
| 37 | "-co BIGTIFF=YES -ot Byte --config GDAL_CACHEMAX 300000 -wm 300000 -wo NUM_THREADS:10 -wo SOURCE_EXTRA=5")
| 38 | system(paste("gdalwarp -overwrite ",ops," ",df$path[i]," ",temptffile))
| 39 | ## update metadata
| 40 | tags=c(paste("TIFFTAG_IMAGEDESCRIPTION='Cloud Frequency extracted from Collection 5 MYD09GA and MOD09GA (PGE11) internal cloud mask algorithm (embedded in M*D09GA state_1km bit 10).",
| 41 | " Band 1 represents the overall 2000-2013 mean cloud frequency for month ",df$month[i],
| 42 | ". Band 2 is four times the standard deviation of the mean monthly cloud frequencies over 2000-2013.'",sep=""),
| 43 | "TIFFTAG_DOCUMENTNAME='MODCF: MODIS Cloud Frequency'",
| 44 | paste("TIFFTAG_DATETIME='2013-",df$month[i],"-15'",sep=""),
| 45 | "TIFFTAG_ARTIST='Adam M. Wilson (adam.wilson@yale.edu)'")
| 46 | ## compress
| 47 | # trans_ops=paste(" -co COMPRESS=LZW -stats -co BLOCKXSIZE=256 -co BLOCKYSIZE=256 -co PREDICTOR=2 -co FORMAT=NC4C")
| 48 | # system(paste("gdal_translate ",trans_ops," ",paste("-mo ",tags,sep="",collapse=" ")," ",temptffile," ",tffile," ",sep=""))
| 49 |
| 50 | trans_ops=paste(" -co COMPRESS=DEFLATE -stats -co PREDICTOR=2 -co FORMAT=NC4C -co ZLEVEL=9")
| 51 | system(paste("gdal_translate ",trans_ops," ",temptffile," ",ncfile," ",sep=""))
| 52 | file.remove(temptffile)
27 | 53 | }
28 | 54 |
29 | | ## subset to testtiles?
30 | | #df=df[df$region%in%testtiles,]
31 | | #df=df[df$month==1,]
32 | | table(df$year,df$month)
33 | 55 |
34 | | writeLines(paste("Tiling options will produce",nrow(tiles),"tiles and ",nrow(jobs),"tile-months. Current todo list is ",length(todo)))
| 56 | if(ramdisk) {
| 57 | ## unmount the ram disk
| 58 | system(paste("sudo umount ",tmpfs))
| 59 | }
35 | 60 |
36 | 61 |
37 | | ## drop some if not complete
38 | | #df=df[df$month%in%1:9&df$year%in%c(2001:2012),]
39 | 62 | rerun=F # set to true to recalculate all dates even if file already exists
40 | 63 |
41 | | ## Loop over existing months to build composite netcdf files
42 | | foreach(date=unique(df$date)) %dopar% {
43 | | ## get date
44 | | print(date)
45 | | ## Define output and check if it already exists
46 | | vrtfile=paste(tempdir,"/mod09_",date,".vrt",sep="")
47 | | ncfile=paste(tempdir,"/mod09_",date,".nc",sep="")
48 | | tffile=paste(tempdir,"/mod09_",date,".tif",sep="")
49 | |
50 | | if(!rerun&file.exists(ncfile)) return(NA)
51 | | ## merge regions to a new netcdf file
52 | | # system(paste("gdal_merge.py -o ",tffile," -init -32768 -n -32768.000 -ot Int16 ",paste(df$path[df$date==date],collapse=" ")))
53 | | system(paste("gdalbuildvrt -overwrite -srcnodata -32768 -vrtnodata -32768 ",vrtfile," ",paste(df$path[df$date==date],collapse=" ")))
54 | | ## Warp to WGS84 grid and convert to netcdf
55 | | ##ops="-t_srs 'EPSG:4326' -multi -r cubic -te -180 0 180 10 -tr 0.008333333333333 -0.008333333333333 -wo SOURCE_EXTRA=50" #-wo SAMPLE_GRID=YES -wo SAMPLE_STEPS=100
56 | | ops="-t_srs 'EPSG:4326' -multi -r cubic -te -180 -90 180 90 -tr 0.008333333333333 -0.008333333333333 -wo SOURCE_EXTRA=50"
57 | |
58 | | system(paste("gdalwarp -overwrite ",ops," -et 0 -srcnodata -32768 -dstnodata -32768 ",vrtfile," ",tffile," -ot Int16"))
59 | | system(paste("gdal_translate -of netCDF ",tffile," ",ncfile))
60 | | # system(paste("gdalwarp -overwrite ",ops," -srcnodata -32768 -dstnodata -32768 -of netCDF ",vrtfile," ",tffile," -ot Int16"))
61 | | file.remove(tffile)
62 | | setwd(wd)
63 | | system(paste("ncecat -O -u time ",ncfile," ",ncfile,sep=""))
64 | | ## create temporary nc file with time information to append to MOD06 data
65 | | cat(paste("
| 64 | ## Loop over existing months to build composite netcdf files
| 65 | foreach(date=unique(df$date)) %dopar% {
| 66 | ## get date
| 67 | print(date)
| 68 | ## Define output and check if it already exists
| 69 | temptffile=paste(tmpfs,"/modcf_",df$month[i],".tif",sep="")
| 70 | ncfile=paste("data/mod09cloud2/modcf_",df$month[i],".nc",sep="")
| 71 | ## check if output already exists
| 72 | if(!rerun&file.exists(ncfile)) return(NA)
| 73 |
| 74 | ## warp to wgs84
| 75 | ops=paste("-t_srs 'EPSG:4326' -multi -r bilinear -te -180 -90 180 90 -tr 0.008333333333333 -0.008333333333333",
| 76 | "-co BIGTIFF=YES -ot Byte --config GDAL_CACHEMAX 300000 -wm 300000 -wo NUM_THREADS:10 -wo SOURCE_EXTRA=5")
| 77 | system(paste("gdalwarp -overwrite ",ops," ",df$path[i]," ",temptffile))
| 78 |
| 79 | ## Warp to WGS84 grid and convert to netcdf
| 80 | trans_ops=paste(" -co COMPRESS=DEFLATE -stats -co FORMAT=NC4C -co ZLEVEL=9")
| 81 | system(paste("gdal_translate -of netCDF ",trans_ops," ",temptffile," ",ncfile))
| 82 | ## file.remove(temptffile)
| 83 | system(paste("ncecat -O -u time ",ncfile," ",ncfile,sep=""))
| 84 | ## create temporary nc file with time information to append to CF data
| 85 | cat(paste("
66 | 86 | netcdf time {
67 | 87 | dimensions:
68 | 88 | time = 1 ;
... | ... |
73 | 93 | time:long_name = \"time of observation\";
74 | 94 | data:
75 | 95 | time=",as.integer(date-as.Date("2000-01-01")),";
76 | | }"),file=paste(tempdir,"/",date,"_time.cdl",sep=""))
77 | | system(paste("ncgen -o ",tempdir,"/",date,"_time.nc ",tempdir,"/",date,"_time.cdl",sep=""))
78 | | system(paste("ncks -A ",tempdir,"/",date,"_time.nc ",ncfile,sep=""))
79 | | ## add other attributes
80 | | system(paste("ncrename -v Band1,CF ",ncfile,sep=""))
81 | | system(paste("ncatted ",
82 | | " -a units,CF,o,c,\"%\" ",
83 | | # " -a valid_range,CF,o,b,\"0,100\" ",
84 | | " -a scale_factor,CF,o,f,\"0.1\" ",
85 | | " -a _FillValue,CF,o,f,\"-32768\" ",
86 | | " -a long_name,CF,o,c,\"Cloud Frequency(%)\" ",
87 | | " -a NETCDF_VARNAME,CF,o,c,\"Cloud Frequency(%)\" ",
88 | | " -a title,global,o,c,\"Cloud Climatology from MOD09 Cloud Mask\" ",
89 | | " -a institution,global,o,c,\"Jetz Lab, EEB, Yale University, New Haven, CT\" ",
90 | | " -a source,global,o,c,\"Derived from MOD09GA Daily Data\" ",
91 | | " -a comment,global,o,c,\"Developed by Adam M. Wilson (adam.wilson@yale.edu / http://adamwilson.us)\" ",
92 | | ncfile,sep=""))
93 | |
94 | | ## add the fillvalue attribute back (without changing the actual values)
95 | | #system(paste("ncatted -a _FillValue,CF,o,b,-32768 ",ncfile,sep=""))
96 | |
97 | | if(as.numeric(system(paste("cdo -s ntime ",ncfile),intern=T))<1) {
98 | | print(paste(ncfile," has no time, deleting"))
99 | | file.remove(ncfile)
100 | | }
101 | | print(paste(basename(ncfile)," Finished"))
102 | |
103 | |
104 | | }
| 96 | }"),file=paste(tempdir(),"/",date,"_time.cdl",sep=""))
| 97 | system(paste("ncgen -o ",tempdir(),"/",date,"_time.nc ",tempdir(),"/",date,"_time.cdl",sep=""))
| 98 | ## add time dimension to ncfile and compress (deflate)
| 99 | system(paste("ncks --fl_fmt=netcdf4_classic -L 9 -A ",tempdir(),"/",date,"_time.nc ",ncfile,sep=""))
| 100 | ## add other attributes
| 101 | system(paste("ncrename -v Band1,CF ",ncfile,sep=""))
| 102 | system(paste("ncrename -v Band2,CFsd ",ncfile,sep=""))
| 103 | system(paste("ncatted ",
| 104 | ## CF Mean
| 105 | " -a units,CF,o,c,\"%\" ",
| 106 | " -a valid_range,CF,o,b,\"0,100\" ",
| 107 | # " -a scale_factor,CF,o,b,\"0.1\" ",
| 108 | " -a _FillValue,CF,o,b,\"255\" ",
| 109 | " -a missing_value,CF,o,b,\"255\" ",
| 110 | " -a long_name,CF,o,c,\"Cloud Frequency (%)\" ",
| 111 | " -a NETCDF_VARNAME,CF,o,c,\"Cloud Frequency (%)\" ",
| 112 | ## CF Standard Deviation
| 113 | " -a units,CFsd,o,c,\"SD\" ",
| 114 | " -a valid_range,CFsd,o,b,\"0,200\" ",
| 115 | " -a scale_factor,CFsd,o,f,\"0.25\" ",
| 116 | " -a _FillValue,CFsd,o,b,\"255\" ",
| 117 | " -a missing_value,CFsd,o,b,\"255\" ",
| 118 | " -a long_name,CFsd,o,c,\"Cloud Frequency (%) Intra-month (2000-2013) Standard Deviation\" ",
| 119 | " -a NETCDF_VARNAME,CFsd,o,c,\"Cloud Frequency (%) Intra-month (2000-2013) Standard Deviation\" ",
| 120 | ## global
| 121 | " -a title,global,o,c,\"Cloud Climatology from MOD09 Cloud Mask\" ",
| 122 | " -a institution,global,o,c,\"Jetz Lab, EEB, Yale University, New Haven, CT\" ",
| 123 | " -a source,global,o,c,\"Derived from MOD09GA Daily Data\" ",
| 124 | " -a comment,global,o,c,\"Developed by Adam M. Wilson (adam.wilson@yale.edu / http://adamwilson.us)\" ",
| 125 | ncfile,sep=""))
| 126 |
| 127 | ## add the fillvalue attribute back (without changing the actual values)
| 128 | #system(paste("ncatted -a _FillValue,CF,o,b,-32768 ",ncfile,sep=""))
| 129 |
| 130 | if(as.numeric(system(paste("cdo -s ntime ",ncfile),intern=T))<1) {
| 131 | print(paste(ncfile," has no time, deleting"))
| 132 | file.remove(ncfile)
| 133 | }
| 134 | print(paste(basename(ncfile)," Finished"))
| 135 |
| 136 |
| 137 | }
105 | 138 |
106 | 139 |
107 | 140 | ### merge all the tiles to a single global composite
... | ... |
122 | 155 | system(paste("cdo -f nc4c -O -yseasstd data/cloud_monthly.nc data/cloud_yseasstd.nc"))
123 | 156 |
124 | 157 | ## standard deviations, had to break to limit memory usage
125 | | system(paste("cdo -f nc4c -O -chname,CF,CF_sd -ymonstd -selmon,1,2,3,4,5,6 data/cloud_monthly.nc data/cloud_ymonsd_1-6.nc"))
126 | | system(paste("cdo -f nc4c -O -chname,CF,CF_sd -ymonstd -selmon,7,8,9,10,11,12 data/cloud_monthly.nc data/cloud_ymonsd_7-12.nc"))
127 | | system(paste("cdo -f nc4c -O mergetime data/cloud_ymonsd_1-6.nc data/cloud_ymonsd_7-12.nc data/cloud_ymonstd.nc"))
128 | |
129 | |
130 | | #if(!file.exists("data/mod09_metrics.nc")) {
131 | | system("cdo -f nc4c timmin data/cloud_ymonmean.nc data/cloud_min.nc")
132 | | system("cdo -f nc4c timmax data/cloud_ymonmean.nc data/cloud_max.nc")
133 | | system("cdo -f nc4c -chname,CF,CFsd -timstd data/cloud_ymonmean.nc data/cloud_std.nc")
134 | | # system("cdo -f nc2 merge data/mod09_std.nc data/mod09_min.nc data/cloud_max.nc data/cloud_metrics.nc")
135 | | # system("cdo merge -chname,CF,CFmin -timmin data/cloud_ymonmean.nc -chname,CF,CFmax -timmax data/cloud_ymonmean.nc -chname,CF,CFsd -timstd data/cloud_ymonmean.nc data/cloud_metrics.nc")
136 | | #}
| 158 | system(paste("cdo -f nc4c -z zip -O -chname,CF,CF_sd -ymonstd -selmon,1,2,3,4,5,6 data/cloud_monthly.nc data/cloud_ymonsd_1-6.nc"))
| 159 | system(paste("cdo -f nc4c -z zip -O -chname,CF,CF_sd -ymonstd -selmon,7,8,9,10,11,12 data/cloud_monthly.nc data/cloud_ymonsd_7-12.nc"))
| 160 | system(paste("cdo -f nc4c -z zip -O mergetime data/cloud_ymonsd_1-6.nc data/cloud_ymonsd_7-12.nc data/cloud_ymonstd.nc"))
| 161 |
| 162 | system("cdo -f nc4c -z zip timmin data/cloud_ymonmean.nc data/cloud_min.nc")
| 163 | system("cdo -f nc4c -z zip timmax data/cloud_ymonmean.nc data/cloud_max.nc")
| 164 |
| 165 | ## standard deviation of the mean monthly values gives intra-annual variability
| 166 | system("cdo -f nc4c -z zip -chname,CF,CFsd -timstd data/cloud_ymonmean.nc data/cloud_std_intra.nc")
| 167 | ## mean of the monthly standard deviations gives inter-annual variability
| 168 | system("cdo -f nc4c -z zip -chname,CF,CFsd -timmean data/cloud_ymonstd.nc data/cloud_std_inter.nc")
137 | 169 |
138 | 170 |
139 | 171 | # Regressions through time by season
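Since the new netCDF files store CF and CFsd as bytes with a scale factor and a fill value of 255 (set by the ncatted calls above), it is worth confirming the attributes on a finished file. A small check in R, assuming the ncdf4 package is installed and the illustrative path below exists:

```r
library(ncdf4)
nc <- nc_open("data/mod09cloud2/modcf_01.nc")   # illustrative output file
ncatt_get(nc, "CFsd", "scale_factor")           # expected 0.25 per the ncatted call above
ncatt_get(nc, "CF", "units")                    # expected "%"
cf <- ncvar_get(nc, "CF")                       # applies scale_factor/add_offset when present
range(cf, na.rm = TRUE)                         # should fall within the 0-100% valid range
nc_close(nc)
```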