Revision 39c07803
Added by Adam Wilson almost 11 years ago
climate/research/cloud/MOD09/2_bias.R (added in this revision)

### Script to correct orbital banding (bias) in the compiled monthly MOD09 cloud data using VSNR (via RcppOctave)

library(rasterVis)
library(doMC)
library(multicore)
library(foreach)
library(mgcv)
library(RcppOctave)
registerDoMC(12)


## start raster cluster
#beginCluster(5)

setwd("~/acrobates/adamw/projects/cloud")

datadir="/mnt/data2/projects/cloud/"

mpath="/home/adamw/acrobates/adamw/projects/environmental-layers/climate/research/cloud/MOD09/vsnr/"
.O$addpath(mpath)
## download data from google drive


#########################################
#### Bias correction functions

fgabor=function(d,theta=-15,x=200,y=5){
  ## build a Gabor kernel raster with the same dimensions as d
  thetaR=(theta*pi)/180
  cds=expand.grid(x=(1:nrow(d))-round(nrow(d)/2),y=(1:ncol(d))-round(ncol(d)/2))
  sigma_x=x
  sigma_y=y
  lambda=0
  tpsi=0
  x_theta=cds[,"x"]*cos(thetaR)+cds[,"y"]*sin(thetaR)
  y_theta=-cds[,"x"]*sin(thetaR)+cds[,"y"]*cos(thetaR)
  n=max(cds)
  gb=1/(2*pi*sigma_x*sigma_y)*exp(-.5*(x_theta^2/sigma_x^2+y_theta^2/sigma_y^2))*cos(2*pi/n*lambda*x_theta+tpsi)
  gb2=1e-2*gb/max(gb)  # normalization
  psi=d
  values(psi)=matrix(gb2,ncol=ncol(d))
  return(psi)
}

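## Illustrative sketch (hypothetical example; r_demo and psi_demo are made-up
## names): build and inspect the kernel on a small dummy raster.
r_demo=raster(nrows=100,ncols=100,xmn=0,xmx=10,ymn=0,ymx=10)
values(r_demo)=rnorm(ncell(r_demo))
psi_demo=fgabor(r_demo,theta=-15,x=20,y=2)
levelplot(psi_demo,margin=FALSE)  # an elongated, rotated Gaussian ridge
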
vsnr=function(d,gabor,alpha=10,p=2,epsilon=0,prec=5e-3,maxit=100,C1=1){
  # d=d*1.0
  ## Process with VSNR (Octave function loaded from mpath via RcppOctave)
  dt=.O$VSNR(as.matrix(d),epsilon,p,as.matrix(gabor),alpha,maxit+1,prec,1,argout=c("u","Gap","Primal","Dual","EstP","EstD"))
  ## Create spatial objects from VSNR output
  bias=d
  values(bias)=-as.numeric(t(convolve(dt$EstP,as.matrix(gabor))))
  bias[abs(bias)<0.5]=0  # set all |bias|<0.5 to zero to avoid minor adjustments and added striping in the data
  dc=d-bias  # make 'corrected' version of the data
  NAvalue(bias)=0  # set bias==0 to NA to facilitate plotting; areas that are NA were not adjusted
  res=stack(list(dc=dc,d=d,bias=bias))  #,EstP=EstP,EstD1=EstD1,EstD2=EstD2
  rm(dt,dc)
  return(res)
}

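## Usage sketch (illustrative; d1, psi1 and fix1 are hypothetical names, and the
## VSNR Octave code under mpath must be on the Octave path as set up above):
##   d1   = crop(raster(df2[1]), extent(-10, 0, 0, 10))   # df2 is defined below
##   psi1 = fgabor(d1, theta=-15, x=400, y=4)
##   fix1 = vsnr(d1, gabor=psi1, alpha=2, p=2, epsilon=1, maxit=30)
##   plot(fix1[["bias"]])  # the striping estimate that was subtracted from d1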

######################################
## Run the correction functions
### Subset equatorial region to correct orbital banding


### build table of tiles to process
extf=function(xmin=-180,xmax=180,ymin=-30,ymax=30,size=10,overlap=0.5){
  xmins=unique(sort(c(seq(xmin,xmax-size,by=size),seq(xmin+(overlap*size),xmax-size,by=size))))
  ymins=unique(sort(c(seq(ymin,ymax-size,by=size),seq(ymin+(overlap*size),ymax-size,by=size))))
  exts=expand.grid(xmin=xmins,ymin=ymins)
  exts$ymax=exts$ymin+size
  exts$xmax=exts$xmin+size
  return(exts)
}

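## For reference, the tile table used below,
##   extf(xmin=-10, xmax=20, ymin=0, ymax=30, size=10, overlap=0)
## is a 9-row data.frame of 10-degree tile corners (xmin varies fastest):
##      xmin ymin ymax xmax
##   1   -10    0   10    0
##   2     0    0   10   10
##   3    10    0   10   20
##   4   -10   10   20    0
##   ... (9 rows in total)
## With overlap=0.5 (as in the commented full-domain call below) each tile is
## also offset by half a tile width, so adjacent tiles overlap by 5 degrees.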

df2=list.files(paste(datadir,"/mcd09tif",sep=""),full=T,pattern="[0-9][.]tif$")
i=1

### Build the tiles
#exts=extf(xmin=-180,xmax=180,ymin=-30,ymax=30,size=10,overlap=0.5)
exts=extf(xmin=-10,xmax=20,ymin=0,ymax=30,size=10,overlap=0)

### Process the tiles
foreach(ti=1:nrow(exts)) %dopar% {
  writeLines(paste("Starting: ",ti))
  textent=extent(exts$xmin[ti],exts$xmax[ti],exts$ymin[ti],exts$ymax[ti])
  ## extract the tile
  file=df2[i]
  d=crop(raster(file),textent)
  ## skip null tiles
  if(is.null(d@data@values)) return(NULL)
  ## make the gabor kernel
  psi=fgabor(d,theta=-15,x=400,y=4) #3
  ## run the correction
  res=vsnr(d,gabor=psi,alpha=2,p=2,epsilon=1,prec=5e-3,maxit=30,C1=1)
  ## write the file
  outfile=paste("tmp/test_",ti,".tif",sep="")
  if(!is.null(outfile)) writeRaster(res,file=outfile,overwrite=T,datatype='FLT4S') #,options=c("COMPRESS=LZW", "PREDICTOR=2"))#,NAflag=-127)
  #,datatype='INT1S'
}
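## Debugging hint (illustrative): to run a single tile serially in the current R
## session, the same loop body can be evaluated with foreach's sequential
## operator, e.g. foreach(ti=1) %do% { ... }, instead of %dopar%.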

## mosaic the tiles
system("ls tmp/test_[0-9]*[.]tif")
system("rm -f tmp/test2.tif; gdal_merge.py -co COMPRESS=LZW -co PREDICTOR=2 tmp/test_[0-9]*[.]tif -o tmp/test2.tif")  # leave the glob unquoted so the shell expands the tile list
#system("rm tmp/test2.tif; gdalwarp -multi -r average -dstnodata 255 -ot Byte -co COMPRESS=LZW -co PREDICTOR=2 `ls tmp/test_[0-9]*[.]tif` tmp/test2.tif")

res=brick("tmp/test2.tif")
names(res)=c("dc","d","bias")  # band order follows stack(list(dc=dc,d=d,bias=bias)) in vsnr()

gcol=colorRampPalette(c("blue","yellow","red"))
levelplot(res[["bias"]],col.regions=gcol(100),cuts=99,margin=F,maxpixels=1e6)
levelplot(stack(list(Original=res[["d"]],Corrected=res[["dc"]])),col.regions=gcol(100),cuts=99,maxpixels=1e7)
levelplot(stack(list(Original=res[["d"]],Corrected=res[["dc"]])),col.regions=gcol(100),cuts=99,maxpixels=1e7,ylim=c(10,13),xlim=c(-7,-1))

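## Optional check (illustrative): confirm the mosaic carries the expected three
## bands (dc, d, bias) before plotting.
system("gdalinfo tmp/test2.tif | grep Band")
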
climate/research/cloud/MOD09/ee/ee_compile.R (bias-correction code removed from this script; it now lives in 2_bias.R)

Unchanged context (old/new lines 16-18):

datadir="/mnt/data2/projects/cloud/"

Removed (old lines 19-22):

mpath="/home/adamw/acrobates/adamw/projects/environmental-layers/climate/research/cloud/MOD09/vsnr/"
.O$addpath(mpath)
## download data from google drive

Unchanged context (old lines 23-25 / new lines 19-21):

## Get list of available files

...

Removed (old lines 97-188, following the unchanged closing } at old line 94):

#########################################
#### Bias correction functions

fgabor=function(d,theta=-15,x=200,y=5){
  thetaR=(theta*pi)/180
  cds=expand.grid(x=(1:nrow(d))-round(nrow(d)/2),y=(1:ncol(d))-round(ncol(d)/2))
  sigma_x=x
  sigma_y=y
  lambda=0
  tpsi=0
  x_theta=cds[,"x"]*cos(thetaR)+cds[,"y"]*sin(thetaR)
  y_theta=-cds[,"x"]*sin(thetaR)+cds[,"y"]*cos(thetaR)
  n=max(cds)
  gb=1/(2*pi*sigma_x*sigma_y)*exp(-.5*(x_theta^2/sigma_x^2+y_theta^2/sigma_y^2))*cos(2*pi/n*lambda*x_theta+tpsi)
  gb2=1e-2*gb/max(gb)  # normalization
  psi=d
  values(psi)=matrix(gb2,ncol=ncol(d))
  return(psi)
}

vsnr=function(d,gabor,alpha=10,p=2,epsilon=0,prec=5e-3,maxit=100,C1=1){
  ## Process with VSNR
  dt=.O$VSNR(as.matrix(d),epsilon,p,as.matrix(gabor),alpha,maxit+1,prec,1,argout=c("u","Gap","Primal","Dual","EstP","EstD"))
  ## Create spatial objects from VSNR output
  dc=d
  values(dc)=round(as.vector(t(dt$u)))
  dc[dc<0]=0
  # EstP=d
  # values(EstP)=as.numeric(t(dt$EstP))
  # EstD1=d
  # values(EstD1)=as.numeric(t(dt$EstD[,,1]))
  # EstD2=d
  # values(EstD2)=as.numeric(t(dt$EstD[,,2]))
  dif=d-dc  #overlay(d,dc,fun=function(x,y) as.integer(x-y))
  res=stack(list(d=d,dc=dc,dif=dif))  #,EstP=EstP,EstD1=EstD1,EstD2=EstD2
  # rm(dt,dc,EstP,dif)
  return(res)
}

######################################
## Run the correction functions
### Subset equatorial region to correct orbital banding

df2=list.files(paste(datadir,"/mcd09tif",sep=""),full=T,pattern="[0-9][.]tif$")
i=1

### build table of tiles to process
tsize=10
exts=data.frame(expand.grid(xmin=seq(-180,180-tsize,by=tsize),ymin=seq(-30,30-tsize,by=tsize)))
exts=data.frame(expand.grid(xmin=seq(-20,50-tsize,by=tsize),ymin=seq(-30,30-tsize,by=tsize)))
#exts=data.frame(expand.grid(xmin=seq(-10,20-tsize,by=tsize),ymin=seq(0,30-tsize,by=tsize)))

exts$xmax=exts$xmin+tsize
exts$ymax=exts$ymin+tsize

getbias=function(file=df2[i],extent,writefile=NULL,...){
  ## extract the tile
  d=crop(raster(file),extent)
  ## skip null tiles
  if(is.null(d@data@values)) return(NULL)
  ## make the gabor kernel
  psi=fgabor(d,theta=-15,x=500,y=3)
  ## run the correction
  res=vsnr(d,gabor=psi,alpha=1,p=2,epsilon=1,prec=5e-3,maxit=100,C=1)
  if(!is.null(writefile)) writeRaster(res,file=writefile,overwrite=T,datatype='INT1S',options=c("COMPRESS=LZW", "PREDICTOR=2"),NAflag=-127)
  return(res)
}

### Process the tiles
foreach(ti=1:nrow(exts)) %dopar% {
  writeLines(paste("Starting: ",ti))
  textent=extent(exts$xmin[ti],exts$xmax[ti],exts$ymin[ti],exts$ymax[ti])
  res=getbias(df2[i],extent=textent,writefile=paste("tmp/test_",ti,".tif",sep=""))
}


## mosaic the tiles
system("ls tmp/test_[0-9]*[.]tif")
system("rm tmp/test2.tif; gdal_merge.py -n 255 -ot Byte -co COMPRESS=LZW -co PREDICTOR=2 'tmp/test_[0-9]*[.]tif' -o tmp/test2.tif")

res=brick("tmp/test2.tif")
names(res)=c("d","dc","dif")

p2=levelplot(res[["dif"]],col.regions=rainbow(100,start=.2),cuts=99,margin=F,maxpixels=1e6);print(p2)

p1=levelplot(stack(list(Original=res[["d"]],Corrected=res[["dc"]])),col.regions=grey.colors(100,start=0),cuts=99,maxPixels=1e7);print(p1)


print(c(p1,p2,merge.legends=T))

Commit message: Splitting cloud processing into two scripts