1 |
e64dc6d0
|
Benoit Parmentier
|
runGAMCAI <- function(i) { # loop over dates
|
2 |
|
|
|
3 |
|
|
#date<-strptime(dates[i], "%Y%m%d") # interpolation date being processed
|
4 |
d13c7dc1
|
Benoit Parmentier
|
date<-strptime(sampling_dat$date[i], "%Y%m%d") # interpolation date being processed, converting the string using specific format
|
5 |
e64dc6d0
|
Benoit Parmentier
|
month<-strftime(date, "%m") # current month of the date being processed
|
6 |
d13c7dc1
|
Benoit Parmentier
|
LST_month<-paste("mm_",month,sep="") # name of LST month to be matched in the raster stack of covariates and data.frame
|
7 |
e64dc6d0
|
Benoit Parmentier
|
|
8 |
|
|
#Adding layer LST to the raster stack
|
9 |
|
|
|
10 |
d13c7dc1
|
Benoit Parmentier
|
pos<-match("LST",layerNames(s_raster)) #Find the position of the layer with name "LST", if not present pos=NA
|
11 |
|
|
s_raster<-dropLayer(s_raster,pos) # If it exists drop layer
|
12 |
e64dc6d0
|
Benoit Parmentier
|
pos<-match(LST_month,layerNames(s_raster)) #Find column with the current month for instance mm12
|
13 |
|
|
r1<-raster(s_raster,layer=pos) #Select layer from stack
|
14 |
|
|
layerNames(r1)<-"LST"
|
15 |
|
|
s_raster<-addLayer(s_raster,r1) #Adding current month
|
16 |
|
|
|
17 |
|
|
###Regression part 1: Creating a validation dataset by creating training and testing datasets
|
18 |
|
|
|
19 |
|
|
mod_LST <-ghcn.subsets[[i]][,match(LST_month, names(ghcn.subsets[[i]]))] #Match interpolation date and monthly LST average
|
20 |
d13c7dc1
|
Benoit Parmentier
|
ghcn.subsets[[i]] <- transform(ghcn.subsets[[i]],LST = mod_LST) #Add the variable LST to the subset dataset
|
21 |
|
|
dst$LST<-dst[[LST_month]] #Add also to monthly dataset
|
22 |
|
|
|
23 |
e64dc6d0
|
Benoit Parmentier
|
#n<-nrow(ghcn.subsets[[i]])
|
24 |
|
|
#ns<-n-round(n*prop) #Create a sample from the data frame with 70% of the rows
|
25 |
|
|
#nv<-n-ns #create a sample for validation with prop of the rows
|
26 |
|
|
#ind.training <- sample(nrow(ghcn.subsets[[i]]), size=ns, replace=FALSE) #This selects the index position for 70% of the rows taken randomly
|
27 |
|
|
ind.training<-sampling[[i]]
|
28 |
|
|
ind.testing <- setdiff(1:nrow(ghcn.subsets[[i]]), ind.training)
|
29 |
|
|
data_s <- ghcn.subsets[[i]][ind.training, ] #Training dataset currently used in the modeling
|
30 |
|
|
data_v <- ghcn.subsets[[i]][ind.testing, ] #Testing/validation dataset using input sampling
|
31 |
|
|
|
32 |
|
|
ns<-nrow(data_s)
|
33 |
|
|
nv<-nrow(data_v)
|
34 |
|
|
#i=1
|
35 |
|
|
date_proc<-sampling_dat$date[i]
|
36 |
|
|
date_proc<-strptime(sampling_dat$date[i], "%Y%m%d") # interpolation date being processed
|
37 |
|
|
mo<-as.integer(strftime(date_proc, "%m")) # current month of the date being processed
|
38 |
|
|
day<-as.integer(strftime(date_proc, "%d"))
|
39 |
|
|
year<-as.integer(strftime(date_proc, "%Y"))
|
40 |
|
|
|
41 |
|
|
datelabel=format(ISOdate(year,mo,day),"%b %d, %Y")
|
42 |
|
|
|
43 |
|
|
###########
|
44 |
d13c7dc1
|
Benoit Parmentier
|
# STEP 1 - LST 10 year monthly averages: THIS IS NOT USED IN CAI method
|
45 |
e64dc6d0
|
Benoit Parmentier
|
###########
|
46 |
|
|
|
47 |
|
|
themolst<-raster(molst,mo) #current month being processed saved in a raster image
|
48 |
|
|
plot(themolst)
|
49 |
|
|
|
50 |
|
|
###########
|
51 |
|
|
# STEP 2 - Weather station means across same days: Monthly mean calculation
|
52 |
|
|
###########
|
53 |
|
|
|
54 |
|
|
modst=dst[dst$month==mo,] #Subsetting dataset for the relevant month of the date being processed
|
55 |
|
|
|
56 |
|
|
##########
|
57 |
|
|
# STEP 3 - get LST at stations
|
58 |
|
|
##########
|
59 |
|
|
|
60 |
|
|
sta_lola=modst[,c("lon","lat")] #Extracting locations of stations for the current month..
|
61 |
|
|
|
62 |
|
|
proj_str="+proj=lcc +lat_1=43 +lat_2=45.5 +lat_0=41.75 +lon_0=-120.5 +x_0=400000 +y_0=0 +ellps=GRS80 +units=m +no_defs";
|
63 |
|
|
# Extract raster cell values at station locations given as lon/lat pairs.
#
# r    : Raster* layer to sample (e.g. the monthly LST layer themolst).
# lat  : numeric vector of latitudes in degrees.
# lon  : numeric vector of longitudes in degrees.
# proj : PROJ.4 string used to project lon/lat into the raster's CRS;
#        defaults to proj_str defined earlier in this script, so existing
#        callers are unaffected.
#
# Returns the raster values at the projected coordinates; NA where a point
# falls outside the raster extent (cellFromXY returns NA there).
lookup <- function(r, lat, lon, proj = proj_str) {
  xy <- project(cbind(lon, lat), proj) # lon/lat -> projected x/y (rgdal::project)
  cidx <- cellFromXY(r, xy)            # cell index for each projected point
  r[cidx]                              # cell values; NA for out-of-extent points
}
|
68 |
|
|
sta_tmax_from_lst=lookup(themolst,sta_lola$lat,sta_lola$lon) #Extracted values of LST for the stations
|
69 |
|
|
|
70 |
|
|
#########
|
71 |
|
|
# STEP 4 - bias at stations
|
72 |
|
|
#########
|
73 |
|
|
|
74 |
|
|
sta_bias=sta_tmax_from_lst-modst$TMax; #That is the difference between the monthly LST mean and monthly station mean
|
75 |
|
|
#Added by Benoit
|
76 |
|
|
modst$LSTD_bias<-sta_bias #Adding bias to data frame modst containning the monthly average for 10 years
|
77 |
|
|
|
78 |
|
|
bias_xy=project(as.matrix(sta_lola),proj_str)
|
79 |
|
|
# png(paste("LST_TMax_scatterplot_",dates[i],out_prefix,".png", sep=""))
|
80 |
|
|
# plot(modst$TMax,sta_tmax_from_lst,xlab="Station mo Tmax",ylab="LST mo Tmax",main=paste("LST vs TMax for",datelabel,sep=" "))
|
81 |
|
|
# abline(0,1)
|
82 |
|
|
# dev.off()
|
83 |
|
|
|
84 |
|
|
#added by Benoit
|
85 |
|
|
#x<-ghcn.subsets[[i]] #Holds both training and testing for instance 161 rows for Jan 1
|
86 |
|
|
x<-data_v
|
87 |
|
|
d<-data_s
|
88 |
|
|
|
89 |
|
|
pos<-match("value",names(d)) #Find column with name "value"
|
90 |
|
|
#names(d)[pos]<-c("dailyTmax")
|
91 |
|
|
names(d)[pos]<-y_var_name
|
92 |
|
|
names(x)[pos]<-y_var_name
|
93 |
|
|
#names(x)[pos]<-c("dailyTmax")
|
94 |
|
|
d$dailyTmax=(as.numeric(d$dailyTmax))/10 #stored as 1/10 degree C to allow integer storage
|
95 |
|
|
x$dailyTmax=(as.numeric(x$dailyTmax))/10 #stored as 1/10 degree C to allow integer storage
|
96 |
|
|
pos<-match("station",names(d)) #Find column with name "value"
|
97 |
|
|
names(d)[pos]<-c("id")
|
98 |
|
|
names(x)[pos]<-c("id")
|
99 |
d13c7dc1
|
Benoit Parmentier
|
names(modst)[1]<-c("id") #modst contains the average tmax per month for every stations...it has 193 rows
|
100 |
|
|
|
101 |
|
|
dmoday=merge(modst,d,by="id",suffixes=c("",".y2")) #LOOSING DATA HERE!!! from 113 t0 103
|
102 |
|
|
xmoday=merge(modst,x,by="id",suffixes=c("",".y2")) #LOOSING DATA HERE!!! from 48 t0 43
|
103 |
|
|
mod_pat<-glob2rx("*.y2")
|
104 |
|
|
var_pat<-grep(mod_pat,names(dmoday),value=FALSE) # using grep with "value" extracts the matching names
|
105 |
|
|
dmoday<-dmoday[,-var_pat]
|
106 |
|
|
mod_pat<-glob2rx("*.y2")
|
107 |
|
|
var_pat<-grep(mod_pat,names(xmoday),value=FALSE) # using grep with "value" extracts the matching names
|
108 |
|
|
xmoday<-xmoday[,-var_pat] #Removing duplicate columns
|
109 |
|
|
|
110 |
|
|
#dmoday=merge(modst,d,by="id") #LOOSING DATA HERE!!! from 113 t0 103
|
111 |
|
|
#xmoday=merge(modst,x,by="id") #LOOSING DATA HERE!!! from 48 t0 43
|
112 |
|
|
#names(dmoday)[4]<-c("lat")
|
113 |
|
|
#names(dmoday)[5]<-c("lon") #dmoday contains all the the information: BIAS, monn
|
114 |
|
|
#names(xmoday)[4]<-c("lat")
|
115 |
|
|
#names(xmoday)[5]<-c("lon") #dmoday contains all the the information: BIAS, monn
|
116 |
e64dc6d0
|
Benoit Parmentier
|
|
117 |
|
|
data_v<-xmoday
|
118 |
|
|
###
|
119 |
|
|
|
120 |
|
|
#dmoday contains the daily tmax values for training with TMax being the monthly station tmax mean
|
121 |
|
|
#xmoday contains the daily tmax values for validation with TMax being the monthly station tmax mean
|
122 |
|
|
|
123 |
|
|
# windows()
|
124 |
|
|
#png(paste("LST_TMax_scatterplot_",dates[i],out_prefix,".png", sep=""))
|
125 |
|
|
png(paste("Daily_tmax_monthly_TMax_scatterplot_",sampling_dat$date[i],"_",sampling_dat$prop[i],"_",sampling_dat$run_samp[i],
|
126 |
|
|
out_prefix,".png", sep=""))
|
127 |
|
|
plot(dailyTmax~TMax,data=dmoday,xlab="Mo Tmax",ylab=paste("Daily for",datelabel),main="across stations in OR")
|
128 |
|
|
#savePlot(paste("Daily_tmax_monthly_TMax_scatterplot_",dates[i],out_prefix,".png", sep=""), type="png")
|
129 |
|
|
#png(paste("LST_TMax_scatterplot_",dates[i],out_prefix,".png", sep=""))
|
130 |
|
|
dev.off()
|
131 |
|
|
|
132 |
|
|
########
|
133 |
|
|
# STEP 5 - interpolate bias/climatology
|
134 |
|
|
########
|
135 |
|
|
|
136 |
|
|
# ?? include covariates like elev, distance to coast, cloud frequency, tree height
|
137 |
|
|
#library(fields)
|
138 |
|
|
#windows()
|
139 |
|
|
#quilt.plot(sta_lola,sta_bias,main="Bias at stations",asp=1)
|
140 |
|
|
#US(add=T,col="magenta",lwd=2)
|
141 |
|
|
#fitbias<-Tps(bias_xy,sta_bias) #use TPS or krige
|
142 |
|
|
|
143 |
|
|
#Adding options to use only training stations: 07/11/2012
|
144 |
|
|
bias_xy<-project(as.matrix(sta_lola),proj_str)
|
145 |
d13c7dc1
|
Benoit Parmentier
|
clim_xy<-project(as.matrix(sta_lola),proj_str) #This is the coordinates of monthly station location (193)
|
146 |
e64dc6d0
|
Benoit Parmentier
|
#bias_xy2=project(as.matrix(c(dmoday$lon,dmoday$lat),proj_str)
|
147 |
|
|
if(bias_val==1){
|
148 |
d13c7dc1
|
Benoit Parmentier
|
sta_bias<-dmoday$LSTD_bias
|
149 |
|
|
bias_xy<-cbind(dmoday$x_OR83M,dmoday$y_OR83M) #This will use only stations from training daily samples for climatology step if bias_val=1
|
150 |
e64dc6d0
|
Benoit Parmentier
|
}
|
151 |
|
|
|
152 |
d13c7dc1
|
Benoit Parmentier
|
sta_clim<-modst$TMax #This contains the monthly climatology...used in the prediction of the monthly surface
|
153 |
e64dc6d0
|
Benoit Parmentier
|
|
154 |
|
|
#fitbias<-Krig(bias_xy,sta_bias,theta=1e5) #use TPS or krige
|
155 |
|
|
fitclim<-Krig(clim_xy,sta_clim,theta=1e5)
|
156 |
|
|
|
157 |
|
|
#The output is a krig object using fields
|
158 |
|
|
#mod9a<-fitbias
|
159 |
|
|
mod9a<-fitclim
|
160 |
|
|
|
161 |
|
|
# Creating plot of bias surface and saving it
|
162 |
|
|
#X11()
|
163 |
|
|
png(paste("Climtology_surface_LST_TMax_",sampling_dat$date[i],"_",sampling_dat$prop[i],"_",sampling_dat$run_samp[i],
|
164 |
|
|
out_prefix,".png", sep="")) #Create file to write a plot
|
165 |
|
|
datelabel2=format(ISOdate(year,mo,day),"%B ") #added by Benoit, label
|
166 |
|
|
surface(fitclim,col=rev(terrain.colors(100)),asp=1,main=paste("Interpolated clim for",datelabel2,sep=" ")) #Plot to file
|
167 |
|
|
#savePlot(paste("Bias_surface_LST_TMax_",dates[i],out_prefix,".png", sep=""), type="png")
|
168 |
|
|
dev.off() #Release the hold to the file
|
169 |
|
|
|
170 |
|
|
#US(add=T,col="magenta",lwd=2)
|
171 |
|
|
|
172 |
|
|
##########
|
173 |
d13c7dc1
|
Benoit Parmentier
|
# STEP 7 - interpolate delta across space: this is the daily deviation from the monthly average
|
174 |
e64dc6d0
|
Benoit Parmentier
|
##########
|
175 |
|
|
|
176 |
|
|
daily_sta_lola=dmoday[,c("lon","lat")] #could be same as before but why assume merge does this - assume not
|
177 |
|
|
daily_sta_xy=project(as.matrix(daily_sta_lola),proj_str)
|
178 |
|
|
daily_delta=dmoday$dailyTmax-dmoday$TMax
|
179 |
|
|
|
180 |
d13c7dc1
|
Benoit Parmentier
|
daily_deltaclim<-dmoday$dailyTmax-dmoday$TMax #For daily surface interpolation...
|
181 |
|
|
daily_deltaclim_v<-data_v$dailyTmax-data_v$TMax #For validation...
|
182 |
e64dc6d0
|
Benoit Parmentier
|
#dmoday$daily_deltaclim <-daily_deltaclim
|
183 |
|
|
#fitdelta<-Tps(daily_sta_xy,daily_delta) #use TPS or krige
|
184 |
|
|
fitdelta<-Krig(daily_sta_xy,daily_delta,theta=1e5) #use TPS or krige
|
185 |
|
|
fitdeltaclim<-Krig(daily_sta_xy,daily_deltaclim,theta=1e5) #use TPS or krige
|
186 |
|
|
|
187 |
|
|
#Kriging using fields package
|
188 |
|
|
#mod9b<-fitdelta
|
189 |
|
|
mod9b<-fitdeltaclim
|
190 |
|
|
# Creating plot of bias surface and saving it
|
191 |
|
|
#X11()
|
192 |
|
|
png(paste("Deltaclim_surface_TMax_",sampling_dat$date[i],"_",sampling_dat$prop[i],"_",sampling_dat$run_samp[i],
|
193 |
|
|
out_prefix,".png", sep=""))
|
194 |
|
|
surface(fitdeltaclim,col=rev(terrain.colors(100)),asp=1,main=paste("Interpolated deltaclim for",datelabel,sep=" "))
|
195 |
|
|
#savePlot(paste("Delta_surface_LST_TMax_",dates[i],out_prefix,".png", sep=""), type="png")
|
196 |
|
|
dev.off()
|
197 |
|
|
#US(add=T,col="magenta",lwd=2)
|
198 |
|
|
#
|
199 |
|
|
|
200 |
|
|
#### Added by Benoit on 06/19
|
201 |
|
|
data_s<-dmoday #put the
|
202 |
|
|
#data_s$daily_delta<-daily_delta
|
203 |
|
|
data_s$daily_deltaclim<-daily_deltaclim
|
204 |
|
|
data_v$daily_deltaclim<-daily_deltaclim_v
|
205 |
|
|
#data_s$y_var<-daily_delta #y_var is the variable currently being modeled, may be better with BIAS!!
|
206 |
|
|
#data_s$y_var<-data_s$LSTD_bias
|
207 |
|
|
#### Added by Benoit ends
|
208 |
|
|
|
209 |
|
|
#########
|
210 |
d13c7dc1
|
Benoit Parmentier
|
# STEP 8 - assemble final answer - T= LST-Bias(interpolated)+delta(interpolated) (This is for fusion not implemented in this script...)
|
211 |
|
|
# T= clim(interpolated) + deltaclim(interpolated) (This is for CAI)
|
212 |
e64dc6d0
|
Benoit Parmentier
|
#########
|
213 |
|
|
|
214 |
|
|
#bias_rast=interpolate(themolst,fitbias) #interpolation using function from raster package
|
215 |
|
|
clim_rast=interpolate(themolst,fitclim) #interpolation using function from raster package
|
216 |
|
|
#themolst is raster layer, fitbias is "Krig" object from bias surface
|
217 |
|
|
#plot(bias_rast,main="Raster bias") #This not displaying...
|
218 |
|
|
|
219 |
|
|
#Saving kriged surface in raster images
|
220 |
|
|
data_name<-paste("clim_",sampling_dat$date[i],"_",sampling_dat$prop[i],"_",sampling_dat$run_samp[i],sep="")
|
221 |
|
|
raster_name<-paste("CAI_",data_name,out_prefix,".rst", sep="")
|
222 |
|
|
writeRaster(clim_rast, filename=raster_name,overwrite=TRUE) #Writing the data in a raster file format...(IDRISI)
|
223 |
|
|
|
224 |
|
|
#daily_delta_rast=interpolate(themolst,fitdelta) #Interpolation of the bias surface...
|
225 |
|
|
daily_deltaclim_rast=interpolate(themolst,fitdeltaclim) #Interpolation of the bias surface...
|
226 |
|
|
|
227 |
|
|
#plot(daily_delta_rast,main="Raster Daily Delta")
|
228 |
|
|
|
229 |
|
|
#Saving kriged surface in raster images
|
230 |
|
|
data_name<-paste("deltaclim_",sampling_dat$date[i],"_",sampling_dat$prop[i],"_",sampling_dat$run_samp[i],sep="")
|
231 |
|
|
raster_name<-paste("CAI_",data_name,out_prefix,".rst", sep="")
|
232 |
|
|
writeRaster(daily_deltaclim_rast, filename=raster_name,overwrite=TRUE) #Writing the data in a raster file format...(IDRISI)
|
233 |
|
|
|
234 |
|
|
#tmax_predicted=themolst+daily_delta_rast-bias_rast #Final surface as a raster layer...eqt ok
|
235 |
|
|
tmax_predicted<-daily_deltaclim_rast + clim_rast #Final surface as a raster layer...
|
236 |
|
|
#tmp6<-data_s$daily_deltaclim +data_s$TMax
|
237 |
|
|
#tmp7<-extract(tmax_predicted,data_s)
|
238 |
|
|
#plot(tmax_predicted,main="Predicted daily")
|
239 |
|
|
|
240 |
|
|
#Saving kriged surface in raster images
|
241 |
|
|
data_name<-paste("tmax_predicted_",sampling_dat$date[i],"_",sampling_dat$prop[i],"_",sampling_dat$run_samp[i],sep="")
|
242 |
|
|
raster_name<-paste("CAI_",data_name,out_prefix,".rst", sep="")
|
243 |
|
|
writeRaster(tmax_predicted, filename=raster_name,overwrite=TRUE) #Writing the data in a raster file format...(IDRISI)
|
244 |
|
|
|
245 |
|
|
########
|
246 |
|
|
# check: assessment of results: validation
|
247 |
|
|
########
|
248 |
|
|
# Root mean squared error between predictions x and observations y.
# NA values propagate (no na.rm), matching the original behavior.
RMSE <- function(x, y) {
  sqrt(mean((x - y)^2))
}
|
249 |
|
|
# Mean absolute error between predictions x and observations y.
# NA values propagate (no na.rm), matching the original behavior.
MAE_fun <- function(x, y) {
  mean(abs(x - y))
}
|
250 |
|
|
#ME_fun<-function(x,y){return(mean(abs(y)))}
|
251 |
|
|
#FIT ASSESSMENT
|
252 |
|
|
sta_pred_data_s=lookup(tmax_predicted,data_s$lat,data_s$lon)
|
253 |
|
|
rmse_fit=RMSE(sta_pred_data_s,data_s$dailyTmax)
|
254 |
|
|
mae_fit=MAE_fun(sta_pred_data_s,data_s$dailyTmax)
|
255 |
|
|
|
256 |
|
|
sta_pred=lookup(tmax_predicted,data_v$lat,data_v$lon)
|
257 |
|
|
#sta_pred=lookup(tmax_predicted,daily_sta_lola$lat,daily_sta_lola$lon)
|
258 |
|
|
#rmse=RMSE(sta_pred,dmoday$dailyTmax)
|
259 |
|
|
#pos<-match("value",names(data_v)) #Find column with name "value"
|
260 |
|
|
#names(data_v)[pos]<-c("dailyTmax")
|
261 |
|
|
tmax<-data_v$dailyTmax
|
262 |
|
|
#data_v$dailyTmax<-tmax
|
263 |
|
|
rmse=RMSE(sta_pred,tmax)
|
264 |
|
|
mae<-MAE_fun(sta_pred,tmax)
|
265 |
|
|
r2<-cor(sta_pred,tmax)^2 #R2, coef. of var
|
266 |
|
|
me<-mean(sta_pred-tmax)
|
267 |
|
|
|
268 |
|
|
#plot(sta_pred~dmoday$dailyTmax,xlab=paste("Actual daily for",datelabel),ylab="Pred daily",main=paste("RMSE=",rmse))
|
269 |
|
|
|
270 |
|
|
png(paste("Predicted_tmax_versus_observed_scatterplot_",sampling_dat$date[i],"_",sampling_dat$prop[i],"_",
|
271 |
|
|
sampling_dat$run_samp[i],out_prefix,".png", sep=""))
|
272 |
|
|
plot(sta_pred~tmax,xlab=paste("Actual daily for",datelabel),ylab="Pred daily",main=paste("RMSE=",rmse))
|
273 |
|
|
abline(0,1)
|
274 |
|
|
#savePlot(paste("Predicted_tmax_versus_observed_scatterplot_",dates[i],out_prefix,".png", sep=""), type="png")
|
275 |
|
|
dev.off()
|
276 |
|
|
#resid=sta_pred-dmoday$dailyTmax
|
277 |
|
|
resid=sta_pred-tmax
|
278 |
|
|
#quilt.plot(daily_sta_lola,resid)
|
279 |
|
|
|
280 |
d13c7dc1
|
Benoit Parmentier
|
|
281 |
e64dc6d0
|
Benoit Parmentier
|
###BEFORE GAM prediction the data object must be transformed to SDF
|
282 |
|
|
|
283 |
|
|
coords<- data_v[,c('x_OR83M','y_OR83M')]
|
284 |
|
|
coordinates(data_v)<-coords
|
285 |
|
|
proj4string(data_v)<-CRS #Need to assign coordinates...
|
286 |
|
|
coords<- data_s[,c('x_OR83M','y_OR83M')]
|
287 |
|
|
coordinates(data_s)<-coords
|
288 |
|
|
proj4string(data_s)<-CRS #Need to assign coordinates..
|
289 |
d13c7dc1
|
Benoit Parmentier
|
coords<- modst[,c('x_OR83M','y_OR83M')]
|
290 |
|
|
coordinates(modst)<-coords
|
291 |
|
|
proj4string(modst)<-CRS #Need to assign coordinates..
|
292 |
e64dc6d0
|
Benoit Parmentier
|
|
293 |
|
|
ns<-nrow(data_s) #This is added to because some loss of data might have happened because of the averaging...
|
294 |
|
|
nv<-nrow(data_v)
|
295 |
|
|
|
296 |
|
|
###GAM PREDICTION
|
297 |
|
|
|
298 |
|
|
#data_s$y_var<-data_s$dailyTmax #This shoudl be changed for any variable!!!
|
299 |
|
|
#data_v$y_var<-data_v$dailyTmax
|
300 |
|
|
#data_v$y_var<-data_v$daily_deltaclim
|
301 |
|
|
data_s$y_var<-data_s$daily_deltaclim
|
302 |
|
|
data_v$y_var<-data_v$daily_deltaclim
|
303 |
|
|
|
304 |
d13c7dc1
|
Benoit Parmentier
|
if (climgam==1){ #This is an option to use covariates in the daily surface...
|
305 |
e64dc6d0
|
Benoit Parmentier
|
data_s$y_var<-data_s$TMax
|
306 |
|
|
data_v$y_var<-data_v$TMax
|
307 |
d13c7dc1
|
Benoit Parmentier
|
data_month<-modst
|
308 |
|
|
data_month$y_var<-modst$TMax
|
309 |
e64dc6d0
|
Benoit Parmentier
|
}
|
310 |
|
|
|
311 |
|
|
#Model and response variable can be changed without affecting the script
|
312 |
|
|
|
313 |
|
|
formula1 <- as.formula("y_var ~ s(lat) + s(lon) + s(ELEV_SRTM)", env=.GlobalEnv)
|
314 |
|
|
formula2 <- as.formula("y_var~ s(lat,lon)+ s(ELEV_SRTM)", env=.GlobalEnv)
|
315 |
|
|
formula3 <- as.formula("y_var~ s(lat) + s (lon) + s (ELEV_SRTM) + s (Northness)+ s (Eastness) + s(DISTOC)", env=.GlobalEnv)
|
316 |
|
|
formula4 <- as.formula("y_var~ s(lat) + s (lon) + s(ELEV_SRTM) + s(Northness) + s (Eastness) + s(DISTOC) + s(LST)", env=.GlobalEnv)
|
317 |
|
|
formula5 <- as.formula("y_var~ s(lat,lon) +s(ELEV_SRTM) + s(Northness,Eastness) + s(DISTOC) + s(LST)", env=.GlobalEnv)
|
318 |
|
|
formula6 <- as.formula("y_var~ s(lat,lon) +s(ELEV_SRTM) + s(Northness,Eastness) + s(DISTOC) + s(LST)+s(LC1)", env=.GlobalEnv)
|
319 |
|
|
formula7 <- as.formula("y_var~ s(lat,lon) +s(ELEV_SRTM) + s(Northness,Eastness) + s(DISTOC) + s(LST)+s(LC3)", env=.GlobalEnv)
|
320 |
|
|
formula8 <- as.formula("y_var~ s(lat,lon) +s(ELEV_SRTM) + s(Northness,Eastness) + s(DISTOC) + s(LST) + s(LC1,LC3)", env=.GlobalEnv)
|
321 |
|
|
|
322 |
d13c7dc1
|
Benoit Parmentier
|
#mod1<- try(gam(formula1, data=data_s))
|
323 |
|
|
#mod2<- try(gam(formula2, data=data_s)) #modified nesting....from 3 to 2
|
324 |
|
|
#mod3<- try(gam(formula3, data=data_s))
|
325 |
|
|
#mod4<- try(gam(formula4, data=data_s))
|
326 |
|
|
#mod5<- try(gam(formula5, data=data_s))
|
327 |
|
|
#mod6<- try(gam(formula6, data=data_s))
|
328 |
|
|
#mod7<- try(gam(formula7, data=data_s))
|
329 |
|
|
#mod8<- try(gam(formula8, data=data_s))
|
330 |
e64dc6d0
|
Benoit Parmentier
|
|
331 |
d13c7dc1
|
Benoit Parmentier
|
if (climgam==1){ #This will automatically use monthly station data in the second step
|
332 |
|
|
mod1<- try(gam(formula1, data=data_month))
|
333 |
|
|
mod2<- try(gam(formula2, data=data_month)) #modified nesting....from 3 to 2
|
334 |
|
|
mod3<- try(gam(formula3, data=data_month))
|
335 |
|
|
mod4<- try(gam(formula4, data=data_month))
|
336 |
|
|
mod5<- try(gam(formula5, data=data_month))
|
337 |
|
|
mod6<- try(gam(formula6, data=data_month))
|
338 |
|
|
mod7<- try(gam(formula7, data=data_month))
|
339 |
|
|
mod8<- try(gam(formula8, data=data_month))
|
340 |
|
|
|
341 |
|
|
} else if (climgam==0){ #This will use daily delta in the second step
|
342 |
|
|
|
343 |
|
|
mod1<- try(gam(formula1, data=data_s))
|
344 |
|
|
mod2<- try(gam(formula2, data=data_s)) #modified nesting....from 3 to 2
|
345 |
|
|
mod3<- try(gam(formula3, data=data_s))
|
346 |
|
|
mod4<- try(gam(formula4, data=data_s))
|
347 |
|
|
mod5<- try(gam(formula5, data=data_s))
|
348 |
|
|
mod6<- try(gam(formula6, data=data_s))
|
349 |
|
|
mod7<- try(gam(formula7, data=data_s))
|
350 |
|
|
mod8<- try(gam(formula8, data=data_s))
|
351 |
|
|
}
|
352 |
|
|
|
353 |
e64dc6d0
|
Benoit Parmentier
|
### Added by benoit
|
354 |
|
|
#Store results using TPS
|
355 |
|
|
j=nmodels+1
|
356 |
|
|
results_RMSE[1]<- sampling_dat$date[i] #storing the interpolation dates in the first column
|
357 |
|
|
results_RMSE[2]<- ns #number of stations used in the training stage
|
358 |
|
|
results_RMSE[3]<- "RMSE"
|
359 |
|
|
|
360 |
|
|
results_RMSE[j+3]<- rmse #Storing RMSE for the model j
|
361 |
|
|
|
362 |
|
|
results_RMSE_f[1]<- sampling_dat$date[i] #storing the interpolation dates in the first column
|
363 |
|
|
results_RMSE_f[2]<- ns #number of stations used in the training stage
|
364 |
|
|
results_RMSE_f[3]<- "RMSE_f"
|
365 |
|
|
results_RMSE_f[j+3]<- rmse_fit #Storing RMSE for the model j
|
366 |
|
|
|
367 |
|
|
results_MAE_f[1]<- sampling_dat$date[i] #storing the interpolation dates in the first column
|
368 |
|
|
results_MAE_f[2]<- ns #number of stations used in the training stage
|
369 |
|
|
results_MAE_f[3]<- "RMSE_f"
|
370 |
|
|
results_MAE_f[j+3]<- mae_fit #Storing RMSE for the model j
|
371 |
|
|
|
372 |
|
|
results_MAE[1]<- sampling_dat$date[i] #storing the interpolation dates in the first column
|
373 |
|
|
results_MAE[2]<- ns #number of stations used in the training stage
|
374 |
|
|
results_MAE[3]<- "MAE"
|
375 |
|
|
results_MAE[j+3]<- mae #Storing RMSE for the model j
|
376 |
|
|
|
377 |
|
|
results_ME[1]<- sampling_dat$date[i] #storing the interpolation dates in the first column
|
378 |
|
|
results_ME[2]<- ns #number of stations used in the training stage
|
379 |
|
|
results_ME[3]<- "ME"
|
380 |
|
|
results_ME[j+3]<- me #Storing RMSE for the model j
|
381 |
|
|
|
382 |
|
|
results_R2[1]<- sampling_dat$date[i] #storing the interpolation dates in the first column
|
383 |
|
|
results_R2[2]<- ns #number of stations used in the training stage
|
384 |
|
|
results_R2[3]<- "R2"
|
385 |
|
|
results_R2[j+3]<- r2 #Storing RMSE for the model j
|
386 |
|
|
|
387 |
|
|
pred_mod<-paste("pred_mod",j,sep="")
|
388 |
|
|
#Adding the results back into the original dataframes.
|
389 |
|
|
data_s[[pred_mod]]<-sta_pred_data_s
|
390 |
|
|
data_v[[pred_mod]]<-sta_pred
|
391 |
|
|
|
392 |
|
|
#Model assessment: RMSE and then krig the residuals....!
|
393 |
|
|
|
394 |
|
|
res_mod_s<- data_s$dailyTmax - data_s[[pred_mod]] #Residuals from kriging training
|
395 |
|
|
res_mod_v<- data_v$dailyTmax - data_v[[pred_mod]] #Residuals from kriging validation
|
396 |
|
|
|
397 |
|
|
name2<-paste("res_mod",j,sep="")
|
398 |
|
|
data_v[[name2]]<-as.numeric(res_mod_v)
|
399 |
|
|
data_s[[name2]]<-as.numeric(res_mod_s)
|
400 |
|
|
|
401 |
|
|
#ns<-nrow(data_s) #This is added to because some loss of data might have happened because of the averaging...
|
402 |
|
|
#nv<-nrow(data_v)
|
403 |
d13c7dc1
|
Benoit Parmentier
|
#browser()
|
404 |
e64dc6d0
|
Benoit Parmentier
|
|
405 |
|
|
for (j in 1:nmodels){
|
406 |
|
|
|
407 |
|
|
##Model assessment: specific diagnostic/metrics for GAM
|
408 |
|
|
|
409 |
|
|
name<-paste("mod",j,sep="") #modj is the name of The "j" model (mod1 if j=1)
|
410 |
|
|
mod<-get(name) #accessing GAM model ojbect "j"
|
411 |
|
|
|
412 |
|
|
#If mod "j" is not a model object
|
413 |
|
|
if (inherits(mod,"try-error")) {
|
414 |
|
|
results_m1[1,1]<- sampling_dat$date[i] #storing the interpolation dates in the first column
|
415 |
|
|
results_m1[1,2]<- ns #number of stations used in the training stage
|
416 |
|
|
results_m1[1,3]<- "AIC"
|
417 |
|
|
results_m1[1,j+3]<- NA
|
418 |
|
|
|
419 |
|
|
results_m2[1,1]<- sampling_dat$date[i] #storing the interpolation dates in the first column
|
420 |
|
|
results_m2[1,2]<- ns #number of stations used in the training
|
421 |
|
|
results_m2[1,3]<- "GCV"
|
422 |
|
|
results_m2[1,j+3]<- NA
|
423 |
|
|
|
424 |
|
|
results_m3[1,1]<- sampling_dat$date[i] #storing the interpolation dates in the first column
|
425 |
|
|
results_m3[1,2]<- ns #number of stations used in the training stage
|
426 |
|
|
results_m3[1,3]<- "DEV"
|
427 |
|
|
results_m3[1,j+3]<- NA
|
428 |
|
|
|
429 |
|
|
results_RMSE_f[1,1]<- sampling_dat$date[i] #storing the interpolation dates in the first column
|
430 |
|
|
results_RMSE_f[1,2]<- ns #number of stations used in the training stage
|
431 |
|
|
results_RMSE_f[1,3]<- "RSME_f"
|
432 |
|
|
results_RMSE_f[1,j+3]<- NA
|
433 |
|
|
|
434 |
|
|
results_MAE_f[1,1]<- sampling_dat$date[i] #storing the interpolation dates in the first column
|
435 |
|
|
results_MAE_f[1,2]<- ns #number of stations used in the training stage
|
436 |
|
|
results_MAE_f[1,3]<- "MAE_f"
|
437 |
|
|
results_MAE_f[1,j+3]<-NA
|
438 |
|
|
|
439 |
|
|
results_R2_f[1,1]<- sampling_dat$date[i] #storing the interpolation dates in the first column
|
440 |
|
|
results_R2_f[1,2]<- ns #number of stations used in the training stage
|
441 |
|
|
results_R2_f[1,3]<- "R2_f"
|
442 |
|
|
results_R2_f[1,j+3]<- NA #Storing R2 for the model j
|
443 |
|
|
|
444 |
|
|
|
445 |
|
|
results_RMSE[1,1]<- sampling_dat$date[i] #storing the interpolation dates in the first column
|
446 |
|
|
results_RMSE[1,2]<- ns #number of stations used in the training stage
|
447 |
|
|
results_RMSE[1,3]<- "RMSE"
|
448 |
|
|
results_RMSE[1,j+3]<- NA #Storing RMSE for the model j
|
449 |
|
|
results_MAE[1,1]<- sampling_dat$date[i] #storing the interpolation dates in the first column
|
450 |
|
|
results_MAE[1,2]<- ns #number of stations used in the training stage
|
451 |
|
|
results_MAE[1,3]<- "MAE"
|
452 |
|
|
results_MAE[1,j+3]<- NA #Storing MAE for the model j
|
453 |
|
|
results_ME[1,1]<- sampling_dat$date[i] #storing the interpolation dates in the first column
|
454 |
|
|
results_ME[1,2]<- ns #number of stations used in the training stage
|
455 |
|
|
results_ME[1,3]<- "ME"
|
456 |
|
|
results_ME[1,j+3]<- NA #Storing ME for the model j
|
457 |
|
|
results_R2[1,1]<- sampling_dat$date[i] #storing the interpolation dates in the first column
|
458 |
|
|
results_R2[1,2]<- ns #number of stations used in the training stage
|
459 |
|
|
results_R2[1,3]<- "R2"
|
460 |
|
|
results_R2[1,j+3]<- NA #Storing R2 for the model j
|
461 |
|
|
|
462 |
|
|
}
|
463 |
|
|
|
464 |
|
|
#If mod is a modelobject
|
465 |
|
|
|
466 |
|
|
#If mod "j" is not a model object
|
467 |
|
|
if (inherits(mod,"gam")) {
|
468 |
|
|
|
469 |
d13c7dc1
|
Benoit Parmentier
|
# model specific metrics
|
470 |
e64dc6d0
|
Benoit Parmentier
|
results_m1[1,1]<- sampling_dat$date[i] #storing the interpolation dates in the first column
|
471 |
|
|
results_m1[1,2]<- ns #number of stations used in the training stage
|
472 |
|
|
results_m1[1,3]<- "AIC"
|
473 |
|
|
results_m1[1,j+3]<- AIC (mod)
|
474 |
|
|
|
475 |
|
|
results_m2[1,1]<- sampling_dat$date[i] #storing the interpolation dates in the first column
|
476 |
|
|
results_m2[1,2]<- ns #number of stations used in the training
|
477 |
|
|
results_m2[1,3]<- "GCV"
|
478 |
|
|
results_m2[1,j+3]<- mod$gcv.ubre
|
479 |
|
|
|
480 |
|
|
results_m3[1,1]<- sampling_dat$date[i] #storing the interpolation dates in the first column
|
481 |
|
|
results_m3[1,2]<- ns #number of stations used in the training stage
|
482 |
|
|
results_m3[1,3]<- "DEV"
|
483 |
|
|
results_m3[1,j+3]<- mod$deviance
|
484 |
|
|
|
485 |
|
|
##Model assessment: general diagnostic/metrics
|
486 |
|
|
##validation: using the testing data
|
487 |
|
|
if (predval==1) {
|
488 |
|
|
|
489 |
|
|
##Model assessment: specific diagnostic/metrics for GAM
|
490 |
|
|
|
491 |
|
|
name<-paste("mod",j,sep="") #modj is the name of The "j" model (mod1 if j=1)
|
492 |
|
|
mod<-get(name) #accessing GAM model ojbect "j"
|
493 |
|
|
|
494 |
|
|
s_sgdf<-as(s_raster,"SpatialGridDataFrame") #Conversion to spatial grid data frame
|
495 |
|
|
|
496 |
|
|
rpred<- predict(mod, newdata=s_sgdf, se.fit = TRUE) #Using the coeff to predict new values.
|
497 |
|
|
y_pred<-rpred$fit
|
498 |
|
|
raster_pred<-r1
|
499 |
|
|
layerNames(raster_pred)<-"y_pred"
|
500 |
|
|
values(raster_pred)<-as.numeric(y_pred)
|
501 |
|
|
data_name<-paste("predicted_mod",j,"_",sampling_dat$date[i],"_",sampling_dat$prop[i],"_",sampling_dat$run_samp[i],sep="")
|
502 |
|
|
raster_name<-paste("GAMCAI_",data_name,out_prefix,".rst", sep="")
|
503 |
|
|
writeRaster(raster_pred, filename=raster_name,overwrite=TRUE) #Writing the data in a raster file format...(IDRISI)
|
504 |
|
|
#writeRaster(r2, filename=raster_name,overwrite=TRUE) #Writing the data in a raster file format...(IDRISI)
|
505 |
|
|
|
506 |
d13c7dc1
|
Benoit Parmentier
|
tmax_predicted_CAI<-raster_pred + clim_rast #Final surface as a raster layer...taht is if daily prediction with GAM
|
507 |
e64dc6d0
|
Benoit Parmentier
|
if (climgam==1){
|
508 |
|
|
tmax_predicted_CAI<-raster_pred + daily_deltaclim_rast #Final surface as a raster layer...
|
509 |
|
|
}
|
510 |
|
|
|
511 |
|
|
layerNames(tmax_predicted_CAI)<-"y_pred"
|
512 |
|
|
data_name<-paste("predicted_mod",j,"_",sampling_dat$date[i],"_",sampling_dat$prop[i],"_",sampling_dat$run_samp[i],sep="")
|
513 |
d13c7dc1
|
Benoit Parmentier
|
raster_name<-paste("GAMCAI_tmax_predicted_",data_name,out_prefix,".rst", sep="")
|
514 |
e64dc6d0
|
Benoit Parmentier
|
writeRaster(tmax_predicted_CAI, filename=raster_name,overwrite=TRUE) #Writing the data in a raster file format...(IDRISI)
|
515 |
|
|
#writeRaster(r2, filename=raster_name,overwrite=TRUE) #Writing the data in a raster file format...(IDRISI)
|
516 |
|
|
|
517 |
|
|
pred_sgdf<-as(tmax_predicted_CAI,"SpatialGridDataFrame") #Conversion to spatial grid data frame
|
518 |
|
|
#rpred_val_s <- overlay(raster_pred,data_s) #This overlays the kriged surface tmax and the location of weather stations
|
519 |
|
|
|
520 |
|
|
rpred_val_s <- overlay(pred_sgdf,data_s) #This overlays the kriged surface tmax and the location of weather stations
|
521 |
|
|
rpred_val_v <- overlay(pred_sgdf,data_v) #This overlays the kriged surface tmax and the location of weather stations
|
522 |
|
|
|
523 |
|
|
pred_mod<-paste("pred_mod",j,sep="")
|
524 |
|
|
#Adding the results back into the original dataframes.
|
525 |
|
|
data_s[[pred_mod]]<-rpred_val_s$y_pred
|
526 |
|
|
data_v[[pred_mod]]<-rpred_val_v$y_pred
|
527 |
|
|
|
528 |
|
|
#Model assessment: RMSE and then krig the residuals....!
|
529 |
|
|
|
530 |
|
|
res_mod_s<- data_s$dailyTmax - data_s[[pred_mod]] #Residuals from kriging training
|
531 |
|
|
res_mod_v<- data_v$dailyTmax - data_v[[pred_mod]] #Residuals from kriging validation
|
532 |
|
|
|
533 |
|
|
}
|
534 |
|
|
|
535 |
|
|
if (predval==0) {
|
536 |
d13c7dc1
|
Benoit Parmentier
|
|
537 |
e64dc6d0
|
Benoit Parmentier
|
y_mod<- predict(mod, newdata=data_v, se.fit = TRUE) #Using the coeff to predict new values.
|
538 |
|
|
|
539 |
|
|
pred_mod<-paste("pred_mod",j,sep="")
|
540 |
|
|
#Adding the results back into the original dataframes.
|
541 |
|
|
data_s[[pred_mod]]<-as.numeric(mod$fit)
|
542 |
|
|
data_v[[pred_mod]]<-as.numeric(y_mod$fit)
|
543 |
|
|
|
544 |
|
|
#Model assessment: RMSE and then krig the residuals....!
|
545 |
|
|
#y_var_name<-"dailyTmax"
|
546 |
|
|
res_mod_s<- data_s$dailyTmax - data_s[[pred_mod]] #Residuals at training stations: observed tmax minus model fit (mod$fit, not kriging)
|
547 |
|
|
res_mod_v<- data_v$dailyTmax - data_v[[pred_mod]] #Residuals at validation stations: observed tmax minus model prediction (not kriging)
|
548 |
|
|
}
|
549 |
|
|
|
550 |
d13c7dc1
|
Benoit Parmentier
|
#y_var_fit= mod$fit #move it
|
551 |
|
|
#Use res_mod_s so the R2 is based on daily station training. NOTE(review): the line below correlates observed tmax with the residuals themselves; a goodness-of-fit R2 would normally correlate observed with fitted values (dailyTmax - res_mod_s) — confirm which is intended.
|
552 |
|
|
R2_mod_f<- cor(data_s$dailyTmax,res_mod_s, use="complete")^2
|
553 |
|
|
RMSE_mod_f<- sqrt(mean(res_mod_s^2,na.rm=TRUE))
|
554 |
|
|
|
555 |
|
|
results_RMSE_f[1,1]<- sampling_dat$date[i] #storing the interpolation dates in the first column
|
556 |
|
|
results_RMSE_f[1,2]<- ns #number of stations used in the training stage
|
557 |
|
|
results_RMSE_f[1,3]<- "RSME_f"
|
558 |
|
|
#results_RMSE_f[1,j+3]<-sqrt(mean(mod$residuals^2,na.rm=TRUE)) # NOTE(review): the metric label "RSME_f" stored two lines above is likely a typo for "RMSE_f" (compare "RMSE", "MAE_f", "R2_f"); downstream filtering on the label would miss it.
|
559 |
|
|
results_RMSE_f[1,j+3]<-sqrt(mean(res_mod_s^2,na.rm=TRUE))
|
560 |
|
|
|
561 |
|
|
results_MAE_f[1,1]<- sampling_dat$date[i] #storing the interpolation dates in the first column
|
562 |
|
|
results_MAE_f[1,2]<- ns #number of stations used in the training stage
|
563 |
|
|
results_MAE_f[1,3]<- "MAE_f"
|
564 |
|
|
#results_MAE_f[j+3]<-sum(abs(y_var_fit-data_s$y_var))/ns
|
565 |
|
|
results_MAE_f[1,j+3]<-mean(abs(res_mod_s),na.rm=TRUE)
|
566 |
|
|
|
567 |
|
|
results_R2_f[1,1]<- sampling_dat$date[i] #storing the interpolation dates in the first column
|
568 |
|
|
results_R2_f[1,2]<- ns #number of stations used in the training stage
|
569 |
|
|
results_R2_f[1,3]<- "R2_f"
|
570 |
|
|
results_R2_f[1,j+3]<- R2_mod_f #Storing R2 for the model j
|
571 |
|
|
|
572 |
|
|
#### Now calculate validation metrics
|
573 |
e64dc6d0
|
Benoit Parmentier
|
res_mod<-res_mod_v
|
574 |
|
|
|
575 |
|
|
#RMSE_mod <- sqrt(sum(res_mod^2)/nv) #RMSE FOR REGRESSION STEP 1: GAM
|
576 |
|
|
RMSE_mod<- sqrt(mean(res_mod^2,na.rm=TRUE))
|
577 |
|
|
#MAE_mod<- sum(abs(res_mod),na.rm=TRUE)/(nv-sum(is.na(res_mod))) #MAE from kriged surface validation
|
578 |
|
|
MAE_mod<- mean(abs(res_mod), na.rm=TRUE)
|
579 |
|
|
#ME_mod<- sum(res_mod,na.rm=TRUE)/(nv-sum(is.na(res_mod))) #ME, Mean Error or bias FOR REGRESSION STEP 1: GAM
|
580 |
|
|
ME_mod<- mean(res_mod,na.rm=TRUE) #ME, Mean Error or bias FOR REGRESSION STEP 1: GAM
|
581 |
|
|
#R2_mod<- cor(data_v$y_var,data_v[[pred_mod]])^2 #R2, coef. of var FOR REGRESSION STEP 1: GAM
|
582 |
|
|
pred_mod<-paste("pred_mod",j,sep="")
|
583 |
|
|
R2_mod<- cor(data_v$dailyTmax,data_v[[pred_mod]], use="complete")^2
|
584 |
|
|
results_RMSE[1]<- sampling_dat$date[i] #storing the interpolation dates in the first column
|
585 |
|
|
results_RMSE[2]<- ns #number of stations used in the training stage
|
586 |
|
|
results_RMSE[3]<- "RMSE"
|
587 |
|
|
results_RMSE[j+3]<- RMSE_mod #Storing RMSE for the model j
|
588 |
|
|
results_MAE[1]<- sampling_dat$date[i] #storing the interpolation dates in the first column
|
589 |
|
|
results_MAE[2]<- ns #number of stations used in the training stage
|
590 |
|
|
results_MAE[3]<- "MAE"
|
591 |
|
|
results_MAE[j+3]<- MAE_mod #Storing MAE for the model j
|
592 |
|
|
results_ME[1]<- sampling_dat$date[i] #storing the interpolation dates in the first column
|
593 |
|
|
results_ME[2]<- ns #number of stations used in the training stage
|
594 |
|
|
results_ME[3]<- "ME"
|
595 |
|
|
results_ME[j+3]<- ME_mod #Storing ME for the model j
|
596 |
|
|
results_R2[1]<- sampling_dat$date[i] #storing the interpolation dates in the first column
|
597 |
|
|
results_R2[2]<- ns #number of stations used in the training stage
|
598 |
|
|
results_R2[3]<- "R2"
|
599 |
|
|
results_R2[j+3]<- R2_mod #Storing R2 for the model j
|
600 |
|
|
|
601 |
|
|
#Saving residuals and prediction in the dataframes: tmax predicted from GAM
|
602 |
|
|
|
603 |
|
|
name2<-paste("res_mod",j,sep="")
|
604 |
|
|
data_v[[name2]]<-as.numeric(res_mod_v)
|
605 |
|
|
data_s[[name2]]<-as.numeric(res_mod_s)
|
606 |
|
|
#end of loop calculating RMSE
|
607 |
|
|
}
|
608 |
|
|
}
|
609 |
|
|
|
610 |
|
|
#if (i==length(dates)){
|
611 |
|
|
|
612 |
|
|
|
613 |
|
|
#Specific diagnostic measures related to the testing datasets
|
614 |
|
|
|
615 |
|
|
results_table_RMSE<-as.data.frame(results_RMSE)
|
616 |
|
|
results_table_MAE<-as.data.frame(results_MAE)
|
617 |
|
|
results_table_ME<-as.data.frame(results_ME)
|
618 |
|
|
results_table_R2<-as.data.frame(results_R2)
|
619 |
|
|
results_table_RMSE_f<-as.data.frame(results_RMSE_f)
|
620 |
|
|
results_table_MAE_f<-as.data.frame(results_MAE_f)
|
621 |
|
|
results_table_R2_f<-as.data.frame(results_R2_f)
|
622 |
|
|
|
623 |
|
|
results_table_m1<-as.data.frame(results_m1)
|
624 |
|
|
results_table_m2<-as.data.frame(results_m2)
|
625 |
|
|
results_table_m3<-as.data.frame(results_m3)
|
626 |
|
|
|
627 |
|
|
tb_metrics1<-rbind(results_table_RMSE,results_table_MAE, results_table_ME,
|
628 |
|
|
results_table_R2,results_table_RMSE_f,results_table_MAE_f,results_table_R2_f) #
|
629 |
|
|
tb_metrics2<-rbind(results_table_m1,results_table_m2, results_table_m3)
|
630 |
|
|
cname<-c("dates","ns","metric","mod1", "mod2","mod3", "mod4", "mod5", "mod6", "mod7","mod8","mod9")
|
631 |
|
|
colnames(tb_metrics1)<-cname
|
632 |
|
|
cname<-c("dates","ns","metric","mod1", "mod2","mod3", "mod4", "mod5", "mod6", "mod7","mod8")
|
633 |
|
|
colnames(tb_metrics2)<-cname
|
634 |
|
|
#colnames(results_table_RMSE)<-cname
|
635 |
|
|
#colnames(results_table_RMSE_f)<-cname
|
636 |
|
|
#tb_diagnostic1<-results_table_RMSE #measures of validation
|
637 |
|
|
#tb_diagnostic2<-results_table_RMSE_f #measures of fit
|
638 |
|
|
|
639 |
|
|
#write.table(tb_diagnostic1, file= paste(path,"/","results_fusion_Assessment_measure1",out_prefix,".txt",sep=""), sep=",")
|
640 |
|
|
|
641 |
|
|
#}
|
642 |
|
|
print(paste(sampling_dat$date[i],"processed"))
|
643 |
|
|
# Kriging object may need to be modified...because it contains the full image of prediction!!
|
644 |
|
|
##loop through model objects data frame and set field to zero...
|
645 |
|
|
|
646 |
|
|
mod_obj<-list(mod1,mod2,mod3,mod4,mod5,mod6,mod7,mod8,mod9a,mod9b)
|
647 |
|
|
names(mod_obj)<-c("mod1","mod2","mod3","mod4","mod5","mod6","mod7","mod8","mod9a","mod9b") #generate names automatically??
|
648 |
|
|
#results_list<-list(data_s,data_v,tb_metrics1,tb_metrics2)
|
649 |
d13c7dc1
|
Benoit Parmentier
|
#results_list<-list(data_s,data_v,tb_metrics1,tb_metrics2,mod_obj)
|
650 |
|
|
results_list<-list(data_s,data_v,tb_metrics1,tb_metrics2,mod_obj,data_month)
|
651 |
|
|
names(results_list)<-c("data_s","data_v","tb_metrics1","tb_metrics2","mod_obj","data_month")
|
652 |
e64dc6d0
|
Benoit Parmentier
|
save(results_list,file= paste(path,"/","results_list_metrics_objects_",sampling_dat$date[i],"_",sampling_dat$prop[i],"_",sampling_dat$run_samp[i],
|
653 |
|
|
out_prefix,".RData",sep=""))
|
654 |
|
|
return(results_list)
|
655 |
|
|
#return(tb_diagnostic1)
|
656 |
|
|
}
|