##################    Functions for use in the raster prediction stage   #######################################
############################ Interpolation in a given tile/region ##########################################
#This script contains 6 functions used in the interpolation of temperature in the specified study/processing area:
# 1)predict_raster_model<-function(in_models,r_stack,out_filename)
# 2)fit_models<-function(list_formulas,data_training)
# 3)interpolate_area_fun<-function(method_interp,list_models,s_raster,list_out_filename,data_df) : fit and predict for a given method
# 4)runClim_KGCAI<-function(j,list_param) : function that performs the GAM CAI method
# 5)runClim_KGFusion<-function(j,list_param) : function for the monthly step (climatology) in the fusion method
# 6)run_prediction_daily_deviation <- function(i,list_param) : daily step for the fusion method, performs the daily prediction (formerly runGAMFusion)
#
#AUTHOR: Benoit Parmentier
#DATE: 10/03/2013
#PROJECT: NCEAS INPLANT: Environment and Organisms --TASK#363--

##Comments and TODO:
#This script is meant for general processing, tile by tile or region by region.
#Note that the functions are called from GAM_fusion_analysis_raster_prediction_mutlisampling.R.
#This will be expanded to other methods.
##################################################################################################

predict_raster_model<-function(in_models,r_stack,out_filename){
  #This function performs predictions on a raster grid given a list of input models.
  #Arguments: list of fitted models, raster stack of covariates, list of output file names
  #Output: list of raster file names for the predicted surfaces (NULL where no prediction was made)
  list_rast_pred<-vector("list",length(in_models))
  for (i in 1:length(in_models)){
    mod <-in_models[[i]] #accessing GAM model object "i"
    raster_name<-out_filename[[i]]
    if (inherits(mod,"gam")) { #change to c("gam","autoKrige")
      #raster_pred<- predict(object=r_stack,model=mod,na.rm=FALSE,block.size=1000) #alternative call with explicit block size
      raster_pred<- predict(object=r_stack,model=mod,na.rm=FALSE) #Using the fitted coefficients to predict new values
      names(raster_pred)<-"y_pred"
      writeRaster(raster_pred, filename=raster_name,overwrite=TRUE) #Writing the data in a raster file format (GeoTIFF)
      #print(paste("Interpolation:","mod", i ,sep=" "))
      list_rast_pred[[i]]<-raster_name
    }
    if (inherits(mod,"try-error")) {
      print(paste("no gam model fitted:",mod[1],sep=" ")) #change message for any model type...
    }
  }
  return(list_rast_pred)
}
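
#Example usage (hypothetical objects, kept as comments so nothing runs when this file is sourced):
#  mod_list  <- fit_models(list_formulas,data_month)            #fitted gam models (see fit_models below)
#  out_names <- list("tmax_mod1_pred.tif","tmax_mod2_pred.tif") #one output file name per model
#  rast_pred <- predict_raster_model(mod_list,s_raster,out_names)
#rast_pred holds the names of the rasters written to disk; entries stay NULL for models that could not be predicted.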

fit_models<-function(list_formulas,data_training){
  #This function fits several models and returns the fitted model objects.
  #Arguments: - list of formulas for GAM models
  #           - fitting data in a data.frame or SpatialPointsDataFrame
  #Output: list of model objects
  list_fitted_models<-vector("list",length(list_formulas))
  for (k in 1:length(list_formulas)){
    formula<-list_formulas[[k]]
    mod<- try(gam(formula, data=data_training)) #change to any model!!
    #mod<- try(autoKrige(formula, input_data=data_s,new_data=s_sgdf,data_variogram=data_s))
    model_name<-paste("mod",k,sep="")
    assign(model_name,mod)
    list_fitted_models[[k]]<-mod
  }
  return(list_fitted_models)
}
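
#Example usage (illustrative, hypothetical formulas; commented out on purpose):
#  list_models   <- c("y_var~s(elev_s)","y_var~s(elev_s)+s(LST)")
#  list_formulas <- lapply(list_models,as.formula,env=.GlobalEnv)
#  mod_list      <- fit_models(list_formulas,data_month) #data_month: monthly training data
#Failed fits come back as "try-error" objects and can be screened with inherits(mod_list[[k]],"try-error").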

#Function to glue all methods together... still need to separate the fitting and prediction steps for gwr and kriging; OK for now
interpolate_area_fun <- function(method_interp,list_models,s_raster,list_out_filename,data_df){
  ##Function to fit and predict an interpolation surface
  ##Author: Benoit Parmentier
  ##This function depends on other functions defined elsewhere (fit_models, predict_raster_model, predict_autokrige_gwr_raster_model)!!!
  #Inputs:
  #method_interp: interpolation method, one of "gam","gwr","kriging"
  #list_models: models to fit and predict, given as strings (i.e. character vector)
  #s_raster: raster stack of covariates; layer names must match the columns of the input data.frame
  #data_df: spatial points data.frame with covariates; must be projected and match the covariate names
  #list_out_filename: list of characters containing the output names for the models

  #Convert to formula objects
  list_formulas<-lapply(list_models,as.formula,env=.GlobalEnv) #multiple arguments passed to lapply!!
  cname<-paste("mod",1:length(list_formulas),sep="") #change to more meaningful name?

  names(list_out_filename)<-cname

  ##Now carry out prediction
  if(method_interp=="gam"){

    #First fitting
    mod_list<-fit_models(list_formulas,data_df) #only gam at this stage
    names(mod_list)<-cname

    #if a raster stack is provided then predict the surface
    if(!is.null(s_raster)){
      #Second, predict values for the raster image by providing the fitted model list, raster brick and list of output file names
      rast_pred_list<-predict_raster_model(mod_list,s_raster,list_out_filename)
      names(rast_pred_list)<-cname
    }
  }

  if(method_interp%in%c("gwr","kriging")){

    #Call function to fit and predict gwr and/or kriging
    #month_prediction_obj<-predict_auto_krige_raster_model(list_formulas,s_raster,data_month,list_out_filename)
    rast_prediction_obj<-predict_autokrige_gwr_raster_model(method_interp,list_formulas,s_raster,data_df,list_out_filename)

    mod_list <-rast_prediction_obj$list_fitted_models
    rast_pred_list <-rast_prediction_obj$list_rast_pred
    names(rast_pred_list)<-cname
  }

  #Now prepare the object to return
  interp_area_obj <-list(mod_list,list_formulas,rast_pred_list)
  names(interp_area_obj) <- c("mod_list","list_formulas","rast_pred_list")
  return(interp_area_obj)
}
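
#Example usage (hypothetical inputs; commented out so sourcing this file has no side effects):
#  list_models2    <- c("y_var~s(x,y)","y_var~s(elev_s)")                             #formulas as strings
#  out_names       <- as.list(file.path(out_path,paste("pred_mod",1:2,".tif",sep="")))
#  interp_area_obj <- interpolate_area_fun("gam",list_models2,s_raster,out_names,dmoday)
#interp_area_obj$rast_pred_list holds the predicted raster file names and interp_area_obj$mod_list the fitted models.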

####
#TODO:
#Add a log file and record processing times and output sizes
#Can combine runClim_KGFusion and runClim_KGCAI
runClim_KGCAI <-function(j,list_param){

  #Make this a function with multiple arguments that can be used by mcmapply??
  #Arguments:
  #1)list_index: j
  #2)covar_rast: covariate raster images used in the modeling
  #3)covar_names: names of input variables
  #4)lst_avg: list of LST climatology names, may be removed later on
  #5)list_models: list of input models for bias calculation
  #6)dst: data at the monthly time scale
  #7)var: TMAX or TMIN, variable being interpolated
  #8)y_var_name: output name, not used at this stage
  #9)out_prefix
  #10)out_path

  #The output is a list produced by the function containing:
  #1) clim: list of output names for the raster climatologies
  #2) data_month: monthly training data for the climatology surface modeling
  #3) data_month_v: monthly validation data
  #4) sampling_month_dat: sampling information for the monthly run
  #5) mod: list of model objects fitted
  #6) formulas: list of formulas used in the climatology modeling

  ### PARSING INPUT ARGUMENTS
  #list_param_runGAMFusion<-list(i,clim_yearlist,sampling_obj,var,y_var_name, out_prefix)

  index<-list_param$j
  s_raster<-list_param$covar_rast
  covar_names<-list_param$covar_names
  lst_avg<-list_param$lst_avg
  list_models<-list_param$list_models
  dst<-list_param$dst #monthly station dataset
  var<-list_param$var
  y_var_name<-list_param$y_var_name
  out_prefix<-list_param$out_prefix
  out_path<-list_param$out_path

  #inserted #
  sampling_month_obj<-list_param$sampling_month_obj
  ghcn.month.subsets<-sampling_month_obj$ghcn_data
  sampling_month_dat <- sampling_month_obj$sampling_dat
  sampling_month_index <- sampling_month_obj$sampling_index

  #Model and response variable can be changed without affecting the script
  #prop_month<-0 #proportion retained for validation...
  #run_samp<-1 #sample number, can be introduced later...

  prop_month <- sampling_month_dat$prop[j] #proportion retained for validation...
  run_samp <- sampling_month_dat$run_samp[j] #sample number if multisampling... will need to create multiple predictions at the daily step!!! could be complicated
  #one possibility is to average per proportion !!!

  date_month <-strptime(sampling_month_dat$date[j], "%Y%m%d") # interpolation date being processed
  month_no <-strftime(date_month, "%m") # current month of the date being processed
  LST_month<-paste("mm_",month_no,sep="") # name of LST month to be matched
  LST_name <-LST_month

  #### STEP 2: PREPARE DATA

  #change here... use training data...
  ###Regression part 1: Creating a validation dataset by creating training and testing datasets

  #LST_name <-lst_avg[j] # name of LST month to be matched
  #data_month$LST<-data_month[[LST_name]]

  dataset_month <-ghcn.month.subsets[[j]]
  mod_LST <- ghcn.month.subsets[[j]][,match(LST_month, names(ghcn.month.subsets[[j]]))] #Match interpolation date and monthly LST average
  dataset_month$LST <- as.data.frame(mod_LST)[,1] #Add the variable LST to the dataset
  #change here...
  dst$LST<-dst[[LST_month]] #Add the variable LST to the monthly dataset
  proj_str<-proj4string(dst) #get the local projection information from the monthly data

  #TMax to model..., add precip later
  if (var=="TMAX"){
    dataset_month$y_var<-dataset_month$TMax #Adding TMax as the variable modeled
  }
  if (var=="TMIN"){
    dataset_month$y_var<-dataset_month$TMin #Adding TMin as the variable modeled
  }

  ind.training <- sampling_month_index[[j]]
  ind.testing <- setdiff(1:nrow(dataset_month), ind.training)
  data_month_s <- dataset_month[ind.training, ] #Training dataset currently used in the modeling
  data_month_v <- dataset_month[ind.testing, ] #Testing/validation dataset using the input sampling

  data_month <- data_month_s #training data for the monthly predictions...

  #date_proc<-strptime(sampling_dat$date[i], "%Y%m%d") # interpolation date being processed
  #mo<-as.integer(strftime(date_proc, "%m")) # current month of the date being processed
  #day<-as.integer(strftime(date_proc, "%d"))
  #year<-as.integer(strftime(date_proc, "%Y"))
  ## end of pasted

  #end of insert...

  #Fit gam models using the data and list of formulas

  list_formulas<-lapply(list_models,as.formula,env=.GlobalEnv) #multiple arguments passed to lapply!!
  cname<-paste("mod",1:length(list_formulas),sep="") #change to more meaningful name?

  #mod_list<-fit_models(list_formulas,data_month) #only gam at this stage
  #cname<-paste("mod",1:length(mod_list),sep="") #change to more meaningful name?

  #Adding layer LST to the raster stack

  pos<-match("LST",names(s_raster)) #Find the position of the layer with name "LST", if not present pos=NA
  s_raster<-dropLayer(s_raster,pos) # If it exists, drop the layer
  LST<-subset(s_raster,LST_name)
  names(LST)<-"LST"
  s_raster<-addLayer(s_raster,LST) #Adding the current month
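
  #Note (added): the lines above swap the generic "LST" layer of the covariate stack for the current month's
  #LST climatology (LST_name), so that model formulas written in terms of "LST" use the correct month.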

  #Now generate the file names for the predictions...
  list_out_filename<-vector("list",length(list_formulas))
  names(list_out_filename)<-cname

  for (k in 1:length(list_out_filename)){
    #j indicates which month is predicted
    data_name<-paste(var,"_clim_month_",as.integer(month_no),"_",cname[k],"_",prop_month,
                     "_",run_samp,sep="")
    raster_name<-file.path(out_path,paste("CAI_",data_name,out_prefix,".tif", sep=""))
    list_out_filename[[k]]<-raster_name
  }

  ## Select the relevant method...

  if (interpolation_method=="gam_CAI"){

    #First fitting
    mod_list<-fit_models(list_formulas,data_month) #only gam at this stage
    names(mod_list)<-cname

    #Second, predict values for the raster image by providing the fitted model list, raster brick and list of output file names
    #now predict values for raster image...
    rast_clim_list<-predict_raster_model(mod_list,s_raster,list_out_filename)
    names(rast_clim_list)<-cname
    #Some models will not be predicted because of the lack of training data... remove empty elements from the list of models

  }

  if (interpolation_method %in% c("kriging_CAI","gwr_CAI")){
    if(interpolation_method=="kriging_CAI"){
      method_interp <- "kriging"
    }else{
      method_interp <- "gwr"
    }
    #Call function to fit and predict gwr and/or kriging
    #month_prediction_obj<-predict_auto_krige_raster_model(list_formulas,s_raster,data_month,list_out_filename)
    month_prediction_obj<-predict_autokrige_gwr_raster_model(method_interp,list_formulas,s_raster,data_month,list_out_filename)

    mod_list <-month_prediction_obj$list_fitted_models
    rast_clim_list <-month_prediction_obj$list_rast_pred
    names(rast_clim_list)<-cname
  }

  rast_clim_list<-rast_clim_list[!sapply(rast_clim_list,is.null)] #remove NULL elements in the list

  #Adding Kriging for Climatology options

  clim_xy<-coordinates(data_month)
  fitclim<-Krig(clim_xy,data_month$y_var,theta=1e5) #use TPS or krige
  #fitclim<-Krig(clim_xy,data_month$TMax,theta=1e5) #use TPS or krige
  mod_krtmp1<-fitclim
  model_name<-"mod_kr"

  clim_rast<-interpolate(LST,fitclim) #interpolation using the interpolate function from the raster package

  #Write out the modeled layers
  data_name<-paste(var,"_clim_month_",as.integer(month_no),"_",model_name,"_",prop_month,
                   "_",run_samp,sep="")
  raster_name_clim<-file.path(out_path,paste("CAI_",data_name,out_prefix,".tif", sep=""))
  writeRaster(clim_rast, filename=raster_name_clim,overwrite=TRUE) #Writing the data in a raster file format (GeoTIFF)

  #Adding to the current objects
  mod_list[[model_name]]<-mod_krtmp1
  #rast_bias_list[[model_name]]<-raster_name_bias
  rast_clim_list[[model_name]]<-raster_name_clim

  #Prepare the object to return
  clim_obj<-list(rast_clim_list,data_month,data_month_v,sampling_month_dat[j,],mod_list,list_formulas)
  names(clim_obj)<-c("clim","data_month","data_month_v","sampling_month_dat","mod","formulas")

  save(clim_obj,file= file.path(out_path,paste("clim_obj_CAI_month_",as.integer(month_no),"_",var,"_",prop_month,
                                               "_",run_samp,"_",out_prefix,".RData",sep="")))

  return(clim_obj)
}
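
#Example of how the parameter list might be assembled before calling the monthly CAI step, e.g. with mclapply
#(hypothetical objects; the actual list is built in GAM_fusion_analysis_raster_prediction_mutlisampling.R):
#  list_param_clim <- list(j=1,covar_rast=s_raster,covar_names=covar_names,lst_avg=lst_avg,
#                          list_models=list_models,dst=dst,var="TMAX",y_var_name="dailyTmax",
#                          out_prefix=out_prefix,out_path=out_path,sampling_month_obj=sampling_month_obj)
#  clim_obj_list <- mclapply(1:12,runClim_KGCAI,list_param=list_param_clim,mc.cores=6) #one run per month
#Note: interpolation_method (e.g. "gam_CAI") is read from the calling environment by the function.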
#

runClim_KGFusion<-function(j,list_param){

  #Make this a function with multiple arguments that can be used by mcmapply??
  #Arguments:
  #1)list_index: j
  #2)covar_rast: covariate raster images used in the modeling
  #3)covar_names: names of input variables
  #4)lst_avg: list of LST climatology names, may be removed later on
  #5)list_models: list of input models for bias calculation
  #6)dst: data at the monthly time scale
  #7)var: TMAX or TMIN, variable being interpolated
  #8)y_var_name: output name, not used at this stage
  #9)out_prefix
  #
  #The output is a list produced by the function containing:
  #1) bias: list of output names for the raster bias surfaces
  #2) clim: list of output names for the raster climatologies
  #3) data_month: monthly training data for the bias surface modeling
  #4) data_month_v: monthly validation data
  #5) sampling_month_dat: sampling information for the monthly run
  #6) mod: list of model objects fitted
  #7) formulas: list of formulas used in the bias modeling

  ### PARSING INPUT ARGUMENTS
  #list_param_runGAMFusion<-list(i,clim_yearlist,sampling_obj,var,y_var_name, out_prefix)

  index<-list_param$j
  s_raster<-list_param$covar_rast
  covar_names<-list_param$covar_names
  lst_avg<-list_param$lst_avg
  list_models<-list_param$list_models
  dst<-list_param$dst #monthly station dataset
  var<-list_param$var
  y_var_name<-list_param$y_var_name
  out_prefix<-list_param$out_prefix
  out_path<-list_param$out_path

  #inserted #
  sampling_month_obj<-list_param$sampling_month_obj
  ghcn.month.subsets<-sampling_month_obj$ghcn_data
  sampling_month_dat <- sampling_month_obj$sampling_dat
  sampling_month_index <- sampling_month_obj$sampling_index

  #Model and response variable can be changed without affecting the script
  #prop_month<-0 #proportion retained for validation...
  #run_samp<-1 #sample number, can be introduced later...

  prop_month <- sampling_month_dat$prop[j] #proportion retained for validation...
  run_samp <- sampling_month_dat$run_samp[j] #sample number if multisampling...
  #will need to create multiple predictions at the daily step!!! could be complicated
  #one possibility is to average per proportion !!!

  date_month <-strptime(sampling_month_dat$date[j], "%Y%m%d") # interpolation date being processed
  month_no <-strftime(date_month, "%m") # current month of the date being processed
  LST_month<-paste("mm_",month_no,sep="") # name of LST month to be matched
  LST_name <-LST_month

  #### STEP 2: PREPARE DATA

  #change here... use training data...
  ###Regression part 1: Creating a validation dataset by creating training and testing datasets

  #LST_name <-lst_avg[j] # name of LST month to be matched
  #data_month$LST<-data_month[[LST_name]]

  dataset_month <-ghcn.month.subsets[[j]]
  mod_LST <- ghcn.month.subsets[[j]][,match(LST_month, names(ghcn.month.subsets[[j]]))] #Match interpolation date and monthly LST average
  dataset_month$LST <- as.data.frame(mod_LST)[,1] #Add the variable LST to the dataset
  #change here...
  dst$LST<-dst[[LST_month]] #Add the variable LST to the monthly dataset
  proj_str<-proj4string(dst) #get the local projection information from the monthly data

  ind.training <- sampling_month_index[[j]]
  ind.testing <- setdiff(1:nrow(dataset_month), ind.training)
  data_month_s <- dataset_month[ind.training, ] #Training dataset currently used in the modeling
  data_month_v <- dataset_month[ind.testing, ] #Testing/validation dataset using the input sampling

  data_month <- data_month_s #training data for the monthly predictions...

  #date_proc<-strptime(sampling_dat$date[i], "%Y%m%d") # interpolation date being processed
  #mo<-as.integer(strftime(date_proc, "%m")) # current month of the date being processed
  #day<-as.integer(strftime(date_proc, "%d"))
  #year<-as.integer(strftime(date_proc, "%Y"))
  ## end of pasted

  #end of insert...09/04

  #### STEP 2: PREPARE DATA (continued)

  #data_month<-dst[dst$month==j,] #Subsetting the dataset for the relevant month of the date being processed
  #LST_name<-lst_avg[j] # name of LST month to be matched
  #data_month$LST<-data_month[[LST_name]]

  #Adding layer LST to the raster stack
  covar_rast<-s_raster
  #names(s_raster)<-covar_names
  pos<-match("LST",names(s_raster)) #Find the position of the layer with name "LST", if not present pos=NA
  s_raster<-dropLayer(s_raster,pos) # If it exists, drop the layer
  LST<-subset(s_raster,LST_name)
  names(LST)<-"LST"
  s_raster<-addLayer(s_raster,LST) #Adding the current month

  #LST bias to model...
  if (var=="TMAX"){
    data_month$LSTD_bias<-data_month$LST-data_month$TMax
    data_month$y_var<-data_month$LSTD_bias #Adding the bias as the variable modeled
  }
  if (var=="TMIN"){
    data_month$LSTD_bias<-data_month$LST-data_month$TMin
    data_month$y_var<-data_month$LSTD_bias #Adding the bias as the variable modeled
  }
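
  #Note (added): LSTD_bias is the monthly LST climatology minus the monthly station mean, so the bias surfaces
  #predicted below can be subtracted from LST to recover the temperature climatology (clim = LST - bias).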

  #If CAI model then...
  #TMax to model..., add precip later
  #if (var=="TMAX"){
  #  dataset_month$y_var<-dataset_month$TMax #Adding TMax as the variable modeled
  #}
  #if (var=="TMIN"){
  #  dataset_month$y_var<-dataset_month$TMin #Adding TMin as the variable modeled
  #}

  #### STEP 3: NOW FIT AND PREDICT MODEL

  list_formulas<-lapply(list_models,as.formula,env=.GlobalEnv) #multiple arguments passed to lapply!!
  cname<-paste("mod",1:length(list_formulas),sep="") #change to more meaningful name?

  #Now generate the file names for the predictions...
  list_out_filename<-vector("list",length(list_formulas))
  names(list_out_filename)<-cname

  ##Change name...
  for (k in 1:length(list_out_filename)){
    #j indicates which month is predicted, var indicates TMIN or TMAX
    data_name<-paste(var,"_bias_LST_month_",as.integer(month_no),"_",cname[k],"_",prop_month,
                     "_",run_samp,sep="")
    raster_name<-file.path(out_path,paste("fusion_",interpolation_method,"_",data_name,out_prefix,".tif", sep=""))
    list_out_filename[[k]]<-raster_name
  }

  #for (k in 1:length(list_out_filename)){
  #  #j indicates which month is predicted
  #  data_name<-paste(var,"_clim_month_",as.integer(month_no),"_",cname[k],"_",prop_month,
  #                   "_",run_samp,sep="")
  #  raster_name<-file.path(out_path,paste("CAI_",data_name,out_prefix,".tif", sep=""))
  #  list_out_filename[[k]]<-raster_name
  #}

  ## Select the relevant method...

  if (interpolation_method=="gam_fusion"){
    #First fitting
    mod_list<-fit_models(list_formulas,data_month) #only gam at this stage
    names(mod_list)<-cname

    #Second, predict values for the raster image by providing the fitted model list, raster brick and list of output file names
    rast_bias_list<-predict_raster_model(mod_list,s_raster,list_out_filename)
    names(rast_bias_list)<-cname
  }

  if (interpolation_method %in% c("kriging_fusion","gwr_fusion")){
    if(interpolation_method=="kriging_fusion"){
      method_interp <- "kriging"
    }else{
      method_interp <- "gwr"
    }
    #Call function to fit and predict gwr and/or kriging
    #month_prediction_obj<-predict_auto_krige_raster_model(list_formulas,s_raster,data_month,list_out_filename)
    month_prediction_obj<-predict_autokrige_gwr_raster_model(method_interp,list_formulas,s_raster,data_month,list_out_filename)

    mod_list <-month_prediction_obj$list_fitted_models
    rast_bias_list <-month_prediction_obj$list_rast_pred
    names(rast_bias_list)<-cname
  }

  #Some models will not be predicted... remove them
  rast_bias_list<-rast_bias_list[!sapply(rast_bias_list,is.null)] #remove NULL elements in the list

  mod_rast<-stack(rast_bias_list) #stack of bias raster images from the models

  rast_clim_list<-vector("list",nlayers(mod_rast))

  names(rast_clim_list)<-names(rast_bias_list)

  for (k in 1:nlayers(mod_rast)){
    clim_fus_rast<-LST-subset(mod_rast,k)
    data_name<-paste(var,"_clim_LST_month_",as.integer(month_no),"_",names(rast_clim_list)[k],"_",prop_month,
                     "_",run_samp,sep="")
    raster_name<-file.path(out_path,paste("fusion_",interpolation_method,"_",data_name,out_prefix,".tif", sep=""))
    rast_clim_list[[k]]<-raster_name
    writeRaster(clim_fus_rast, filename=raster_name,overwrite=TRUE) #Write the fused climatology surface to disk
  }

  #### STEP 4: Adding Kriging for Climatology options
  bias_xy<-coordinates(data_month)
  #fitbias<-Krig(bias_xy,data_month$LSTD_bias,theta=1e5) #use TPS or krige
  fitbias<-try(Krig(bias_xy,data_month$LSTD_bias,theta=1e5)) #use TPS or krige

  model_name<-"mod_kr"

  if (inherits(fitbias,"Krig")){
    #Saving the kriged surface in raster images
    bias_rast<-interpolate(LST,fitbias) #interpolation using the interpolate function from the raster package
    data_name<-paste(var,"_bias_LST_month_",as.integer(month_no),"_",model_name,"_",prop_month,
                     "_",run_samp,sep="")
    raster_name_bias<-file.path(out_path,paste("fusion_",interpolation_method,"_",data_name,out_prefix,".tif", sep=""))
    writeRaster(bias_rast, filename=raster_name_bias,overwrite=TRUE) #Writing the data in a raster file format (GeoTIFF)

    #now the climatology layer
    clim_rast<-LST-bias_rast
    data_name<-paste(var,"_clim_LST_month_",as.integer(month_no),"_",model_name,"_",prop_month,
                     "_",run_samp,sep="")
    raster_name_clim<-file.path(out_path,paste("fusion_",interpolation_method,"_",data_name,out_prefix,".tif", sep=""))
    writeRaster(clim_rast, filename=raster_name_clim,overwrite=TRUE) #Writing the data in a raster file format (GeoTIFF)
    #Adding to the current objects
    mod_list[[model_name]]<-fitbias
    rast_bias_list[[model_name]]<-raster_name_bias
    rast_clim_list[[model_name]]<-raster_name_clim
  }

  if (inherits(fitbias,"try-error")){
    #NEED TO DEAL WITH THIS!!!

    #Adding to the current objects
    mod_list[[model_name]]<-NULL
    rast_bias_list[[model_name]]<-NULL
    rast_clim_list[[model_name]]<-NULL
  }

  #### STEP 5: Prepare the object and return
  #Prepare the object to return
  clim_obj<-list(rast_bias_list,rast_clim_list,data_month,data_month_v,sampling_month_dat[j,],mod_list,list_formulas)
  names(clim_obj)<-c("bias","clim","data_month","data_month_v","sampling_month_dat","mod","formulas")

  save(clim_obj,file= file.path(out_path,paste("clim_obj_fusion_month_",as.integer(month_no),"_",var,"_",prop_month,
                                               "_",run_samp,"_",out_prefix,".RData",sep="")))
  return(clim_obj)
}
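
#The fusion monthly step is called the same way as runClim_KGCAI (hypothetical, not run):
#  clim_obj_fus <- runClim_KGFusion(1,list_param_clim) #list_param_clim as sketched after runClim_KGCAI above
#clim_obj_fus$bias and clim_obj_fus$clim then hold the bias and climatology raster file names for that month.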

## Run function for kriging...?

#runGAMFusion <- function(i,list_param) { # loop over dates
run_prediction_daily_deviation <- function(i,list_param) { # loop over dates
  #This function produces the daily predictions using the monthly predicted climatology surfaces.
  #The output includes both the daily predictions and the daily deviations from the monthly step.

  #### Change this to allow explicit arguments...
  #Arguments:
  #1)index: loop list index for the individual run/fit
  #2)clim_year_list: list of climatology files for all models...(12*nb of models)
  #3)sampling_obj: contains the data per date/fit and the sampling information
  #4)dst: data at the monthly time scale
  #5)var: variable predicted - TMAX or TMIN
  #6)y_var_name: name of the variable predicted - dailyTmax, dailyTmin
  #7)out_prefix
  #8)out_path
  #9)list_models2: interpolation model formulas as strings
  #10)interp_method2: "gam","gwr","kriging"
  #11)s_raster: stack of covariates and other variables

  #The output is a list produced by the function containing:
  #1) list_temp: y_var_name
  #2) rast_clim_list: list of files for the temperature climatology predictions
  #3) delta: list of files for the temperature delta (daily deviation) predictions
  #4) data_s: training data
  #5) data_v: testing data
  #6) sampling_dat: sampling information for the current prediction (date, proportion of holdout and sample number)
  #7) mod_kr: kriging delta fit, fields package model object

  ### PARSING INPUT ARGUMENTS

  #list_param_runGAMFusion<-list(i,clim_yearlist,sampling_obj,var,y_var_name, out_prefix)
  rast_clim_yearlist<-list_param$clim_yearlist
  sampling_obj<-list_param$sampling_obj
  ghcn.subsets<-sampling_obj$ghcn_data
  sampling_dat <- sampling_obj$sampling_dat
  sampling <- sampling_obj$sampling_index
  var<-list_param$var
  y_var_name<-list_param$y_var_name
  out_prefix<-list_param$out_prefix
  dst<-list_param$dst #monthly station dataset
  out_path <-list_param$out_path
  list_models2 <-list_param$list_models2
  interp_method2 <- list_param$interp_method2
  s_raster <- list_param$s_raster

  sampling_month_obj <- list_param$sampling_month_obj
  daily_dev_sampling_dat <- list_param$daily_dev_sampling_dat

  index_d <- daily_dev_sampling_dat$index_d[i]
  index_m <- daily_dev_sampling_dat$index_m[i]

  use_clim_image <- list_param$use_clim_image # use the predicted image as a base... rather than the station monthly average for the delta
  join_daily <- list_param$join_daily # join monthly and daily stations before calculating the delta

  #use_clim_image
  ##########
  # STEP 1 - Read in information and get training and testing stations
  ##########

  #use index_d and index_m

  date<-strptime(daily_dev_sampling_dat$date[i], "%Y%m%d") # interpolation date being processed
  month<-strftime(date, "%m") # current month of the date being processed
  LST_month<-paste("mm_",month,sep="") # name of LST month to be matched
  proj_str<-proj4string(dst) #get the local projection information from the monthly data

  ###Regression part 1: Creating a validation dataset by creating training and testing datasets

  data_day<-ghcn.subsets[[index_d]]
  mod_LST <- ghcn.subsets[[index_d]][,match(LST_month, names(ghcn.subsets[[index_d]]))] #Match interpolation date and monthly LST average
  data_day$LST <- as.data.frame(mod_LST)[,1] #Add the variable LST to the dataset
  dst$LST<-dst[[LST_month]] #Add the variable LST to the monthly dataset

  ind.training<-sampling[[index_d]]
  ind.testing <- setdiff(1:nrow(data_day), ind.training)
  data_s <- data_day[ind.training, ] #Training dataset currently used in the modeling
  data_v <- data_day[ind.testing, ] #Testing/validation dataset using the input sampling

  ns<-nrow(data_s)
  nv<-nrow(data_v)
  #i=1
  date_proc<-strptime(sampling_dat$date[index_d], "%Y%m%d") # interpolation date being processed
  mo<-as.integer(strftime(date_proc, "%m")) # current month of the date being processed
  day<-as.integer(strftime(date_proc, "%d"))
  year<-as.integer(strftime(date_proc, "%Y"))

  #Adding layer LST to the raster stack
  #names(s_raster)<-covar_names
  pos<-match("LST",names(s_raster)) #Find the position of the layer with name "LST", if not present pos=NA
  s_raster<-dropLayer(s_raster,pos) # If it exists, drop the layer
  LST<-subset(s_raster,LST_month)
  names(LST)<-"LST"
  s_raster<-addLayer(s_raster,LST) #Adding the current month

  #Now get the monthly data...

  ghcn.month.subsets<-sampling_month_obj$ghcn_data
  sampling_month_dat <- sampling_month_obj$sampling_dat
  sampling_month_index <- sampling_month_obj$sampling_index

  dataset_month <-ghcn.month.subsets[[index_m]]
  mod_LST <- ghcn.month.subsets[[index_m]][,match(LST_month, names(ghcn.month.subsets[[index_m]]))] #Match interpolation date and monthly LST average
  dataset_month$LST <- as.data.frame(mod_LST)[,1] #Add the variable LST to the dataset
  #change here...
  dst$LST<-dst[[LST_month]] #Add the variable LST to the monthly dataset
  proj_str<-proj4string(dst) #get the local projection information from the monthly data

  ind.training_month <- sampling_month_index[[index_m]]
  ind.testing_month <- setdiff(1:nrow(dataset_month), ind.training_month)
  data_month_s <- dataset_month[ind.training_month, ] #Training dataset currently used in the modeling
  data_month_v <- dataset_month[ind.testing_month, ] #Testing/validation dataset using the input sampling

  modst <- data_month_s #training data for the monthly predictions...

  ##########
  # STEP 2 - CLEAN DATA AND JOIN DAILY TO MONTHLY STATION INFORMATION
  ##########

  #if use join
  #modst<-dst[dst$month==mo,] #Subsetting the dataset for the relevant month of the date being processed

  if (var=="TMIN"){
    modst$LSTD_bias <- modst$LST-modst$TMin #That is the difference between the monthly LST mean and the monthly station mean
  }
  if (var=="TMAX"){
    modst$LSTD_bias <- modst$LST-modst$TMax #That is the difference between the monthly LST mean and the monthly station mean
  }
  #This may be unnecessary since LSTD_bias is already in dst?? check the info
  #Some loss of observations: LSTD_bias for January has only 56 out of 66 possible TMIN!!! We may need to look into this issue
  #to avoid some losses of station data...

  #Clean up this part: make this a function call
  x<-as.data.frame(data_v)
  d<-as.data.frame(data_s)
  for (j in 1:nrow(x)){
    if (x$value[j]== -999.9){
      x$value[j]<-NA
    }
  }
  for (j in 1:nrow(d)){
    if (d$value[j]== -999.9){
      d$value[j]<-NA
    }
  }
  pos<-match("value",names(d)) #Find the column with name "value"
  #names(d)[pos]<-c("dailyTmax")
  names(d)[pos]<-y_var_name
  pos<-match("value",names(x)) #Find the column with name "value"
  names(x)[pos]<-y_var_name
  pos<-match("station",names(d)) #Find the column with the station ID
  names(d)[pos]<-c("id")
  pos<-match("station",names(x)) #Find the column with the station ID
  names(x)[pos]<-c("id")
  pos<-match("station",names(modst)) #Find the column with the station ID
  names(modst)[pos]<-c("id") #modst contains the average tmax per month for every station...

  ##########
  # STEP 3 - interpolate the daily delta across space
  ##########

  #if images are used
  # extract from image
  #Change to take into account TMin and TMax

  if(use_clim_image==FALSE){

    #must join daily and monthly data first...

    dmoday <-merge(modst,d,by="id",suffixes=c("",".y2"))
    xmoday <-merge(modst,x,by="id",suffixes=c("",".y2"))
    mod_pat<-glob2rx("*.y2") #remove duplicate columns that have ".y2" in their names
    var_pat<-grep(mod_pat,names(dmoday),value=FALSE) # grep with value=FALSE returns the indices of the matching columns
    dmoday<-dmoday[,-var_pat] #dropping the duplicate columns
    mod_pat<-glob2rx("*.y2")
    var_pat<-grep(mod_pat,names(xmoday),value=FALSE) # grep with value=FALSE returns the indices of the matching columns
    xmoday<-xmoday[,-var_pat] #Removing duplicate columns

    data_v<-xmoday

    #coords <-dmoday[,c("coords.x1","coords.x2")]
    coords <-dmoday[,c("x","y")]
    coordinates(dmoday)<-coords
    proj4string(dmoday)<-proj_str

    #dmoday contains the daily tmax values for training with TMax/TMin being the monthly station tmax/tmin mean
    #xmoday contains the daily tmax values for validation with TMax/TMin being the monthly station tmax/tmin mean

    if (var=="TMIN"){
      daily_delta <-dmoday$dailyTmin-dmoday$TMin #the daily delta is the difference between the daily and monthly temperatures
    }
    if (var=="TMAX"){
      daily_delta <- dmoday$dailyTmax-dmoday$TMax
    }
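
    #Note (added): in this branch the daily deviation is computed at the stations as the daily observation minus
    #the monthly station mean and then kriged; when use_clim_image==TRUE below, the deviation is computed against
    #the predicted monthly climatology rasters instead.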
    #daily_delta <- dmoday[[y_var_name]] -
    #only one delta in this case!!!
    #list(mod)

    if(is.null(list_models2)){ #change here...

      list_daily_delta_rast <- vector("list",length=1) #only one delta surface in this case!!
      list_mod_krtmp2 <- vector("list",length=1) #only one delta model in this case!!

      model_name<-paste("mod_stat_kr",sep="_")
      daily_delta_xy<-as.matrix(cbind(dmoday$x,dmoday$y))
      fitdelta<-Krig(daily_delta_xy,daily_delta,theta=1e5) #use TPS or krige
      mod_krtmp2 <- fitdelta
      #names(mod_krtmp2)[k] <- model_name
      #data_s$daily_delta<-daily_delta
      #rast_clim_list<-rast_clim_yearlist[[index_m]] #select the relevant monthly climatology image ...
      rast_clim_list<-rast_clim_yearlist[[index_m]] #select the relevant monthly climatology image ...
      rast_clim_mod <- stack(rast_clim_list)
      names(rast_clim_mod) <- names(rast_clim_list)
      rast_clim_month <- subset(rast_clim_mod,1) #example layer to interpolate to

      daily_delta_rast<-interpolate(rast_clim_month,fitdelta) #Interpolation of the daily deviation
      #there is only one daily deviation (delta) surface in this case

      #Too many I/O operations run out of swap memory on atlas
      #Saving the kriged surface in raster images
      data_name<-paste("daily_delta_",y_var_name,"_",model_name,"_",sampling_month_dat$prop[index_m],"_",sampling_month_dat$run_samp[index_m],"_",
                       sampling_dat$date[index_d],"_",sampling_dat$prop[index_d],"_",sampling_dat$run_samp[index_d],sep="")
      raster_name_delta<-file.path(out_path,paste(interpolation_method,"_",var,"_",data_name,out_prefix,".tif", sep=""))
      writeRaster(daily_delta_rast, filename=raster_name_delta,overwrite=TRUE) #Writing the data in a raster file format (GeoTIFF)

      list_daily_delta_rast[[1]] <- raster_name_delta
      list_mod_krtmp2[[1]] <- mod_krtmp2
    }

    if(!is.null(list_models2)){ #change here...
      list_daily_delta_rast <- vector("list",length=1) #several delta surfaces in this case but stored as one list!!
      list_mod_krtmp2 <- vector("list",length=1) #several delta models in this case but stored as one list!!

      dev_mod_name<-paste("dev_mod",1:length(list_models2),sep="") #change to more meaningful name?
      model_name<-paste("mod_stat_",sep="_")
      #Now generate the file names for the predictions...
      list_out_filename<-vector("list",length(list_models2))
      names(list_out_filename)<- dev_mod_name

      ##Change name...
      for (j in 1:length(list_out_filename)){
        #j indicates which month is predicted, var indicates TMIN or TMAX
        data_name<-paste("daily_delta_",y_var_name,"_",model_name,"_",sampling_month_dat$prop[index_m],"_",sampling_month_dat$run_samp[index_m],"_",
                         sampling_dat$date[index_d],"_",sampling_dat$prop[index_d],"_",sampling_dat$run_samp[index_d],
                         "_",interp_method2,"_",dev_mod_name[j],sep="")
        raster_name_delta<-file.path(out_path,paste(interpolation_method,"_",var,"_",data_name,out_prefix,".tif", sep=""))

        list_out_filename[[j]]<-raster_name_delta
      }

      #Now call the function

      #for (j in 1:length(list_models2)){
      dmoday$y_var <- daily_delta
      #coordinates(data_s)<-cbind(data_s$x,data_s$y)
      #proj4string(data_s)<-proj_str
      #coordinates(data_v)<-cbind(data_v$x,data_v$y)
      #proj4string(data_v)<-proj_str

      interp_area_obj <-interpolate_area_fun(interp_method2,list_models2,s_raster,list_out_filename,dmoday)
      rast_pred_list <- interp_area_obj$rast_pred_list
      rast_pred_list <-rast_pred_list[!sapply(rast_pred_list,is.null)] #remove NULL elements in the list
      list_daily_delta_rast[[1]] <-rast_pred_list
      #names(list_daily_delta_rast) <- names(daily_delta_df)
      list_mod_krtmp2[[1]] <-interp_area_obj$mod_list
    }
  }

  if(use_clim_image==TRUE){

    # The user can choose to join daily and monthly stations before interpolation:
    #-this ensures that the delta difference is "more" exact since its starting point is based on the average value, but there is a risk of losing some stations
    #may need to change this option later!!
    #if join_daily is TRUE then the daily stations used as training will match the monthly stations used as training

    if(join_daily==TRUE){
      dmoday <-merge(modst,d,by="id",suffixes=c("",".y2"))
      xmoday <-merge(modst,x,by="id",suffixes=c("",".y2"))
      mod_pat<-glob2rx("*.y2") #remove duplicate columns that have ".y2" in their names
      var_pat<-grep(mod_pat,names(dmoday),value=FALSE) # grep with value=FALSE returns the indices of the matching columns
      dmoday<-dmoday[,-var_pat] #dropping the duplicate columns
      mod_pat<-glob2rx("*.y2")
      var_pat<-grep(mod_pat,names(xmoday),value=FALSE) # grep with value=FALSE returns the indices of the matching columns
      xmoday<-xmoday[,-var_pat] #Removing duplicate columns

      data_v<-xmoday

    }else{
      dmoday<-d
      data_v<-x
    }

    #dmoday contains the daily tmax values for training with TMax/TMin being the monthly station tmax/tmin mean
    #xmoday contains the daily tmax values for validation with TMax/TMin being the monthly station tmax/tmin mean

    #coords <-dmoday[,c("coords.x1","coords.x2")]
    coords <-dmoday[,c("x","y")]
    coordinates(dmoday)<-coords
    proj4string(dmoday)<-proj_str

    #Now compute the daily delta deviation from the climatology layer:

    rast_clim_list<-rast_clim_yearlist[[index_m]] #select the relevant monthly climatology image ...
    rast_clim_mod <- stack(rast_clim_list)
    names(rast_clim_mod) <- names(rast_clim_list)
    extract_data_s <-extract(rast_clim_mod,dmoday,df=TRUE)
    #list_daily_delta
    daily_delta_df <- dmoday[[y_var_name]] - extract_data_s
    daily_delta_df <- daily_delta_df[,-1]
    names(daily_delta_df) <- paste(names(daily_delta_df),"_del",sep="")

    names(extract_data_s) <- paste(names(extract_data_s),"_m",sep="") # "m" for monthly predictions...
    dmoday <-spCbind(dmoday,extract_data_s) #contains the predicted clim at the locations
    dmoday <-spCbind(dmoday,daily_delta_df) #contains the daily deviations at the locations
    #Now krige for every model in a loop
    list_mod_krtmp2 <- vector("list",length=nlayers(rast_clim_mod))
    list_daily_delta_rast <- vector("list",length=nlayers(rast_clim_mod))
    names(list_daily_delta_rast) <- names(daily_delta_df)
    names(list_mod_krtmp2) <- names(daily_delta_df)
    for(k in 1:nlayers(rast_clim_mod)){

      daily_delta <- daily_delta_df[[k]] #Current daily deviation being processed: the reference monthly prediction varies...
      #model_name<-paste("mod_kr","day",sep="_")
      model_name<- names(daily_delta_df)[k]

      if(is.null(list_models2)){
        daily_delta_xy<-as.matrix(cbind(dmoday$x,dmoday$y))
        fitdelta<-Krig(daily_delta_xy,daily_delta,theta=1e5) #use TPS or krige
        list_mod_krtmp2[[k]] <-fitdelta
        names(list_mod_krtmp2)[k] <- model_name
        #data_s$daily_delta<-daily_delta
        #rast_clim_list<-rast_clim_yearlist[[index_m]] #select the relevant monthly climatology image ...
        rast_clim_month <- subset(rast_clim_mod,1) #example layer to interpolate to

        daily_delta_rast<-interpolate(rast_clim_month,fitdelta) #Interpolation of the daily deviation surface...

        #list_daily_delta_rast[[k]] <- raster_name_delta

        data_name<-paste("daily_delta_",y_var_name,"_",model_name,"_",sampling_month_dat$prop[index_m],"_",sampling_month_dat$run_samp[index_m],"_",
                         sampling_dat$date[index_d],"_",sampling_dat$prop[index_d],"_",sampling_dat$run_samp[index_d],sep="")
        raster_name_delta<-file.path(out_path,paste(interpolation_method,"_",var,"_",data_name,out_prefix,".tif", sep=""))

        writeRaster(daily_delta_rast, filename=raster_name_delta,overwrite=TRUE) #Writing the data in a raster file format (GeoTIFF)
        #writeRaster(r_spat, NAflag=NA_flag_val,filename=raster_name,bylayer=TRUE,bandorder="BSQ",overwrite=TRUE)

        #raster_name_delta <- list_daily_delta_rast
        #mod_krtmp2 <- list_mod_krtmp2
        list_daily_delta_rast[[k]] <- raster_name_delta

      }

      if (!is.null(list_models2)){

        #list_formulas<-lapply(list_models,as.formula,env=.GlobalEnv) #multiple arguments passed to lapply!!
        dev_mod_name<-paste("dev_mod",1:length(list_models2),sep="") #change to more meaningful name?

        #Now generate the file names for the predictions...
        list_out_filename<-vector("list",length(list_models2))
        names(list_out_filename)<- dev_mod_name

        ##Change name...
        for (j in 1:length(list_out_filename)){
          #j indicates which month is predicted, var indicates TMIN or TMAX
          data_name<-paste("daily_delta_",y_var_name,"_",model_name,"_",sampling_month_dat$prop[index_m],"_",sampling_month_dat$run_samp[index_m],"_",
                           sampling_dat$date[index_d],"_",sampling_dat$prop[index_d],"_",sampling_dat$run_samp[index_d],
                           "_",interp_method2,"_",dev_mod_name[j],sep="")
          raster_name_delta<-file.path(out_path,paste(interpolation_method,"_",var,"_",data_name,out_prefix,".tif", sep=""))

          list_out_filename[[j]]<-raster_name_delta
        }

        #Now call the function

        #for (j in 1:length(list_models2)){
        dmoday$y_var <- daily_delta
        #coordinates(data_s)<-cbind(data_s$x,data_s$y)
        #proj4string(data_s)<-proj_str
        #coordinates(data_v)<-cbind(data_v$x,data_v$y)
        #proj4string(data_v)<-proj_str

        interp_area_obj <-interpolate_area_fun(interp_method2,list_models2,s_raster,list_out_filename,dmoday)
        rast_pred_list <- interp_area_obj$rast_pred_list
        names(rast_pred_list) <- dev_mod_name
        rast_pred_list <-rast_pred_list[!sapply(rast_pred_list,is.null)] #remove NULL elements in the list
        list_daily_delta_rast[[k]] <-rast_pred_list
        names(list_daily_delta_rast) <- names(daily_delta_df)
        mod_list <-interp_area_obj$mod_list
        names(mod_list) <- dev_mod_name
        list_mod_krtmp2[[k]] <-interp_area_obj$mod_list
      }

    }

    #Too many I/O operations run out of swap memory on atlas
    #Saving the kriged surface in raster images
    #delta_rast_s <-stack(list_daily_delta_rast)
    #names(delta_rast_s) <- names(daily_delta_df)

    #Should check that all delta images have been created for every model!!! remove empty elements from the list!!

    #data_name<-paste("daily_delta_",y_var_name,"_",model_name,"_",sampling_month_dat$prop[index_m],"_",sampling_month_dat$run_samp[index_m],"_",
    #                 sampling_dat$date[index_d],"_",sampling_dat$prop[index_d],"_",sampling_dat$run_samp[index_d],sep="")
    #raster_name_delta<-file.path(out_path,paste(interpolation_method,"_",var,"_",data_name,out_prefix,".tif", sep=""))
    #writeRaster(daily_delta_rast, filename=raster_name_delta,overwrite=TRUE) #Writing the data in a raster file format (GeoTIFF)

    #data_name<-paste("daily_delta_",y_var_name,"_",sampling_month_dat$prop[index_m],"_",sampling_month_dat$run_samp[index_m],"_",
    #                 sampling_dat$date[index_d],"_",sampling_dat$prop[index_d],"_",sampling_dat$run_samp[index_d],sep="")
    #raster_name_delta<-file.path(out_path,paste(interpolation_method,"_",var,"_",data_name,out_prefix,".tif", sep=""))

    #writeRaster(delta_rast_s, filename=raster_name_delta,overwrite=TRUE) #Writing the data in a raster file format (GeoTIFF)
    #writeRaster(r_spat, NAflag=NA_flag_val,filename=raster_name,bylayer=TRUE,bandorder="BSQ",overwrite=TRUE)

    #raster_name_delta <- list_daily_delta_rast
    #mod_krtmp2 <- list_mod_krtmp2
  }

  #########
  # STEP 4 - Calculate daily predictions - T(day) = clim(month) + delta(day)
  #########

  #if(use_clim_image==FALSE){
  #  list_daily_delta_rast <- rep(raster_name_delta,length=nlayers(rast_clim_mod))
  #}
  #Now predict daily values after having selected the relevant month
  temp_list<-vector("list",nlayers(rast_clim_mod))
  for (k in 1:nlayers(rast_clim_mod)){
    if(use_clim_image==TRUE){
      if (is.null(list_models2)){
        daily_delta_rast <- raster(list_daily_delta_rast[[k]]) #There is only one image of deviation per model if list_models2 is NULL
      }
      if (!is.null(list_models2)){ #then possibly multiple daily deviation predictions
        daily_delta_rast <- stack(unlist(list_daily_delta_rast[[k]]))
      }
      #daily_delta_rast <- subset(delta_rast_s,k)
    }
    #if use_clim_image==FALSE then daily_delta_rast is already defined earlier...

    if(use_clim_image==FALSE){
      if (is.null(list_models2)){
        daily_delta_rast <- raster(list_daily_delta_rast[[1]]) #There is only one image of deviation per model if list_models2 is NULL
      }
      if (!is.null(list_models2)){ #then possibly multiple daily deviation predictions, hence use stack
        daily_delta_rast <- stack(unlist(list_daily_delta_rast[[1]]))
      }
      #daily_delta_rast <- subset(delta_rast_s,k)
    }

    #rast_clim_month<-raster(rast_clim_list[[k]])
    rast_clim_month <- subset(rast_clim_mod,k) #long term monthly prediction
    if (is.null(list_models2)){
      temp_predicted<-rast_clim_month + daily_delta_rast
      data_name<-paste(y_var_name,"_predicted_",names(rast_clim_mod)[k],"_",sampling_month_dat$prop[index_m],"_",sampling_month_dat$run_samp[index_m],"_",
                       sampling_dat$date[index_d],"_",sampling_dat$prop[index_d],"_",sampling_dat$run_samp[index_d],sep="")
      raster_name<-file.path(out_path,paste(interpolation_method,"_",data_name,out_prefix,".tif", sep=""))