# 5)runGAMFusion <- function(i,list_param) : daily step for fusion method, performs daily prediction
#
#AUTHOR: Benoit Parmentier
#DATE: 06/05/2013
#PROJECT: NCEAS INPLANT: Environment and Organisms --TASK#363--

##Comments and TODO:

...

  return(list_fitted_models)
}

select_var_stack<-function(r_stack,formula_mod,spdf=TRUE){
  ##Function to return only the relevant layers of the raster stack!!
  #Note that the default behaviour of the function is to remove NA values in the subset
  #of raster layers and return a spdf
  
  ### Start
  
  covar_terms<-all.vars(formula_mod) #all covariate terms...+ y_var
  if (length(covar_terms)==1){
    r_stack_covar<-subset(r_stack,1)
  } #use one layer
  if (length(covar_terms)> 1){
    r_stack_covar<-subset(r_stack,covar_terms[-1])
  }
  if (spdf==TRUE){
    s_sgdf<-as(r_stack_covar,"SpatialGridDataFrame") #Conversion to spatial grid data frame, only convert the necessary layers!!
    s_spdf<-as.data.frame(s_sgdf) #Note that this automatically removes all NA rows
    s_spdf<-na.omit(s_spdf) #removes all rows that have NA...
    coords<- s_spdf[,c('s1','s2')]
    coordinates(s_spdf)<-coords
    proj4string(s_spdf)<-proj4string(s_sgdf) #Need to assign coordinates...
    #raster_pred <- rasterize(s_spdf,r1,"pred",fun=mean)
    covar_obj<-s_spdf
  } else{
    covar_obj<-r_stack_covar
  }
  
  return(covar_obj)
}
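
#Example call (illustrative only; the layer names "elev" and "LST" are hypothetical covariates assumed to exist in s_raster):
#formula_example<-as.formula("y_var ~ elev + LST")
#covar_spdf<-select_var_stack(s_raster,formula_example,spdf=TRUE)  #covariates as a SpatialPointsDataFrame, NA rows removed
#covar_rast<-select_var_stack(s_raster,formula_example,spdf=FALSE) #same covariates kept as a raster subset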

remove_na_spdf<-function(col_names,d_spdf){
  #Purpose: remove NA items from a subset of columns of a SpatialPointsDataFrame
  x<-d_spdf
  coords <-coordinates(x)
  x$s1<-coords[,1]
  x$s2<-coords[,2]
  
  x1<-x[c(col_names,"s1","s2")]
  #x1$y_var <-data_training$y_var
  #names(x1)
  x1<-na.omit(as.data.frame(x1))
  coordinates(x1)<-x1[c("s1","s2")]
  proj4string(x1)<-proj4string(d_spdf)
  return(x1)
}
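
#Example call (illustrative only; data_training here stands for any SpatialPointsDataFrame of training stations):
#col_names_example<-all.vars(formula_example) #e.g. c("y_var","elev","LST"), hypothetical names
#data_fit_example<-remove_na_spdf(col_names_example,data_training) #same points, NA rows dropped, original projection kept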

predict_auto_krige_raster_model<-function(list_formulas,r_stack,data_training,out_filename){
  #This function fits an automatic kriging model for each input formula and predicts it over a raster grid.
  #Arguments: list of model formulas, raster stack of covariates, training data (SpatialPointsDataFrame), list of output raster names
  #Output: list containing the fitted autoKrige models and the file names of the predicted rasters
  
  list_fitted_models<-vector("list",length(list_formulas))
  list_rast_pred<-vector("list",length(list_formulas))
  #s_sgdf<-as(r_stack,"SpatialGridDataFrame") #Conversion to spatial grid data frame, only convert the necessary layers!!
  proj4string(data_training) <- projection(r_stack)
  for (k in 1:length(list_formulas)){
    formula_mod<-list_formulas[[k]]
    raster_name<-out_filename[[k]]
    #mod<- try(gam(formula, data=data_training)) #change to any model!!
    s_spdf<-select_var_stack(r_stack,formula_mod,spdf=TRUE)
    col_names<-all.vars(formula_mod)
    if (length(col_names)==1){
      data_fit <-data_training
    }else{
      data_fit <- remove_na_spdf(col_names,data_training)
    }
    
    mod <- try(autoKrige(formula_mod, input_data=data_fit,new_data=s_spdf,data_variogram=data_fit))
    #mod <- try(autoKrige(formula_mod, input_data=data_training,new_data=s_spdf,data_variogram=data_training))
    model_name<-paste("mod",k,sep="")
    assign(model_name,mod)
    
    if (inherits(mod,"autoKrige")) { #change to c("gam","autoKrige")
      rpred<-mod$krige_output #Extracting the SpatialGridDataFrame from the autoKrige object
      y_pred<-rpred$var1.pred #is the order the same?
      raster_pred <- rasterize(rpred,r_stack,"var1.pred",fun=mean)
      names(raster_pred)<-"y_pred"
      writeRaster(raster_pred, filename=raster_name,overwrite=TRUE) #Writing the data in a raster file format...
      #print(paste("Interpolation:","mod", k ,sep=" "))
      list_rast_pred[[k]]<-raster_name
      mod$krige_output<-NULL #drop the kriged grid so the returned/saved object stays small
      list_fitted_models[[k]]<-mod
      
    }
    if (inherits(mod,"try-error")) {
      print(paste("no autokrige model fitted:",mod,sep=" ")) #change message for any model type...
      list_fitted_models[[k]]<-mod
    }
  }
  
  day_prediction_obj <-list(list_fitted_models,list_rast_pred)
  names(day_prediction_obj) <-c("list_fitted_models","list_rast_pred")
  return(day_prediction_obj)
}
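
#Example call (illustrative only; the objects mirror those built further below in runKriging_day_fun):
#day_prediction_obj<-predict_auto_krige_raster_model(list_formulas,s_raster,data_s,list_out_filename)
#day_prediction_obj$list_fitted_models #autoKrige fits, one per formula (krige_output removed)
#day_prediction_obj$list_rast_pred     #file names of the predicted rasters written to disk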

fit_models<-function(list_formulas,data_training){
...

}

#Maybe should just use the same code...

runKriging_day_fun <-function(i,list_param){

  #Make this a function with multiple arguments that can be used by mcmapply??
  #Arguments:
...
  #3)covar_names: names of input variables
  #4)lst_avg: list of LST climatology names, may be removed later on
  #5)list_models: list of input models for bias calculation
  #6)sampling_obj: data at the daily time scale
  #7)var: TMAX or TMIN, variable being interpolated
  #8)y_var_name: output name, not used at this stage
  #9)out_prefix
  #10) out_path
  
  #The output is a list of four shapefile names produced by the function:
  #1) clim: list of output names for raster climatologies
  #2) data_month: monthly training data for bias surface modeling

...

  ### PARSING INPUT ARGUMENTS
  #list_param_runGAMFusion<-list(i,clim_yearlist,sampling_obj,var,y_var_name, out_prefix)
  
  index<-list_param$list_index
  s_raster<-list_param$covar_rast
  covar_names<-list_param$covar_names
  lst_avg<-list_param$lst_avg
  list_models<-list_param$list_models
  dst<-list_param$dst #monthly station dataset
  sampling_obj<-list_param$sampling_obj
  var<-list_param$var
  y_var_name<-list_param$y_var_name
  interpolation_method <-list_param$interpolation_method
  out_prefix<-list_param$out_prefix
  out_path<-list_param$out_path
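
  #Example construction of list_param (illustrative only; the values shown are hypothetical placeholders,
  #but the field names match the ones read above):
  #list_param_example<-list(list_index=i,covar_rast=s_raster,covar_names=covar_names,lst_avg=lst_avg,
  #                         list_models=list_models,dst=dst,sampling_obj=sampling_obj,var="TMAX",
  #                         y_var_name="dailyTmax",interpolation_method="kriging_daily",
  #                         out_prefix=out_prefix,out_path=out_path)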

  ghcn.subsets<-sampling_obj$ghcn_data_day
  sampling_dat <- sampling_obj$sampling_dat
  sampling <- sampling_obj$sampling_index
  
  ##########
  # STEP 1 - Read in information and get training and testing stations
...
  month<-strftime(date, "%m") # current month of the date being processed
  LST_month<-paste("mm_",month,sep="") # name of LST month to be matched
  proj_str<-proj4string(dst) #get the local projection information from monthly data
  
  #Adding layer LST to the raster stack
  #names(s_raster)<-covar_names
  pos<-match("LST",names(s_raster)) #Find the position of the layer with name "LST", if not present pos=NA
  s_raster<-dropLayer(s_raster,pos) # If it exists drop layer
  LST<-subset(s_raster,LST_month)
  names(LST)<-"LST"
  s_raster<-addLayer(s_raster,LST) #Adding current month
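  #For example (illustrative): for a January date, month is "01", LST_month is "mm_01", and the layer
  #added back to s_raster under the name "LST" is the January LST climatology.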
  
  ###Regression part 1: Creating a validation dataset by creating training and testing datasets
  data_day<-ghcn.subsets[[i]]
  mod_LST <- ghcn.subsets[[i]][,match(LST_month, names(ghcn.subsets[[i]]))] #Match interpolation date and monthly LST average
  data_day$LST <- as.data.frame(mod_LST)[,1] #Add the variable LST to the daily dataset
  dst$LST<-dst[[LST_month]] #Add the variable LST to the monthly dataset
  
  ind.training<-sampling[[i]]
...
  day<-as.integer(strftime(date_proc, "%d"))
  year<-as.integer(strftime(date_proc, "%Y"))
  
  #### STEP 2: PREPARE DATA
  
  #Clean out this part: make this a function call
  x<-as.data.frame(data_v)
  d<-as.data.frame(data_s)
  for (j in 1:nrow(x)){
...
    }
  }
  pos<-match("value",names(d)) #Find column with name "value"
  names(d)[pos]<-y_var_name
  pos<-match("value",names(x)) #Find column with name "value"
  names(x)[pos]<-y_var_name
...
  names(d)[pos]<-c("id")
  pos<-match("station",names(x)) #Find column with name station ID
  names(x)[pos]<-c("id")
  
  data_s<-d
  data_v<-x
  
  data_s$y_var <- data_s[[y_var_name]] #Adding the variable modeled
  data_v$y_var <- data_v[[y_var_name]]
  
  #Adding back spatial definition
  
  coordinates(data_s)<-cbind(data_s$x,data_s$y)
  proj4string(data_s)<-proj_str
  coordinates(data_v)<-cbind(data_v$x,data_v$y)
  proj4string(data_v)<-proj_str
  #### STEP 3: NOW FIT AND PREDICT MODEL
  
  list_formulas<-lapply(list_models,as.formula,env=.GlobalEnv) #multiple arguments passed to lapply!!
  #model names
  cname<-paste("mod",1:length(list_formulas),sep="") #change to more meaningful names?
  names(list_formulas) <- cname
  #Now generate output file names for the predictions...
  list_out_filename<-vector("list",length(list_formulas))
  names(list_out_filename)<-cname
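  
  #Example of list_models content (illustrative only; the formula strings and covariate names are hypothetical,
  #the response must be y_var as set above):
  #list_models_example<-c("y_var ~ lat*lon + elev","y_var ~ LST")
  #lapply(list_models_example,as.formula,env=.GlobalEnv)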
  
  for (k in 1:length(list_out_filename)){
    #i indicates which day is predicted, y_var_name indicates TMIN or TMAX
    data_name<-paste(y_var_name,"_predicted_",names(list_formulas)[k],"_",
                     sampling_dat$date[i],"_",sampling_dat$prop[i],
                     "_",sampling_dat$run_samp[i],sep="")
    raster_name<-file.path(out_path,paste(interpolation_method,"_",data_name,out_prefix,".tif", sep=""))
    list_out_filename[[k]]<-raster_name
  }
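  #For a hypothetical run this yields names such as
  #<out_path>/kriging_daily_dailyTmax_predicted_mod1_20100101_0.3_1<out_prefix>.tif (date, holdout proportion
  #and sample number come from sampling_dat; all specific values here are illustrative).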
  
  #now fit and predict values for raster image...
  
  if (interpolation_method=="gam_daily"){
    mod_list<-fit_models(list_formulas,data_s) #only gam at this stage
    names(mod_list)<-cname
    rast_day_list<-predict_raster_model(mod_list,s_raster,list_out_filename)
    names(rast_day_list)<-cname
  }
  
  if (interpolation_method=="kriging_daily"){
    day_prediction_obj<-predict_auto_krige_raster_model(list_formulas,s_raster,data_s,list_out_filename)
    mod_list <-day_prediction_obj$list_fitted_models
    rast_day_list <-day_prediction_obj$list_rast_pred
    names(rast_day_list)<-cname
  }
  
  #Some models will not be predicted...remove them
  rast_day_list<-rast_day_list[!sapply(rast_day_list,is.null)] #remove NULL elements in list
  
  #Prepare object to return
  
  day_obj<- list(rast_day_list,data_s,data_v,sampling_dat[i,],mod_list,list_models)
  obj_names<-c(y_var_name,"data_s","data_v","sampling_dat","mod","formulas")
  names(day_obj)<-obj_names
  save(day_obj,file= file.path(out_path,paste("day_obj_",interpolation_method,"_",var,"_",sampling_dat$date[i],"_",sampling_dat$prop[i],
                                              "_",sampling_dat$run_samp[i],out_prefix,".RData",sep="")))
  return(day_obj)
  
}
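
#Example driver call (illustrative only; list_param_example as sketched above, one run per date row):
#library(parallel)
#day_obj_list<-mclapply(1:nrow(sampling_obj$sampling_dat),runKriging_day_fun,list_param=list_param_example,mc.cores=4)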

#interpolation day script, adding kriging daily method prediction