Project

General

Profile

« Previous | Next » 

Revision 93e451e5

Added by Benoit Parmentier almost 12 years ago

FUSION, adding option to use same proportion of training for fusion and gam

View differences:

climate/research/oregon/interpolation/fusion_gam_prediction_reg.R
1
##################    CLIMATE INTERPOLATION FUSION METHOD   #######################################
2
############################ Merging LST and station data ##########################################
3
#This script interpolates tmax values using MODIS LST and GHCND station data                     #
4
#interpolation area. It requires the text file of stations and a shape file of the study area.   #       
5
#Note that the projection for both GHCND and study area is lonlat WGS84.                         #
6
#AUTHOR: Brian McGill                                                                            #
7
#DATE: 07/11/2012                                                                                 #
8
#PROJECT: NCEAS INPLANT: Environment and Organisms --TASK#363--                                  #
9
###################################################################################################
10

  
11
###Loading R library and packages                                                      
12
library(gtools)                                         # loading some useful tools 
13
library(mgcv)                                           # GAM package by Simon Wood
14
library(sp)                                             # Spatial package with class definition by Bivand et al.
15
library(spdep)                               # Spatial package with methods and spatial stat. by Bivand et al.
16
library(rgdal)                               # GDAL wrapper for R, spatial utilities
17
library(gstat)                               # Kriging and co-kriging by Pebesma et al.
18
library(fields)                              # NCAR Spatial Interpolation methods such as kriging, splines
19
library(raster)                              # Hijmans et al. package for raster processing
20

  
21
### Parameters and argument
22

  
23
infile1<- "ghcn_or_tmax_covariates_06262012_OR83M.shp"             #GHCN shapefile containing variables for modeling 2010                 
24
#infile2<-"list_10_dates_04212012.txt"                     #List of 10 dates for the regression
25
infile2<-"list_365_dates_04212012.txt"
26
infile3<-"LST_dates_var_names.txt"                        #LST dates name
27
infile4<-"models_interpolation_05142012.txt"              #Interpolation model names
28
infile5<-"mean_day244_rescaled.rst"                       #Raster or grid for the locations of predictions
29
infile6<-"LST_files_monthly_climatology.txt"
30
#path<-"/home/parmentier/Data/IPLANT_project/data_Oregon_stations"
31
path<-"/home/parmentier/Data/IPLANT_project/data_Oregon_stations"
32
#path<-"/home/parmentier/Data/IPLANT_project/data_Oregon_stations_07152012"     #Jupiter LOCATION on Atlas for kriging"
33
#path<-"M:/Data/IPLANT_project/data_Oregon_stations"   #Locations on Atlas
34

  
35
#Station location of the study area
36
stat_loc<-read.table(paste(path,"/","location_study_area_OR_0602012.txt",sep=""),sep=",", header=TRUE)
37
#GHCN Database for 1980-2010 for study area (OR) 
38
data3<-read.table(paste(path,"/","ghcn_data_TMAXy1980_2010_OR_0602012.txt",sep=""),sep=",", header=TRUE)
39

  
40
prop<-0.3                                                                           #Proportion of testing retained for validation   
41
#prop<-0.25
42
seed_number<- 100                                                                   #Seed number for random sampling
43
out_prefix<-"_07152012_10d_fusion15"                                                   #User defined output prefix
44
setwd(path)
45
bias_val<-0            #if value 1 then training data is used in the bias surface rather than the all monthly stations
46

  
47
############ START OF THE SCRIPT ##################
48
#
49
#
50
### Step 0/Step 6 in Brian's code...preparing year 2010 data for modeling 
51
#
52

  
53

  
54
###Reading the station data and setting up for models' comparison
55
filename<-sub(".shp","",infile1)             #Removing the extension from file.
56
ghcn<-readOGR(".", filename)                 #reading shapefile 
57

  
58
CRS<-proj4string(ghcn)                       #Storing projection information (ellipsoid, datum,etc.)
59

  
60
mean_LST<- readGDAL(infile5)                 #Reading the whole raster in memory. This provides a grid for kriging
61
proj4string(mean_LST)<-CRS                   #Assigning coordinate information to prediction grid.
62

  
63
ghcn = transform(ghcn,Northness = cos(ASPECT*pi/180)) #Adding a variable to the dataframe
64
ghcn = transform(ghcn,Eastness = sin(ASPECT*pi/180))  #adding variable to the dataframe.
65
ghcn = transform(ghcn,Northness_w = sin(slope*pi/180)*cos(ASPECT*pi/180)) #Adding a variable to the dataframe
66
ghcn = transform(ghcn,Eastness_w = sin(slope*pi/180)*sin(ASPECT*pi/180))  #adding variable to the dataframe.
67

  
68
#Remove NA for LC and CANHEIGHT
69
ghcn$LC1[is.na(ghcn$LC1)]<-0
70
ghcn$LC3[is.na(ghcn$LC3)]<-0
71
ghcn$CANHEIGHT[is.na(ghcn$CANHEIGHT)]<-0
72

  
73
set.seed(seed_number)                        #Using a seed number allow results based on random number to be compared...
74

  
75
dates <-readLines(paste(path,"/",infile2, sep=""))
76
LST_dates <-readLines(paste(path,"/",infile3, sep=""))
77
models <-readLines(paste(path,"/",infile4, sep=""))
78

  
79
#Model assessment: specific diagnostic/metrics for GAM
80
results_AIC<- matrix(1,length(dates),length(models)+3)  
81
results_GCV<- matrix(1,length(dates),length(models)+3)
82
results_DEV<- matrix(1,length(dates),length(models)+3)
83
results_RMSE_f<- matrix(1,length(dates),length(models)+3)  #NOTE(review): dead allocation -- results_RMSE_f is reassigned below with length(models)+4 columns, so this one is never used
84

  
85
#Model assessment: general diagnostic/metrics 
86
results_RMSE <- matrix(1,length(dates),length(models)+4)
87
results_MAE <- matrix(1,length(dates),length(models)+4)
88
results_ME <- matrix(1,length(dates),length(models)+4)       #There are 8+1 models
89
results_R2 <- matrix(1,length(dates),length(models)+4)       #Coef. of determination for the validation dataset
90
results_RMSE_f<- matrix(1,length(dates),length(models)+4)    #RMSE fit, RMSE for the training dataset
91
results_RMSE_f_kr<- matrix(1,length(dates),length(models)+4)
92

  
93
# #Tracking relationship between LST AND LC
94
# cor_LST_LC1<-matrix(1,10,1)      #correlation LST-LC1
95
# cor_LST_LC3<-matrix(1,10,1)      #correlation LST-LC3
96
# cor_LST_tmax<-matrix(1,10,1)     #correlation LST-tmax
97

  
98
#Screening for bad values: value is tmax in this case
99
#ghcn$value<-as.numeric(ghcn$value)
100
ghcn_all<-ghcn
101
ghcn_test<-subset(ghcn,ghcn$value>-150 & ghcn$value<400)
102
ghcn_test2<-subset(ghcn_test,ghcn_test$ELEV_SRTM>0)
103
ghcn<-ghcn_test2
104
#coords<- ghcn[,c('x_OR83M','y_OR83M')]
105

  
106
ghcn.subsets <-lapply(dates, function(d) subset(ghcn, date==d)) #this creates a list of 10 or 365 subsets dataset based on dates
107

  
108
#Start loop here...
109

  
110
## looping through the dates...this is the main part of the code
111
#i=1 #for debugging
112
#j=1 #for debugging
113
for(i in 1:length(dates)){            # start of the for loop #1
114
  
115
  date<-strptime(dates[i], "%Y%m%d")   # interpolation date being processed
116
  month<-strftime(date, "%m")          # current month of the date being processed
117
  LST_month<-paste("mm_",month,sep="") # name of LST month to be matched
118
  
119
  ###Regression part 1: Creating a validation dataset by creating training and testing datasets
120
  
121
  mod_LST <-ghcn.subsets[[i]][,match(LST_month, names(ghcn.subsets[[i]]))]  #Match interpolation date and monthly LST average
122
  ghcn.subsets[[i]] = transform(ghcn.subsets[[i]],LST = mod_LST)            #Add the variable LST to the subset dataset
123
  n<-nrow(ghcn.subsets[[i]])
124
  ns<-n-round(n*prop)   #Create a sample from the data frame with 70% of the rows
125
  nv<-n-ns              #create a sample for validation with prop of the rows
126
  ind.training <- sample(nrow(ghcn.subsets[[i]]), size=ns, replace=FALSE) #This selects the index position for 70% of the rows taken randomly
127
  ind.testing <- setdiff(1:nrow(ghcn.subsets[[i]]), ind.training)
128
  data_s <- ghcn.subsets[[i]][ind.training, ]   #Training dataset currently used in the modeling
129
  data_v <- ghcn.subsets[[i]][ind.testing, ]    #Testing/validation dataset
130
  
131
  #i=1
132
  date_proc<-dates[i]
133
  date_proc<-strptime(dates[i], "%Y%m%d")   # interpolation date being processed
134
  mo<-as.integer(strftime(date_proc, "%m"))          # current month of the date being processed
135
  day<-as.integer(strftime(date_proc, "%d"))
136
  year<-as.integer(strftime(date_proc, "%Y"))
137
  
138
  #setup
139
  
140
  #mo=9           #Commented out by Benoit on June 14
141
  #day=2
142
  #year=2010
143
  datelabel=format(ISOdate(year,mo,day),"%b %d, %Y")
144
  
145
  ###########
146
  #  STEP 1 - 10 year monthly averages
147
  ###########
148
  
149
  #library(raster)
150
  #old<-getwd()
151
  #setwd("c:/data/benoit/data_Oregon_stations_Brian_04242012")
152
  #l=list.files(pattern="mean_month.*rescaled.tif")
153
  #l=list.files(pattern="mean_month.*rescaled.rst")
154
  l <-readLines(paste(path,"/",infile6, sep=""))
155
  
156
  molst<-stack(l)  #Creating a raster stack...
157
  #setwd(old)
158
  molst=molst-273.16  #K->C
159
  idx <- seq(as.Date('2010-01-15'), as.Date('2010-12-15'), 'month')
160
  molst <- setZ(molst, idx)
161
  layerNames(molst) <- month.abb
162
  themolst<-raster(molst,mo) #current month being processed saved in a raster image
163
  plot(themolst)
164
  
165
  ###########
166
  # STEP 2 - Weather station means across same days: Monthly mean calculation
167
  ###########
168
  
169
  # ??? which years & what quality flags???
170
  #select ghcn.id, lat,lon, elev, month, avg(value/10) as "TMax", count(*) as "NumDays" from ghcn, stations where ghcn.id in (select id from stations where state=='OR') and ghcn.id==stations.id and value<>-9999 and year>=2000 and  element=='TMAX' group by stations.id, month;
171
  #below table from above SQL query
172
  #dst<-read.csv('/data/benoit/data_oregon_stations_brian_04242012/station_means.csv',h=T)
173
  
174
  ##Added by Benoit ######
175
  date1<-ISOdate(data3$year,data3$month,data3$day) #Creating a date object from 3 separate column
176
  date2<-as.POSIXlt(as.Date(date1))
177
  data3$date<-date2
178
  d<-subset(data3,year>=2000 & mflag=="0" ) #Selecting dataset 2000-2010 with good quality: 193 stations
179
  #May need some screening??? i.e. range of temp and elevation...
180
  d1<-aggregate(value~station+month, data=d, mean)  #Calculate monthly mean for every station in OR
181
  id<-as.data.frame(unique(d1$station))     #Unique station in OR for year 2000-2010: 193 but 7 loss of monthly avg    
182
  
183
  dst<-merge(d1, stat_loc, by.x="station", by.y="STAT_ID")   #Inner join all columns are retained
184
 
185
  #This allows to change only one name of the data.frame
186
  pos<-match("value",names(dst)) #Find column with name "value"
187
  names(dst)[pos]<-c("TMax")
188
  dst$TMax<-dst$TMax/10                #TMax is the average max temp for months
189
  #dstjan=dst[dst$month==9,]  #dst contains the monthly averages for tmax for every station over 2000-2010
190
  ##############
191
  
192
  modst=dst[dst$month==mo,] #Subsetting dataset for the relevant month of the date being processed
193
  
194
  ##########
195
  # STEP 3 - get LST at stations
196
  ##########
197
  
198
  sta_lola=modst[,c("lon","lat")] #Extracting locations of stations for the current month...
199
  library(rgdal)
200
  proj_str="+proj=lcc +lat_1=43 +lat_2=45.5 +lat_0=41.75 +lon_0=-120.5 +x_0=400000 +y_0=0 +ellps=GRS80 +units=m +no_defs";
201
  # Extract raster cell values at station locations given as lon/lat pairs.
  # r: a raster layer; lat, lon: numeric coordinate vectors (WGS84 lon/lat).
  # Relies on proj_str from the enclosing environment to project the points
  # into the raster's coordinate system before cell lookup.
  lookup <- function(r, lat, lon) {
    pts_xy <- project(cbind(lon, lat), proj_str)
    cell_idx <- cellFromXY(r, pts_xy)
    r[cell_idx]
  }
206
  sta_tmax_from_lst=lookup(themolst,sta_lola$lat,sta_lola$lon) #Extracted values of LST for the stations
207
  
208
  #########
209
  # STEP 4 - bias at stations     
210
  #########
211
  
212
  sta_bias=sta_tmax_from_lst-modst$TMax; #That is the difference between the monthly LST mean and monthly station mean
213
  #Added by Benoit
214
  modst$LSTD_bias<-sta_bias  #Adding bias to data frame modst containing the monthly average for 10 years
215
  
216
  bias_xy=project(as.matrix(sta_lola),proj_str)
217
  # windows()
218
  X11()
219
  plot(modst$TMax,sta_tmax_from_lst,xlab="Station mo Tmax",ylab="LST mo Tmax",main=paste("LST vs TMax for",datelabel,sep=" "))
220
  abline(0,1)
221
  savePlot(paste("LST_TMax_scatterplot_",dates[i],out_prefix,".png", sep=""), type="png")
222
  dev.off()
223
  
224
  ##########
225
  # Moved step 6 to allow training and validation selection for bias surface:07/11
226
  # STEP 6 - return to daily station data & calculate delta=daily T-monthly T from stations
227
  ##########
228
  
229
  #Commmented out by Benoit 06/14
230
  # library(RSQLite)
231
  # m<-dbDriver("SQLite")
232
  # con<-dbConnect(m,dbname='c:/data/ghcn_tmintmax.db')
233
  # querystr=paste("select ghcn.id, value as 'dailyTmax' from ghcn where ghcn.id in (select id from stations where state=='OR') and value<>-9999",
234
  #    "and year==",year,"and month==",mo,"and day==",day,
235
  #                "and element=='TMAX' ")
236
  # rs<-dbSendQuery(con,querystr)
237
  # d<-fetch(rs,n=-1)
238
  # dbClearResult(rs)
239
  # dbDisconnect(con)
240
  # 
241
  # d$dailyTmax=d$dailyTmax/10 #stored as 1/10 degree C to allow integer storage
242
  # dmoday=merge(modst,d,by="id")
243
  ##########################Commented out by Benoit
244
  
245
  #added by Benoit 
246
  #x<-ghcn.subsets[[i]]  #Holds both training and testing for instance 161 rows for Jan 1
247
  x<-data_v
248
  d<-data_s
249
  
250
  pos<-match("value",names(d)) #Find column with name "value"
251
  names(d)[pos]<-c("dailyTmax")
252
  names(x)[pos]<-c("dailyTmax")
253
  d$dailyTmax=(as.numeric(d$dailyTmax))/10 #stored as 1/10 degree C to allow integer storage
254
  x$dailyTmax=(as.numeric(x$dailyTmax))/10 #stored as 1/10 degree C to allow integer storage
255
  pos<-match("station",names(d)) #Find column with name "value"
256
  names(d)[pos]<-c("id")
257
  names(x)[pos]<-c("id")
258
  names(modst)[1]<-c("id")       #modst contains the average tmax per month for every stations...
259
  dmoday=merge(modst,d,by="id")  #LOSING DATA HERE!!! from 113 to 103 -- inner join drops stations without a monthly mean
260
  xmoday=merge(modst,x,by="id")  #LOSING DATA HERE!!! from 48 to 43 -- inner join drops stations without a monthly mean
261
  names(dmoday)[4]<-c("lat")
262
  names(dmoday)[5]<-c("lon")     #dmoday contains all the the information: BIAS, monn
263
  names(xmoday)[4]<-c("lat")
264
  names(xmoday)[5]<-c("lon")     #dmoday contains all the the information: BIAS, monn
265
  
266
  data_v<-xmoday
267
  ###
268
  
269
  #dmoday contains the daily tmax values for training with TMax being the monthly station tmax mean
270
  #xmoday contains the daily tmax values for validation with TMax being the monthly station tmax mean
271
  
272
  # windows()
273
  X11()
274
  plot(dailyTmax~TMax,data=dmoday,xlab="Mo Tmax",ylab=paste("Daily for",datelabel),main="across stations in OR")
275
  savePlot(paste("Daily_tmax_monthly_TMax_scatterplot_",dates[i],out_prefix,".png", sep=""), type="png")
276
  dev.off()
277
  
278
  ########
279
  # STEP 5 - interpolate bias
280
  ########
281
  
282
  # ?? include covariates like elev, distance to coast, cloud frequency, tree height
283
  #library(fields)
284
  #windows()
285
  quilt.plot(sta_lola,sta_bias,main="Bias at stations",asp=1)
286
  US(add=T,col="magenta",lwd=2)
287
  #fitbias<-Tps(bias_xy,sta_bias) #use TPS or krige
288
  
289
  #Adding options to use only training stations: 07/11/2012
290
  bias_xy=project(as.matrix(sta_lola),proj_str)
291
  #bias_xy2=project(as.matrix(c(dmoday$lon,dmoday$lat),proj_str)
292
  if(bias_val==1){
293
    sta_bias<-dmoday$LSTD_bias
294
    bias_xy<-cbind(dmoday$x_OR83M,dmoday$y_OR83M)
295
  }
296
  
297
  fitbias<-Krig(bias_xy,sta_bias,theta=1e5) #use TPS or krige 
298
                                            #The output is a krig object using fields
299
  
300
  # Creating plot of bias surface and saving it
301
  X11()
302
  datelabel2=format(ISOdate(year,mo,day),"%B ") #added by Benoit, label
303
  surface(fitbias,col=rev(terrain.colors(100)),asp=1,main=paste("Interpolated bias for",datelabel2,sep=" "))
304
  savePlot(paste("Bias_surface_LST_TMax_",dates[i],out_prefix,".png", sep=""), type="png")
305
  dev.off()
306
  
307
  #US(add=T,col="magenta",lwd=2)
308

  
309
  ##########
310
  # STEP 7 - interpolate delta across space
311
  ##########
312
  
313
  daily_sta_lola=dmoday[,c("lon","lat")] #could be same as before but why assume merge does this - assume not
314
  daily_sta_xy=project(as.matrix(daily_sta_lola),proj_str)
315
  daily_delta=dmoday$dailyTmax-dmoday$TMax
316
  #windows()
317
  quilt.plot(daily_sta_lola,daily_delta,asp=1,main="Station delta for Jan 15")
318
  US(add=T,col="magenta",lwd=2)
319
  #fitdelta<-Tps(daily_sta_xy,daily_delta) #use TPS or krige
320
  fitdelta<-Krig(daily_sta_xy,daily_delta,theta=1e5) #use TPS or krige
321
                                                     #Kriging using fields package
322
 
323
  # Creating plot of bias surface and saving it
324
  X11()
325
  surface(fitdelta,col=rev(terrain.colors(100)),asp=1,main=paste("Interpolated delta for",datelabel,sep=" "))
326
  savePlot(paste("Delta_surface_LST_TMax_",dates[i],out_prefix,".png", sep=""), type="png")
327
  dev.off()
328
  #US(add=T,col="magenta",lwd=2)
329
  #
330
  
331
  #### Added by Benoit on 06/19
332
  data_s<-dmoday #put the 
333
  data_s$daily_delta<-daily_delta
334
  
335
  
336
  #data_s$y_var<-daily_delta  #y_var is the variable currently being modeled, may be better with BIAS!!
337
  data_s$y_var<-data_s$LSTD_bias
338
  #data_s$y_var<-(data_s$dailyTmax)*10
339
  #Model and response variable can be changed without affecting the script
340
  
341
  mod1<- gam(y_var~ s(lat) + s (lon) + s (ELEV_SRTM), data=data_s)
342
  mod2<- gam(y_var~ s(lat,lon)+ s(ELEV_SRTM), data=data_s) #modified nesting....from 3 to 2
343
  mod3<- gam(y_var~ s(lat) + s (lon) + s (ELEV_SRTM) +  s (Northness)+ s (Eastness) + s(DISTOC), data=data_s)
344
  mod4<- gam(y_var~ s(lat) + s (lon) + s(ELEV_SRTM) + s(Northness) + s (Eastness) + s(DISTOC) + s(LST), data=data_s)
345
  mod5<- gam(y_var~ s(lat,lon) +s(ELEV_SRTM) + s(Northness,Eastness) + s(DISTOC) + s(LST), data=data_s)
346
  mod6<- gam(y_var~ s(lat,lon) +s(ELEV_SRTM) + s(Northness,Eastness) + s(DISTOC) + s(LST)+s(LC1), data=data_s)
347
  mod7<- gam(y_var~ s(lat,lon) +s(ELEV_SRTM) + s(Northness,Eastness) + s(DISTOC) + s(LST)+s(LC3), data=data_s)
348
  mod8<- gam(y_var~ s(lat,lon) +s(ELEV_SRTM) + s(Northness,Eastness) + s(DISTOC) + s(LST) + s(LC1), data=data_s)
349
 
350
  #Added
351
  #tmax_predicted=themolst+daily_delta_rast-bias_rast #Final surface?? but daily_rst
352
  
353
  #### Added by Benoit ends
354

  
355
  #########
356
  # STEP 8 - assemble final answer - T=LST+Bias(interpolated)+delta(interpolated)
357
  #########
358
  
359
  bias_rast=interpolate(themolst,fitbias) #interpolation using function from raster package
360
                                          #themolst is raster layer, fitbias is "Krig" object from bias surface
361
  plot(bias_rast,main="Raster bias") #This not displaying...
362
  
363
  #Saving kriged surface in raster images
364
  data_name<-paste("bias_LST_",dates[[i]],sep="")
365
  raster_name<-paste("fusion_",data_name,out_prefix,".rst", sep="")
366
  writeRaster(bias_rast, filename=raster_name,overwrite=TRUE)  #Writing the data in a raster file format...(IDRISI)
367
  
368
  daily_delta_rast=interpolate(themolst,fitdelta) #Interpolation of the bias surface...
369
  
370
  plot(daily_delta_rast,main="Raster Daily Delta")
371
  
372
  #Saving kriged surface in raster images
373
  data_name<-paste("daily_delta_",dates[[i]],sep="")
374
  raster_name<-paste("fusion_",data_name,out_prefix,".rst", sep="")
375
  writeRaster(daily_delta_rast, filename=raster_name,overwrite=TRUE)  #Writing the data in a raster file format...(IDRISI)
376
  
377
  tmax_predicted=themolst+daily_delta_rast-bias_rast #Final surface  as a raster layer...
378
  #tmax_predicted=themolst+daily_delta_rast+bias_rast #Added by Benoit, why is it -bias_rast
379
  plot(tmax_predicted,main="Predicted daily")
380
  
381
  #Saving kriged surface in raster images
382
  data_name<-paste("tmax_predicted_",dates[[i]],sep="")
383
  raster_name<-paste("fusion_",data_name,out_prefix,".rst", sep="")
384
  writeRaster(tmax_predicted, filename=raster_name,overwrite=TRUE)  #Writing the data in a raster file format...(IDRISI)
385
  
386
  ########
387
  # check: assessment of results: validation
388
  ########
389
  # Root Mean Square Error between two equal-length numeric vectors.
  # x, y: numeric vectors (e.g. predicted and observed tmax).
  # na.rm: if TRUE, NA pairs are dropped before averaging (default FALSE
  #        preserves the original behavior of propagating NA).
  # Returns a single numeric value.
  RMSE<-function(x,y,na.rm=FALSE) {
    sqrt(mean((x-y)^2, na.rm=na.rm))
  }
390
  
391
  #FIT ASSESSMENT
392
  sta_pred_data_s=lookup(tmax_predicted,data_s$lat,data_s$lon)
393
  rmse_fit=RMSE(sta_pred_data_s,data_s$dailyTmax)
394
  
395
  sta_pred=lookup(tmax_predicted,data_v$lat,data_v$lon)
396
  #sta_pred=lookup(tmax_predicted,daily_sta_lola$lat,daily_sta_lola$lon)
397
  #rmse=RMSE(sta_pred,dmoday$dailyTmax)
398
  #pos<-match("value",names(data_v)) #Find column with name "value"
399
  #names(data_v)[pos]<-c("dailyTmax")
400
  tmax<-data_v$dailyTmax
401
  #data_v$dailyTmax<-tmax
402
  rmse=RMSE(sta_pred,tmax)
403
  #plot(sta_pred~dmoday$dailyTmax,xlab=paste("Actual daily for",datelabel),ylab="Pred daily",main=paste("RMSE=",rmse))
404
  X11()
405
  plot(sta_pred~tmax,xlab=paste("Actual daily for",datelabel),ylab="Pred daily",main=paste("RMSE=",rmse))
406
  abline(0,1)
407
  savePlot(paste("Predicted_tmax_versus_observed_scatterplot_",dates[i],out_prefix,".png", sep=""), type="png")
408
  dev.off()
409
  #resid=sta_pred-dmoday$dailyTmax
410
  resid=sta_pred-tmax
411
  quilt.plot(daily_sta_lola,resid)
412
  
413
  ### END OF BRIAN's code
414
  
415
  ### Added by benoit
416
  #Store results using TPS
417
  j=9
418
  results_RMSE[i,1]<- dates[i]    #storing the interpolation dates in the first column
419
  results_RMSE[i,2]<- ns          #number of stations used in the training stage
420
  results_RMSE[i,3]<- "RMSE"
421
  results_RMSE[i,j+3]<- rmse  #Storing RMSE for the model j
422
  
423
  results_RMSE_f[i,1]<- dates[i]    #storing the interpolation dates in the first column
424
  results_RMSE_f[i,2]<- ns          #number of stations used in the training stage
425
  results_RMSE_f[i,3]<- "RMSE"
426
  results_RMSE_f[i,j+3]<- rmse_fit  #Storing RMSE for the model j
427
  
428
  ns<-nrow(data_s) #This is added to because some loss of data might have happened because of the averaging...
429
  nv<-nrow(data_v)         
430
  
431
  for (j in 1:length(models)){
432
    
433
    ##Model assessment: specific diagnostic/metrics for GAM
434
    
435
    name<-paste("mod",j,sep="")  #modj is the name of The "j" model (mod1 if j=1) 
436
    mod<-get(name)               #accessing GAM model object "j"
437
    results_AIC[i,1]<- dates[i]  #storing the interpolation dates in the first column
438
    results_AIC[i,2]<- ns        #number of stations used in the training stage
439
    results_AIC[i,3]<- "AIC"
440
    results_AIC[i,j+3]<- AIC (mod)
441
    
442
    results_GCV[i,1]<- dates[i]  #storing the interpolation dates in the first column
443
    results_GCV[i,2]<- ns        #number of stations used in the training 
444
    results_GCV[i,3]<- "GCV"
445
    results_GCV[i,j+3]<- mod$gcv.ubre
446
    
447
    results_DEV[i,1]<- dates[i]  #storing the interpolation dates in the first column
448
    results_DEV[i,2]<- ns        #number of stations used in the training stage
449
    results_DEV[i,3]<- "DEV"
450
    results_DEV[i,j+3]<- mod$deviance
451
    
452
    sta_LST_s=lookup(themolst,data_s$lat,data_s$lon)
453
    sta_delta_s=lookup(daily_delta_rast,data_s$lat,data_s$lon) #delta surface has been calculated before!!
454
    sta_bias_s= mod$fit
455
    #Need to extract values from the kriged delta surface...
456
    #sta_delta= lookup(delta_surface,data_v$lat,data_v$lon)
457
    #tmax_predicted=sta_LST+sta_bias-y_mod$fit
458
    tmax_predicted_s= sta_LST_s-sta_bias_s+sta_delta_s
459
    
460
    results_RMSE_f[i,1]<- dates[i]  #storing the interpolation dates in the first column
    results_RMSE_f[i,2]<- ns        #number of stations used in the training stage
    results_RMSE_f[i,3]<- "RMSE"    #metric label; fixed typo "RSME" so the label matches the other results tables
    results_RMSE_f[i,j+3]<- sqrt(sum((tmax_predicted_s-data_s$dailyTmax)^2)/ns)  #RMSE of fit for model j on the training sample
464
    
465
    ##Model assessment: general diagnostic/metrics
466
    ##validation: using the testing data
467
    
468
    #This was modified on 06192012
469
    
470
    #data_v$y_var<-data_v$LSTD_bias
471
    #data_v$y_var<-tmax
472
    y_mod<- predict(mod, newdata=data_v, se.fit = TRUE) #Using the coeff to predict new values.
473
    
474
    ####ADDED ON JULY 5th
475
    sta_LST_v=lookup(themolst,data_v$lat,data_v$lon)
476
    sta_delta_v=lookup(daily_delta_rast,data_v$lat,data_v$lon) #delta surface has been calculated before!!
477
    sta_bias_v= y_mod$fit
478
    #Need to extract values from the kriged delta surface...
479
    #sta_delta= lookup(delta_surface,data_v$lat,data_v$lon)
480
    #tmax_predicted=sta_LST+sta_bias-y_mod$fit
481
    tmax_predicted_v= sta_LST_v-sta_bias_v+sta_delta_v
482
    
483
    #data_v$tmax<-(data_v$tmax)/10
484
    res_mod<- data_v$dailyTmax - tmax_predicted_v              #Residuals for the model for fusion
485
    #res_mod<- data_v$y_var - y_mod$fit                  #Residuals for the model
486
    
487
    RMSE_mod <- sqrt(sum(res_mod^2)/nv)                 #RMSE FOR REGRESSION STEP 1: GAM     
488
    MAE_mod<- sum(abs(res_mod))/nv                     #MAE, Mean abs. Error FOR REGRESSION STEP 1: GAM   
489
    ME_mod<- sum(res_mod)/nv                            #ME, Mean Error or bias FOR REGRESSION STEP 1: GAM
490
    R2_mod<- cor(data_v$dailyTmax,tmax_predicted_v)^2              #R2, coef. of var FOR REGRESSION STEP 1: GAM
491
    
492
    results_RMSE[i,1]<- dates[i]    #storing the interpolation dates in the first column
493
    results_RMSE[i,2]<- ns          #number of stations used in the training stage
494
    results_RMSE[i,3]<- "RMSE"
495
    results_RMSE[i,j+3]<- RMSE_mod  #Storing RMSE for the model j
496
    results_MAE[i,1]<- dates[i]     #storing the interpolation dates in the first column
497
    results_MAE[i,2]<- ns           #number of stations used in the training stage
498
    results_MAE[i,3]<- "MAE"
499
    results_MAE[i,j+3]<- MAE_mod    #Storing MAE for the model j
500
    results_ME[i,1]<- dates[i]      #storing the interpolation dates in the first column
501
    results_ME[i,2]<- ns            #number of stations used in the training stage
502
    results_ME[i,3]<- "ME"
503
    results_ME[i,j+3]<- ME_mod      #Storing ME for the model j
504
    results_R2[i,1]<- dates[i]      #storing the interpolation dates in the first column
505
    results_R2[i,2]<- ns            #number of stations used in the training stage
506
    results_R2[i,3]<- "R2"
507
    results_R2[i,j+3]<- R2_mod      #Storing R2 for the model j
508
    
509
    #Saving residuals and prediction in the dataframes: tmax predicted from GAM
510
    pred<-paste("pred_mod",j,sep="")
511
    #data_v[[pred]]<-as.numeric(y_mod$fit)
512
    data_v[[pred]]<-as.numeric(tmax_predicted_v)
513
    data_s[[pred]]<-as.numeric(tmax_predicted_s) #Storing model fit values (predicted on training sample)
514
    #data_s[[pred]]<-as.numeric(mod$fit) #Storing model fit values (predicted on training sample)
515
    
516
    name2<-paste("res_mod",j,sep="")
517
    data_v[[name2]]<-as.numeric(res_mod)
518
    temp<-tmax_predicted_s-data_s$dailyTmax
519
    data_s[[name2]]<-as.numeric(temp)
520
    #end of loop calculating RMSE
521
    
522
  }
523

  
524
  # end of the for loop1
525
  
526
}
527

  
528

  
529
## Plotting and saving diagnostic measures
530

  
531
#Specific diagnostic measures related to the testing datasets
532

  
533
results_table_RMSE<-as.data.frame(results_RMSE)
534
results_table_MAE<-as.data.frame(results_MAE)
535
results_table_ME<-as.data.frame(results_ME)
536
results_table_R2<-as.data.frame(results_R2)
537

  
538
results_table_RMSE_f<-as.data.frame(results_RMSE_f)
539

  
540
cname<-c("dates","ns","metric","mod1", "mod2","mod3", "mod4", "mod5", "mod6", "mod7","mod8","mod9")
541
colnames(results_table_RMSE)<-cname
542
colnames(results_table_RMSE_f)<-cname
543

  
544
#tb_diagnostic1<-rbind(results_table_RMSE,results_table_MAE, results_table_ME, results_table_R2)   #
545
tb_diagnostic1<-results_table_RMSE      #measures of validation
546
tb_diagnostic2<-results_table_RMSE_f    #measures of fit
547

  
548
write.table(tb_diagnostic1, file= paste(path,"/","results_fusion_Assessment_measure1",out_prefix,".txt",sep=""), sep=",")
549
write.table(tb_diagnostic2, file= paste(path,"/","results_fusion_Assessment_measure2",out_prefix,".txt",sep=""), sep=",")
550

  
551
#Calculate mean
552

  
553
# Summarize validation RMSE across dates for each of the 9 models.
# The results matrix mixed characters (dates, metric label) with numbers,
# so the metric columns come back as character/factor and must be coerced.
tb<-tb_diagnostic1
for(i in 4:12){            # coerce the mod1..mod9 metric columns to numeric
  tb[,i]<-as.numeric(as.character(tb[,i]))  
}

# mean() on a data.frame is defunct in modern R; colMeans() computes the
# per-model mean RMSE across interpolation dates (the intended result).
colMeans(tb[,4:12],na.rm=TRUE)
boxplot(tb[,4:12],outline=FALSE)
560
#### END OF SCRIPT

Also available in: Unified diff