##################    CLIMATE INTERPOLATION FUSION METHOD   #######################################
############################ Merging LST and station data ##########################################
#This script interpolates tmax values using MODIS LST and GHCND station data over a given        #
#interpolation area. It requires the text file of stations and a shapefile of the study area.    #
#Note that the projection for both the GHCND stations and the study area is lonlat WGS84.        #
#AUTHOR: Brian McGill                                                                             #
#DATE: 07/11/2012                                                                                 #
#PROJECT: NCEAS IPLANT: Environment and Organisms --TASK#363--                                    #
###################################################################################################
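#
#Overview of the workflow implemented below:
# 1) Read the GHCN station shapefile and the covariate raster stack, and derive additional
#    covariates (northness/eastness, lon/lat, screened land-cover layers).
# 2) Build the monthly LST climatology stack and the 2000-2010 monthly station means (TMax).
# 3) Screen the daily 2010 station data and draw a training/testing split for every date.
# 4) Run runGAMFusion (defined in the sourced GAM_fusion_function script) over all dates with mclapply.
# 5) Collect the accuracy metrics (RMSE, MAE, ME, R2, etc.) and write the assessment tables to disk.
#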

    
###Loading R libraries and packages
library(gtools)                              # Loading some useful tools
library(mgcv)                                # GAM package by Simon Wood
library(sp)                                  # Spatial package with class definitions by Bivand et al.
library(spdep)                               # Spatial package with methods and spatial statistics by Bivand et al.
library(rgdal)                               # GDAL wrapper for R, spatial utilities
library(gstat)                               # Kriging and co-kriging by Pebesma et al.
library(fields)                              # NCAR spatial interpolation methods such as kriging and splines
library(raster)                              # Hijmans et al. package for raster processing
library(rasterVis)                           # Visualization methods for raster objects
library(parallel)                            # Urbanek S. and Ripley B., package for multi-core and parallel processing

### Parameters and arguments

infile1<- "ghcn_or_tmax_covariates_06262012_OR83M.shp"    #GHCN shapefile containing variables for modeling 2010
#infile2<-"list_10_dates_04212012.txt"                    #List of 10 dates for the regression
#infile2<-"list_2_dates_04212012.txt"
infile2<-"list_365_dates_04212012.txt"                    #List of 365 dates for the regression
infile3<-"LST_dates_var_names.txt"                        #LST dates and variable names
infile4<-"models_interpolation_05142012.txt"              #Interpolation model names
infile5<-"mean_day244_rescaled.rst"                       #Raster or grid for the locations of predictions
#infile6<-"lst_climatology.txt"
infile6<-"LST_files_monthly_climatology.txt"              #List of monthly LST climatology files
inlistf<-"list_files_05032012.txt"                        #Stack of images containing the covariates

37
#path<-"/home/parmentier/Data/IPLANT_project/data_Oregon_stations"
38
path<-"/home/parmentier/Data/IPLANT_project/data_Oregon_stations_07192012_GAM"
39
#path<-"/home/parmentier/Data/IPLANT_project/data_Oregon_stations_GAM"
40
#path<-"/home/parmentier/Data/IPLANT_project/data_Oregon_stations_07152012"     #Jupiter LOCATION on Atlas for kriging"
41
#path<-"M:/Data/IPLANT_project/data_Oregon_stations"   #Locations on Atlas
42

    
#Station locations for the study area
stat_loc<-read.table(paste(path,"/","location_study_area_OR_0602012.txt",sep=""),sep=",", header=TRUE)
#GHCN database for 1980-2010 for the study area (OR)
data3<-read.table(paste(path,"/","ghcn_data_TMAXy1980_2010_OR_0602012.txt",sep=""),sep=",", header=TRUE)

nmodels<-8                                #Number of models run
y_var_name<-"dailyTmax"                   #Name of the variable being predicted
predval<-1
prop<-0.3                                 #Proportion of data withheld for testing/validation
#prop<-0.25
seed_number<- 100                         #Seed number for random sampling
out_prefix<-"_07242012_365d_GAM_fusion5"  #User-defined output prefix
setwd(path)
bias_val<-0                               #If set to 1, the training data are used in the bias surface rather than all monthly stations

#source("fusion_function_07192012.R")
source("GAM_fusion_function_07192012d.R")    #Defines runGAMFusion, used in the prediction step below

############ START OF THE SCRIPT ##################
#
#
### Step 0/Step 6 in Brian's code: preparing year 2010 data for modeling
#

###Reading the station data and setting up for model comparison
filename<-sub(".shp","",infile1)             #Removing the extension from the file name
ghcn<-readOGR(".", filename)                 #Reading the shapefile

CRS<-proj4string(ghcn)                       #Storing projection information (ellipsoid, datum, etc.)

mean_LST<- readGDAL(infile5)                 #Reading the whole raster in memory; this provides a grid for kriging
proj4string(mean_LST)<-CRS                   #Assigning coordinate information to the prediction grid

ghcn <- transform(ghcn,Northness = cos(ASPECT*pi/180))                     #Adding northness to the dataframe
ghcn <- transform(ghcn,Eastness = sin(ASPECT*pi/180))                      #Adding eastness to the dataframe
ghcn <- transform(ghcn,Northness_w = sin(slope*pi/180)*cos(ASPECT*pi/180)) #Adding slope-weighted northness
ghcn <- transform(ghcn,Eastness_w = sin(slope*pi/180)*sin(ASPECT*pi/180))  #Adding slope-weighted eastness
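#Aspect is a circular variable (0 and 360 degrees point the same way), so it is decomposed into
#northness (cos) and eastness (sin) components; the *_w versions are weighted by sin(slope) so that
#aspect contributes little on near-flat terrain. The same transformation is applied to the raster
#layers further below (N, E, Nw, Ew).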

    
#Remove NA for LC and CANHEIGHT
ghcn$LC1[is.na(ghcn$LC1)]<-0
ghcn$LC3[is.na(ghcn$LC3)]<-0
ghcn$CANHEIGHT[is.na(ghcn$CANHEIGHT)]<-0

dates <-readLines(paste(path,"/",infile2, sep=""))       #Dates for which predictions are made
LST_dates <-readLines(paste(path,"/",infile3, sep=""))   #LST dates and variable names
models <-readLines(paste(path,"/",infile4, sep=""))      #Interpolation model names

##Extracting the variable values from the raster files

lines<-read.table(paste(path,"/",inlistf,sep=""), sep=" ")    #Column 1 contains the names of the raster files
inlistvar<-lines[,1]
inlistvar<-paste(path,"/",as.character(inlistvar),sep="")
covar_names<-as.character(lines[,2])                          #Column 2 contains short names for the covariates

s_raster<- stack(inlistvar)                                   #Creating a stack of raster images from the list of variables
layerNames(s_raster)<-covar_names                             #Assigning names to the raster layers
projection(s_raster)<-CRS

#stat_val<- extract(s_raster, ghcn3)        #Extracting values from the raster stack for every point location in the coords data frame
pos<-match("ASPECT",layerNames(s_raster))   #Find the layer named "ASPECT"
r1<-raster(s_raster,layer=pos)              #Select the layer from the stack
pos<-match("slope",layerNames(s_raster))    #Find the layer named "slope"
r2<-raster(s_raster,layer=pos)              #Select the layer from the stack
N<-cos(r1*pi/180)                           #Northness
E<-sin(r1*pi/180)                           #Eastness
Nw<-sin(r2*pi/180)*cos(r1*pi/180)           #Slope-weighted northness
Ew<-sin(r2*pi/180)*sin(r1*pi/180)           #Slope-weighted eastness

pos<-match("LC1",layerNames(s_raster))      #Find the layer named "LC1"
LC1<-raster(s_raster,layer=pos)             #Select the layer from the stack
s_raster<-dropLayer(s_raster,pos)           #Drop the original layer; the screened version is added back later
LC1[is.na(LC1)]<-0                          #Replace NA values with 0
pos<-match("LC3",layerNames(s_raster))      #Find the layer named "LC3"
LC3<-raster(s_raster,layer=pos)             #Select the layer from the stack
s_raster<-dropLayer(s_raster,pos)
LC3[is.na(LC3)]<-0                          #Replace NA values with 0

xy<-coordinates(r1)                    #Get the x and y projected coordinates...
xy_latlon<-project(xy, CRS, inv=TRUE)  #Find lat-long for the projected coordinates (i.e. pixels)
lon<-raster(xy_latlon)                 #Transform the matrix into a raster object (ncol=ncol(r1), nrow=nrow(r1))
ncol(lon)<-ncol(r1)
nrow(lon)<-nrow(r1)
extent(lon)<-extent(r1)
projection(lon)<-CRS                   #At this stage this is still an empty raster with 536 rows and 745 columns
lat<-lon
values(lon)<-xy_latlon[,1]
values(lat)<-xy_latlon[,2]

r<-stack(N,E,Nw,Ew,lon,lat,LC1,LC3)        #Stack of derived covariates
rnames<-c("Northness","Eastness","Northness_w","Eastness_w", "lon","lat","LC1","LC3")
layerNames(r)<-rnames
s_raster<-addLayer(s_raster, r)            #Add the derived covariates to the main raster stack

#s_sgdf<-as(s_raster,"SpatialGridDataFrame") #Conversion to spatial grid data frame

####### Preparing the LST climatology stack

#l=list.files(pattern="mean_month.*rescaled.rst")
l <-readLines(paste(path,"/",infile6, sep=""))     #List of monthly LST climatology files
molst<-stack(l)                                    #Creating a raster stack...
#setwd(old)
molst<-molst-273.16                                #Convert LST from Kelvin to Celsius; stack of monthly averages
idx <- seq(as.Date('2010-01-15'), as.Date('2010-12-15'), 'month')
molst <- setZ(molst, idx)                          #Assign mid-month dates as the time dimension of the stack
layerNames(molst) <- month.abb
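
#Optional visual check of the monthly climatology using rasterVis (loaded above); a minimal sketch:
#levelplot(molst)    #Lattice plot of the 12 monthly LST means in degrees C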

    
######  Preparing tables for model assessment: specific diagnostics/metrics

#Model assessment: specific diagnostics/metrics
results_AIC<- matrix(1,1,nmodels+3)
results_GCV<- matrix(1,1,nmodels+3)
results_DEV<- matrix(1,1,nmodels+3)
#results_RMSE_f<- matrix(1,length(models)+3)

#Model assessment: general diagnostics/metrics
results_RMSE <- matrix(1,1,nmodels+4)
results_MAE <- matrix(1,1,nmodels+4)
results_ME <- matrix(1,1,nmodels+4)       #There are 8+1 models
results_R2 <- matrix(1,1,nmodels+4)       #Coefficient of determination for the validation dataset

results_RMSE_f<- matrix(1,1,nmodels+4)    #RMSE fit: RMSE for the training dataset
results_MAE_f <- matrix(1,1,nmodels+4)    #MAE fit: MAE for the training dataset

######## Preparing monthly averages from the PostgreSQL database

# Do this work outside of (before) the prediction function
# to avoid making a copy of the data frame inside the function call
date1<-ISOdate(data3$year,data3$month,data3$day)     #Creating a date object from three separate columns
date2<-as.POSIXlt(as.Date(date1))
data3$date<-date2
d<-subset(data3,year>=2000 & mflag=="0" )            #Selecting the 2000-2010 data with good quality flags: 193 stations
#May need some screening??? i.e. range of temperature and elevation...
d1<-aggregate(value~station+month, data=d, mean)     #Calculate the monthly mean for every station in OR
id<-as.data.frame(unique(d1$station))                #Unique stations in OR for 2000-2010: 193, but 7 lack monthly averages

dst<-merge(d1, stat_loc, by.x="station", by.y="STAT_ID")   #Inner join; all columns are retained

#This allows changing only one column name of the data.frame
pos<-match("value",names(dst))       #Find the column named "value"
names(dst)[pos]<-c("TMax")
dst$TMax<-dst$TMax/10                #TMax is the average maximum temperature for the monthly data (GHCN values are in tenths of degrees C)
#dstjan=dst[dst$month==9,]           #dst contains the monthly averages of tmax for every station over 2000-2010
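
#Quick sanity checks on the merged monthly climatology table (optional):
#head(dst)                      #One row per station and month, with the 2000-2010 mean TMax in degrees C
#length(unique(dst$station))    #Number of stations retained after the join with stat_loc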

    
######### Preparing daily values for training and testing

#Screening for bad values: value is tmax (in tenths of degrees C) in this case
#ghcn$value<-as.numeric(ghcn$value)
ghcn_all<-ghcn
ghcn_test<-subset(ghcn,ghcn$value>-150 & ghcn$value<400)    #Remove implausible temperature values
ghcn_test2<-subset(ghcn_test,ghcn_test$ELEV_SRTM>0)         #Remove stations with missing or non-positive SRTM elevation
ghcn<-ghcn_test2
#coords<- ghcn[,c('x_OR83M','y_OR83M')]

##Sampling: training and testing sites

set.seed(seed_number)                        #Using a seed number allows results based on random sampling to be reproduced and compared
ghcn.subsets <-lapply(dates, function(d) subset(ghcn, date==d)) #This creates a list of 10 or 365 subset datasets, one per date
sampling<-vector("list",length(dates))

for(i in 1:length(dates)){
  n<-nrow(ghcn.subsets[[i]])
  ns<-n-round(n*prop)   #Number of training rows (70% of the rows when prop is 0.3)
  nv<-n-ns              #Number of validation rows (prop of the rows)
  ind.training <- sample(nrow(ghcn.subsets[[i]]), size=ns, replace=FALSE) #Randomly select the index positions of the training rows
  ind.testing <- setdiff(1:nrow(ghcn.subsets[[i]]), ind.training)
  sampling[[i]]<-ind.training
}
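
#The training indices stored in sampling[[i]] are presumably picked up inside runGAMFusion (sourced above);
#for illustration only, the split for a given date i amounts to something like (hypothetical names):
#data_train<-ghcn.subsets[[i]][sampling[[i]],]                                      #ns training stations
#data_test <-ghcn.subsets[[i]][setdiff(1:nrow(ghcn.subsets[[i]]),sampling[[i]]),]   #nv validation stations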

    
######## Prediction for the range of dates

#Start loop here...

## Looping through the dates: this is the main part of the code
#i=1 #for debugging
#j=1 #for debugging
#for(i in 1:length(dates)){      #Start of for loop #1 (replaced by the mclapply call below)
#i=1

#mclapply(1:length(dates), runFusion, mc.cores = 8)
#source("GAM_fusion_function_07192012d.R")

gam_fus_mod<-mclapply(1:length(dates), runGAMFusion, mc.preschedule=FALSE, mc.cores = 8) #Run the GAM fusion prediction for every date in parallel on 8 cores
#fusion_mod357<-mclapply(357:365,runFusion, mc.cores=8)   #For debugging
#test<-runFusion(362)                                      #Date 362 has problems with GAM
#test<-mclapply(357,runFusion, mc.cores=1)                 #For debugging
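
#Note: mc.preschedule=FALSE forks one job per date instead of pre-assigning blocks of dates to cores,
#so a failure on a single date (e.g. the GAM problem on date 362 noted above) does not take down a
#whole prescheduled chunk. A sequential fallback for debugging could be (sketch, not part of the run):
#gam_fus_mod<-lapply(1:length(dates), runGAMFusion)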

    
## Plotting and saving diagnostic measures

accuracy_tab_fun<-function(i,f_list){    #Extract the accuracy/metrics table (third element) for date i
  tb<-f_list[[i]][[3]]
  return(tb)
}

tb<-gam_fus_mod[[1]][[3]][0,]   #Empty data frame with the metric table structure that can be used for rbinding
tb_tmp<-gam_fus_mod             #Copy

for (i in 1:length(tb_tmp)){
  tmp<-tb_tmp[[i]][[3]]
  tb<-rbind(tb,tmp)
}
rm(tb_tmp)
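
#The accuracy_tab_fun helper defined above could equivalently replace the explicit loop, e.g. (sketch):
#tb_list<-lapply(1:length(gam_fus_mod), accuracy_tab_fun, f_list=gam_fus_mod)
#tb<-do.call(rbind, tb_list)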

    
for(i in 4:12){                             #Convert the metric columns from character/factor to numeric
  tb[,i]<-as.numeric(as.character(tb[,i]))
}

tb_RMSE<-subset(tb, metric=="RMSE")
tb_MAE<-subset(tb,metric=="MAE")
tb_ME<-subset(tb,metric=="ME")
tb_R2<-subset(tb,metric=="R2")
tb_RMSE_f<-subset(tb, metric=="RMSE_f")
tb_MAE_f<-subset(tb,metric=="MAE_f")

tb_diagnostic1<-rbind(tb_RMSE,tb_MAE,tb_ME,tb_R2)   #Validation metrics for all dates and models
#tb_diagnostic2<-rbind(tb_,tb_MAE,tb_ME,tb_R2)

mean_RMSE<-sapply(tb_RMSE[,4:12],mean)              #Mean RMSE per model over all dates
mean_MAE<-sapply(tb_MAE[,4:12],mean)                #Mean MAE per model over all dates

#tb<-sapply(fusion_mod,accuracy_tab_fun)

write.table(tb_diagnostic1, file= paste(path,"/","results2_fusion_Assessment_measure1",out_prefix,".txt",sep=""), sep=",")   #Validation metrics
write.table(tb, file= paste(path,"/","results2_fusion_Assessment_measure_all",out_prefix,".txt",sep=""), sep=",")            #All metrics (training and validation)
save(gam_fus_mod,file= paste(path,"/","results2_fusion_Assessment_measure_all",out_prefix,".RData",sep=""))                  #Full list of model outputs and metrics
#tb<-as.data.frame(tb_diagnostic1)

#write.table(tb_1, file= paste(path,"/","results2_fusion_Assessment_measure1",out_prefix,".txt",sep=""), sep=",")

#write.table(tb_diagnostic2, file= paste(path,"/","results_fusion_Assessment_measure2",out_prefix,".txt",sep=""), sep=",")

#### END OF SCRIPT