Revision bcfff168
Added by Benoit Parmentier over 10 years ago
climate/research/oregon/interpolation/global_run_scalingup_assessment_part1.R
#################################### INTERPOLATION OF TEMPERATURES #######################################
-############################ Script for assessment of scaling up on NEX ##############################
+############################ Script for assessment of scaling up on NEX: part 1 ##############################
#This script uses the workflow code applied to the globe. Results currently reside on NEX/PLEIADES NASA.
-#Accuracy methods are added in the the function scripts to evaluate results.
-#Analyses, figures, tables and data are also produced in the script.
+#The purpose is to create a set of functions to diagnose and assess quickly a set of predicted tiles.
+#Part 1 creates summary tables and inputs for figures in part 2 and part 3.
#AUTHOR: Benoit Parmentier
#CREATED ON: 03/23/2014
-#MODIFIED ON: 05/15/2014
+#MODIFIED ON: 05/29/2014
#Version: 3
#PROJECT: Environmental Layers project
+#TO DO:
+# - generate delta and clim mosaic
+# - generate monthly inputs data_month
+# - generate table of number of observations per tile for use in map part 2
+# - generate data_s and data_v inputs as giant table
+# - generate accuracy for mosaic (part 2 and part 3)
+# - clean up
#################################################################################################

### Loading R library and packages
...
  return(rast_list)
}

-extract_daily_training_testing_info<- function(i,list_param){
-  #This function extracts training and testing information from the raster object produced for each tile
-  #This is looping through tiles...
-
-  ### Function:
-  pred_data_info_fun <- function(k,list_data,pred_mod,sampling_dat_info){
+### Function:
+pred_data_info_fun <- function(k,list_data,pred_mod,sampling_dat_info){
+  #Summarizing input info

-  data <- list_data[[k]]
-  sampling_dat <- sampling_dat_info[[k]]
-  if(data_day!="try-error"){
-    n <- nrow(data)
-    n_mod <- vector("numeric",length(pred_mod))
-    for(j in 1:length(pred_mod)){
-      n_mod[j] <- sum(!is.na(data[[pred_mod[j]]]))
-    }
-    n <- rep(n,length(pred_mod))
-    sampling_dat <- sampling_dat[rep(seq_len(nrow(sampling_dat)), each=length(pred_mod)),]
-    row.names(sampling_dat) <- NULL
-    df_n <- data.frame(n,n_mod,pred_mod)
-    df_n <- cbind(df_n,sampling_dat)
-  }else{
-    n <- rep(NA,length(pred_mod))
-    n_mod <- vector("numeric",length(pred_mod))
-    n_mod <- rep(NA,length(pred_mod))
-    df_n <- data.frame(n,n_mod,pred_mod)
-    sampling_dat <- sampling_dat[rep(seq_len(nrow(sampling_dat)), each=length(pred_mod)),]
-    row.names(sampling_dat) <- NULL
-    df_n <- data.frame(n,n_mod,pred_mod)
-    df_n <- cbind(df_n,sampling_dat)
-
+  data <- list_data[[k]]
+  sampling_dat <- sampling_dat_info[[k]]
+  if(data_day!="try-error"){
+    n <- nrow(data)
+    n_mod <- vector("numeric",length(pred_mod))
+    for(j in 1:length(pred_mod)){
+      n_mod[j] <- sum(!is.na(data[[pred_mod[j]]]))
  }
-  return(df_n)
+    n <- rep(n,length(pred_mod))
+    sampling_dat <- sampling_dat[rep(seq_len(nrow(sampling_dat)), each=length(pred_mod)),]
+    row.names(sampling_dat) <- NULL
+    df_n <- data.frame(n,n_mod,pred_mod)
+    df_n <- cbind(df_n,sampling_dat)
+  }else{
+    n <- rep(NA,length(pred_mod))
+    n_mod <- vector("numeric",length(pred_mod))
+    n_mod <- rep(NA,length(pred_mod))
+    df_n <- data.frame(n,n_mod,pred_mod)
+    sampling_dat <- sampling_dat[rep(seq_len(nrow(sampling_dat)), each=length(pred_mod)),]
+    row.names(sampling_dat) <- NULL
+    df_n <- data.frame(n,n_mod,pred_mod)
+    df_n <- cbind(df_n,sampling_dat)
+
  }
+
+  return(df_n)
+}
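Note on the guard above: data_day is not defined inside pred_data_info_fun, so the try-error test compares an undefined name. A hedged sketch of what the intended check presumably is, testing the extracted object itself with inherits() (an assumption, not the committed code):

#Sketch only (assumption): guard against a failed try() extraction for a tile.
safe_nrow <- function(data){
  if(inherits(data,"try-error")){
    return(NA_integer_)  #extraction failed for this tile: report NA instead of erroring
  }
  nrow(data)  #number of training/testing records actually extracted
}
#Inside pred_data_info_fun this would read: n <- safe_nrow(list_data[[k]])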

+extract_daily_training_testing_info<- function(i,list_param){
+  #This function extracts training and testing information from the raster object produced for each tile
+  #This is looping through tiles...
+
+  ##Functions used
+  #Defined outside this script:
+  #pred_data_info_fun
+
  ##### Parse parameters and arguments ####

  raster_obj_file <- list_param$list_raster_obj_files[i]
...
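The call site of extract_daily_training_testing_info sits further down in the full script and is not part of this hunk. A minimal usage sketch consistent with the parameter parsing shown above; the list_param field names beyond list_raster_obj_files and use_month, and the result name, are assumptions:

library(parallel)
#Sketch only: run the per-tile extraction in parallel over all raster_prediction objects.
list_param_extract <- list(list_raster_obj_files=list_raster_obj_files,
                           use_month=TRUE)  #other fields omitted here (assumption)
tile_training_info <- mclapply(1:length(list_raster_obj_files),
                               FUN=extract_daily_training_testing_info,
                               list_param=list_param_extract,
                               mc.preschedule=FALSE, mc.cores=2)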
  #pred_data_day_v_info$tile_id <- rep(tile_id,nrow(pred_data_day_v_info))

  }
+
  if(use_month==TRUE){

    list_data_month_s <- try(extract_list_from_list_obj(raster_obj$validation_mod_month_obj,"data_s"))
...
#### Parameters and constants

#in_dir1 <- "/data/project/layers/commons/NEX_data/test_run1_03232014/output" #On Atlas
-in_dir1 <- "/nobackupp4/aguzman4/climateLayers/output4" #On NEX
+in_dir1 <- "/nobackupp4/aguzman4/climateLayers/output10Deg/reg1/" #On NEX

#in_dir_list <- list.dirs(path=in_dir1) #get the list of directories with results by 10x10 degree tiles
#use subset for now:

-in_dir_list <- file.path(in_dir1,read.table(file.path(in_dir1,"processed.txt"))$V1)
+in_dir_list <- c(
+  "/nobackupp4/aguzman4/climateLayers/output10Deg/reg1/40.0_-120.0/",
+  "/nobackupp4/aguzman4/climateLayers/output10Deg/reg1/35.0_-115.0/")
+
+#in_dir_list <- file.path(in_dir1,read.table(file.path(in_dir1,"processed.txt"))$V1)
#in_dir_list <- as.list(in_dir_list[-1])
#in_dir_list <- in_dir_list[grep("bak",basename(basename(in_dir_list)),invert=TRUE)] #the first one is the in_dir1
#in_dir_shp <- in_dir_list[grep("shapefiles",basename(in_dir_list),invert=FALSE)] #select directory with shapefiles...
-in_dir_shp <- "/nobackupp4/aguzman4/climateLayers/output4/subset/shapefiles/"
+in_dir_shp <- "/nobackupp4/aguzman4/climateLayers/output10Deg/reg1/subset/shapefiles/"
#in_dir_list <- in_dir_list[grep("shapefiles",basename(in_dir_list),invert=TRUE)]
#the first one is the in_dir1
#the last directory contains shapefiles
y_var_name <- "dailyTmax"
interpolation_method <- c("gam_CAI")
-out_prefix<-"run2_global_analyses_05122014"
+out_prefix<-"run3_global_analyses_05292014"

#out_dir<-"/data/project/layers/commons/NEX_data/" #On NCEAS Atlas
out_dir <- "/nobackup/bparmen1/" #on NEX
...
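The two tile directories are hard-coded in this run; when processed.txt is not available, they can also be recovered directly from in_dir1 by their lat_lon naming convention. A sketch, assuming the 10x10 degree tile folders keep names such as 40.0_-120.0:

#Sketch only: list immediate subdirectories of in_dir1 and keep the lat_lon tile folders.
all_dirs <- list.dirs(path=in_dir1, recursive=FALSE)
in_dir_list <- all_dirs[grepl("^-?[0-9.]+_-?[0-9.]+$", basename(all_dirs))]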

CRS_locs_WGS84<-CRS("+proj=longlat +ellps=WGS84 +datum=WGS84 +towgs84=0,0,0") #Station coords WGS84

+day_to_mosaic <- c("20100101","20100901")
+file_format <- ".tif" #format for mosaiced files
+NA_flag_val <- -9999 #No data value
+
+#day_to_mosaic <- NULL #if day_to_mosaic is NULL then mosaic all dates
+
##raster_prediction object: contains testing and training stations with RMSE and model object

list_raster_obj_files <- lapply(in_dir_list,FUN=function(x){list.files(path=x,pattern="^raster_prediction_obj.*.RData",full.names=T)})
...
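The helper load_obj is used just below but defined with the other functions outside this hunk; the usual idiom, assuming each .RData file stores a single object, looks like this:

#Sketch only: load a single object saved in an .RData file and return it.
load_obj <- function(f){
  env <- new.env()
  nm <- load(f, envir=env)[1]  #name of the (first) object stored in the file
  env[[nm]]
}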

#First create table of tiles under analysis and their coord
df_tile_processed <- data.frame(tile_coord=basename(in_dir_list))
-df_tile_processed$tile_id <- unlist(list_names_tile_id)
+df_tile_processed$tile_id <- unlist(list_names_tile_id) #Arbitrary tiling number!!
df_tile_processed$path_NEX <- in_dir_list

##Quick exploration of raster object
-robj1 <- load_obj(list_raster_obj_files[[1]])
+robj1 <- load_obj(list_raster_obj_files[[2]]) #This is the tile corresponding to Oregon
+
names(robj1)
names(robj1$method_mod_obj[[1]]) #for January 1, 2010
names(robj1$method_mod_obj[[1]]$dailyTmax) #for January

names(robj1$clim_method_mod_obj[[1]]$data_month) #for January
names(robj1$validation_mod_month_obj[[1]]$data_s) #for January with predictions
+#Get the number of models predicted
+nb_mod <- length(unique(robj1$tb_diagnostic_v$pred_mod))

################
#### Table 1: Average accuracy metrics
...
##### Table 4: Add later on: daily info
### with also data_s and data_v saved!!!

+#Insert here...compute input and predicted ranges to spot potential errors?

######################################################
-####### PART 2 CREATE MOSAIC OF PREDICTIONS PER DAY ###
+####### PART 2 CREATE MOSAIC OF PREDICTIONS PER DAY, Delta surfaces and clim ###

dates_l <- unique(robj1$tb_diagnostic_s$date) #list of dates to query tif

## make this a function? report on number of tiles used for mosaic...

#inputs: build a pattern to find files
-y_var_name <- "dailyTmax"
-interpolation_method <- c("gam_CAI")
+y_var_name <- "dailyTmax" #set up in parameters of this script
+interpolation_method <- c("gam_CAI") #set up in parameters of this script
name_method <- paste(interpolation_method,"_",y_var_name,"_",sep="")
-l_pattern_models <- lapply(c(".*predicted_mod1_0_1.*",".*predicted_mod2_0_1.*",".*predicted_mod3_0_1.*",".*predicted_mod_kr_0_1.*"),
+#make it general using nb_mod!!
+#could be set up at the beginning?
+
+mod_id <- c(1:(nb_mod-1),"_kr")
+pred_pattern_str <- paste(".*predicted_mod",mod_id,"_0_1.*",sep="")
+#,".*predicted_mod2_0_1.*",".*predicted_mod3_0_1.*",".*predicted_mod_kr_0_1.*")
+#l_pattern_models <- lapply(c(".*predicted_mod1_0_1.*",".*predicted_mod2_0_1.*",".*predicted_mod3_0_1.*",".*predicted_mod_kr_0_1.*"),
+#                           FUN=function(x){paste(x,dates_l,".*.tif",sep="")})
+l_pattern_models <- lapply(pred_pattern_str,
                            FUN=function(x){paste(x,dates_l,".*.tif",sep="")})
#gam_CAI_dailyTmax_predicted_mod_kr_0_1_20101231_30_145.0_-120.0.tif
-out_prefix_s <- paste(name_method,c("predicted_mod1_0_01","predicted_mod2_0_01","predicted_mod3_0_01","predicted_mod_kr_0_1"),sep="")
-dates_l #list of predicted dates
-#l_out_rastnames_var <- paste(name_method,"predicted_mod1_0_01_",dates_l,sep="")
-l_out_rastnames_var <- lapply(out_prefix_s,
-                              FUN=function(x){paste(x,"_",dates_l,sep="")})
#gam_CAI_dailyTmax_predicted_mod_kr_0_1_20101231_30_145.0_-120.0.tif

##Get list of predicted tif across all tiles, models and dates...
#this takes time, use mclapply!!
lf_pred_tif <- vector("list",length=length(l_pattern_models)) #number of models is 3
for (i in 1:length(l_pattern_models)){
-  l_pattern_mod <- l_pattern_models[[i]]
+  l_pattern_mod <- l_pattern_models[[i]] #365 dates
  list_tif_files_dates <-lapply(1:length(l_pattern_mod),FUN=list_tif_fun,
                                in_dir_list=in_dir_list,pattern_str=l_pattern_models[[i]])
  lf_pred_tif[[i]] <- list_tif_files_dates
...
#"CAI_TMAX_clim_month_11_mod2_0_145.0_-120.0.tif" |
492 | 523 |
lf_clim_tif <- vector("list",length=nb_mod) #number of models is 3 |
493 | 524 |
for (i in 1:length(l_pattern_models)){ |
494 |
l_pattern_mod <- l_pattern_models[[i]] |
|
525 |
l_pattern_mod <- l_pattern_models[[i]] #12 dates
|
|
495 | 526 |
list_tif_files_dates <- lapply(1:length(l_pattern_mod),FUN=list_tif_fun, |
496 | 527 |
in_dir_list=in_dir_list,pattern_str=l_pattern_models[[i]]) |
497 | 528 |
lf_clim_tif[[i]] <- list_tif_files_dates |
498 | 529 |
} |
499 | 530 |
|
531 |
#Now get delta surfaces: |
|
532 |
date_l# <- paste("clim_month_",1:12,sep="") |
|
533 |
#l_pattern_models <- lapply(c("_mod1_0_1.*","_mod2_0_1.*","_mod3_0_1.*","_mod_kr_0_1.*"), |
|
534 |
# FUN=function(x){paste("*.",month_l,x,".*.tif",sep="")}) |
|
535 |
l_pattern_models <- lapply(c(".*delta_dailyTmax_mod1_del_0_1.*",".*delta_dailyTmax_mod2_del_0_1.*",".*delta_dailyTmax_mod3_del_0_1.*",".*delta_dailyTmax_mod_kr_del_0_1.*"), |
|
536 |
FUN=function(x){paste(x,dates_l,".*.tif",sep="")}) |
|
537 |
|
|
538 |
lf_delta_tif <- vector("list",length=nb_mod) #number of models is 3 |
|
539 |
for (i in 1:length(l_pattern_models)){ |
|
540 |
l_pattern_mod <- l_pattern_models[[i]] |
|
541 |
list_tif_files_dates <- lapply(1:length(l_pattern_mod),FUN=list_tif_fun, |
|
542 |
in_dir_list=in_dir_list,pattern_str=l_pattern_models[[i]]) |
|
543 |
lf_delta_tif[[i]] <- list_tif_files_dates |
|
544 |
} |
|
500 | 545 |
|
501 | 546 |
|
-#### NOW create mosaic images
-nb_mod <- 4
+#### NOW create mosaic images for daily prediction

+out_prefix_s <- paste(name_method,c("predicted_mod1_0_01","predicted_mod2_0_01","predicted_mod3_0_01","predicted_mod_kr_0_1"),sep="")
+dates_l #list of predicted dates
+#l_out_rastnames_var <- paste(name_method,"predicted_mod1_0_01_",dates_l,sep="")
+l_out_rastnames_var <- lapply(out_prefix_s,
+                              FUN=function(x){paste(x,"_",dates_l,sep="")})
+
+#nb_mod <- 4 #this is set up earlier
+##Add option to specify which dates to mosaic??
+day_to_mosaic <- c("20100101","20100901")
+if (!is.null(day_to_mosaic)){
+  list_days <- match(day_to_mosaic,dates_l)
+}else{
+  list_days <- 1:365 #should check for year in case it has 366, add later!!
+}
+###Make this a function later??
for (i in 1:nb_mod){

-  #l_pattern_mod <- l_pattern_models[[i]]
-  #out_prefix_s <-
-
-  #list_tif_files_dates <- list_tif_fun(1,in_dir_list,l_pattern_mod)
-
-  ##List of predicted tif ...
-  #list_tif_files_dates <-lapply(1:length(l_pattern_mod),FUN=list_tif_fun,
-  #                              in_dir_list=in_dir_list,pattern_str=l_pattern_mod)
  list_tif_files_dates <- lf_pred_tif[[i]]
-  #save(list_tif_files_dates, file=paste("list_tif_files_dates","_",out_prefix,".RData",sep=""))

-  mosaic_list_var <- list_tif_files_dates
-  # l_out_rastnames_var <- paste(name_method,"predicted_mod1_0_01_",dates_l,sep="")
-  # out_rastnames_var <- l_out_rastnames_var[i]
-  #l_out_rastnames_var <- paste(name_method,"predicted_mod2_0_01_",dates_l,sep="")
-  l_out_rastnames_var <- paste(name_method,"predicted_mod3_0_01_",dates_l,sep="")
-  out_rastnames_var <- l_out_rastnames_var
+  mosaic_list_var <- list_tif_files_dates
+  out_rastnames_var <- l_out_rastnames_var[[i]]

  file_format <- ".tif"
  NA_flag_val <- -9999

-  j<-1
+  j<-1 #date index for loop
  list_param_mosaic<-list(j,mosaic_list_var,out_rastnames_var,out_dir,file_format,NA_flag_val)
  names(list_param_mosaic)<-c("j","mosaic_list","out_rastnames","out_path","file_format","NA_flag_val")
-  list_var_mosaiced <- mclapply(1:2,FUN=mosaic_m_raster_list,list_param=list_param_mosaic,mc.preschedule=FALSE,mc.cores = 2)
+  #list_var_mosaiced <- mclapply(1:2,FUN=mosaic_m_raster_list,list_param=list_param_mosaic,mc.preschedule=FALSE,mc.cores = 2)
+  list_var_mosaiced <- mclapply(list_days,FUN=mosaic_m_raster_list,list_param=list_param_mosaic,mc.preschedule=FALSE,mc.cores = 2)
+  #list_var_mosaiced <- mclapply(1:1,FUN=mosaic_m_raster_list,list_param=list_param_mosaic,mc.preschedule=FALSE,mc.cores = 1)
  #list_var_mosaiced <- mclapply(1:365,FUN=mosaic_m_raster_list,list_param=list_param_mosaic,mc.preschedule=FALSE,mc.cores = 2)
-
+
+  #mosaic for delta surfaces?
+  #mosaic for clim months?

}

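mosaic_m_raster_list comes from the sourced function script and is not shown in this diff; a hedged sketch of what the per-date mosaicking step could look like with the raster package, using the list_param fields built above (an assumption, not the committed implementation):

library(raster)
#Sketch only: mosaic all tile rasters for date j and write a single tif.
mosaic_m_raster_list_sketch <- function(j, list_param){
  rast_list <- lapply(list_param$mosaic_list[[j]], raster)  #load every tile for date j
  rast_list$fun <- mean                                     #average cells where tiles overlap
  rast_list$na.rm <- TRUE
  r_mosaic <- do.call(mosaic, rast_list)                    #raster::mosaic across tiles
  out_file <- file.path(list_param$out_path,
                        paste(list_param$out_rastnames[j], list_param$file_format, sep=""))
  writeRaster(r_mosaic, filename=out_file, NAflag=list_param$NA_flag_val, overwrite=TRUE)
  out_file
}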
-### Now find out how many files were predicted
+######################
+### mosaic clim monthly data...this will be a function later...

-l_pattern_mod1<-paste(".*predicted_mod1_0_1.*",dates_l,".*.tif",sep="")
+#Now get the clim surfaces:
+month_l <- paste("clim_month_",1:12,sep="")
+#l_pattern_models <- lapply(c("_mod1_0_1","_mod2_0_1","_mod3_0_1","_mod_kr_0_1"),
+#                           FUN=function(x){paste(x,"_",month_l,sep="")})

-l_f_t12 <- list.files(path=in_dir_list[12],".*predicted_mod1_0_1.*")
+out_prefix_s <- paste(name_method,c("_mod1_0_01","_mod2_0_01","_mod3_0_01","_mod_kr_0_1"),sep="")
+dates_l #list of predicted dates
+#l_out_rastnames_var <- paste(name_method,"predicted_mod1_0_01_",dates_l,sep="")
+l_out_rastnames_var <- lapply(out_prefix_s,
+                              FUN=function(x){paste(x,"_",month_l,sep="")})

+for (i in 1:nb_mod){
+
+  #this should be the input param for the new function generated automatically...
+  list_tif_files_dates <- lf_clim_tif[[i]]
+  mosaic_list_var <- list_tif_files_dates
+  out_rastnames_var <- l_out_rastnames_var[[i]]
+  #file_format <- ".tif"
+  #NA_flag_val <- -9999
+
+  j<-1 #date index for loop
+  list_param_mosaic<-list(j,mosaic_list_var,out_rastnames_var,out_dir,file_format,NA_flag_val)
+  names(list_param_mosaic)<-c("j","mosaic_list","out_rastnames","out_path","file_format","NA_flag_val")
+  #list_var_mosaiced <- mclapply(1:2,FUN=mosaic_m_raster_list,list_param=list_param_mosaic,mc.preschedule=FALSE,mc.cores = 2)
+  list_var_mosaiced <- mclapply(1:12,FUN=mosaic_m_raster_list,list_param=list_param_mosaic,mc.preschedule=FALSE,mc.cores = 4)
+}
+
+######################
+#### NOW create mosaic images for daily delta prediction
+#This should be a function!!!
+date_l# <- paste("clim_month_",1:12,sep="")
+#l_pattern_models <- lapply(c("_mod1_0_1.*","_mod2_0_1.*","_mod3_0_1.*","_mod_kr_0_1.*"),
+#                           FUN=function(x){paste("*.",month_l,x,".*.tif",sep="")})
+#l_pattern_models <- lapply(c(".*delta_dailyTmax_mod1_del_0_1.*",".*delta_dailyTmax_mod2_del_0_1.*",".*delta_dailyTmax_mod3_del_0_1.*",".*delta_dailyTmax_mod_kr_del_0_1.*"),
+#                           FUN=function(x){paste(x,dates_l,".*.tif",sep="")})
+
+out_prefix_s <- paste(name_method,c("delta_mod1_0_01","delta_mod2_0_01","delta_mod3_0_01","delta_mod_kr_0_1"),sep="")
+dates_l #list of predicted dates
+#l_out_rastnames_var <- paste(name_method,"predicted_mod1_0_01_",dates_l,sep="")
+l_out_rastnames_var <- lapply(out_prefix_s,
+                              FUN=function(x){paste(x,"_",dates_l,sep="")})
+
+#nb_mod <- 4 #this is set up earlier
+##Add option to specify which dates to mosaic??
+day_to_mosaic <- c("20100101","20100901")
+if (!is.null(day_to_mosaic)){
+  list_days <- match(day_to_mosaic,dates_l)
+}else{
+  list_days <- 1:365 #should check for year in case it has 366, add later!!
+}
+###Make this a function later??
+for (i in 1:nb_mod){
+
+  list_tif_files_dates <- lf_delta_tif[[i]]
+  mosaic_list_var <- list_tif_files_dates
+  out_rastnames_var <- l_out_rastnames_var[[i]]
+  #this is set up earlier...
+  #file_format <- ".tif"
+  #NA_flag_val <- -9999

-l_f_bytiles<-lapply(in_dir_list,function(x){list.files(path=x,pattern=".*predicted_mod1_0_1.*")})
-l_f_bytiles<-lapply(in_dir_list,function(x){list.files(path=x,pattern=".*predicted_mod1_0_1.*")})
-l_f_bytiles<-lapply(in_dir_list,function(x){list.files(path=x,pattern=".*predicted_mod1_0_1.*")})
+  j<-1 #date index for loop
+  list_param_mosaic<-list(j,mosaic_list_var,out_rastnames_var,out_dir,file_format,NA_flag_val)
+  names(list_param_mosaic)<-c("j","mosaic_list","out_rastnames","out_path","file_format","NA_flag_val")
+  #list_var_mosaiced <- mclapply(1:2,FUN=mosaic_m_raster_list,list_param=list_param_mosaic,mc.preschedule=FALSE,mc.cores = 2)
+  list_var_mosaiced <- mclapply(list_days,FUN=mosaic_m_raster_list,list_param=list_param_mosaic,mc.preschedule=FALSE,mc.cores = 2)
+}

+### Now find out how many files were predicted

-#system("scp -p ./*.tif parmentier@atlas.nceas.ucsb.edu:/data/project/layers/commons/NEX_data/output_run2_global_analyses_05122014")

######################################################
####### PART 3: EXAMINE STATIONS AND MODEL FITTING ###
...
######################################################
####### PART 4: Get shapefile tiling with centroids ###

-system("scp -p /nobackupp4/aguzman4/climateLayers/output4/subset/shapefiles/* parmentier@atlas.nceas.ucsb.edu:/data/project/layers/commons/NEX_data/shapefiles")
#in_dir_shp <- "/nobackupp4/aguzman4/climateLayers/output4/subset/shapefiles/"

#get shape files for the region being assessed:
...

########### LAST PART: COPY SOME DATA BACK TO ATLAS #####

-#Copy summary and mosaic back to atlas
-system("scp -p ./*.txt parmentier@atlas.nceas.ucsb.edu:/data/project/layers/commons/NEX_data/output_run2_global_analyses_05122014")
-system("scp -p ./*.txt ./*.tif parmentier@atlas.nceas.ucsb.edu:/data/project/layers/commons/NEX_data/output_run2_global_analyses_05122014")
+#### FIRST COPY DATA FOR SPECIFIC TILES #####
+#Copy specific tiles info back...This assumes that the tree structure
+#has been created on ATLAS:
+#../$out_dir/output/tile_coord
+
+list_tile_scp <- c(1,2)
+
+for (i in 1:length(list_tile_scp)){
+  tile_nb <- list_tile_scp[i]
+  #nb_mod <- 3+1 #set up earlier
+  date_selected <- c("20100101","20100901") #should be set up earlier
+  date_index <- c(1,244) #list_day??
+  #tile_nb <- 1
+
+  in_dir_tile <- basename(df_tile_processed$path_NEX[tile_nb])
+  #/data/project/layers/commons/NEX_data/output_run2_05122014/output
+  Atlas_dir <- file.path(file.path("/data/project/layers/commons/NEX_data/",basename(out_dir),"output"),in_dir_tile)
+  Atlas_hostname <- "parmentier@atlas.nceas.ucsb.edu"
+  #filenames_NEX <- list_raster_obj_files[tile_nb] #copy raster prediction object
+  #cmd_str <- paste("scp -p",filenames_NEX,paste(Atlas_hostname,Atlas_dir,sep=":"), sep=" ")
+  #system(cmd_str)
+
+  #Now copy back tif for specific dates and tile (date 1 and date 244)
+  #nb_mod <- 3+1
+  lf_cp_day <- vector("list",length=length(date_selected))
+  #Get relevant daily info
+  for(i in 1:length(date_selected)){
+    #d
+    index <- date_index[i]
+    #get all predicted tmax files for all models and specific date, tile
+    lf_cp_pred_tif <- unlist(lapply(1:nb_mod,FUN=function(x){lf_pred_tif[[x]][[index]][[tile_nb]]}))
+    lf_cp_delta_tif <- unlist(lapply(1:nb_mod,FUN=function(x){lf_delta_tif[[x]][[index]][[tile_nb]]}))
+    lf_cp_day[[i]] <- unlist(c(unlist(lf_cp_pred_tif),unlist(lf_cp_delta_tif)))
+  }
+  #get the monthly info...
+  month_index <- 1:12 #can subset
+  #month_index <- c(1,9) #can subset
+  lf_cp_month <- vector("list",length=length(month_index))
+
+  for(i in 1:length(month_index)){
+    #d
+    index <- month_index[i]
+    #get all predicted tmax files for all models and specific date, tile
+    lf_cp_month[[i]] <- unlist(lapply(1:nb_mod,FUN=function(x){lf_clim_tif[[x]][[index]][[tile_nb]]}))
+  }
+  ##Add RData object for specified tile...
+  lf_cp_RData_tif <- c(lf_covar_obj[tile_nb],lf_covar_tif[tile_nb],list_raster_obj_files[[tile_nb]])
+  #unlist(lf_cp_RData_tif)
+  lf_cp <- unlist(c(lf_cp_day,lf_cp_month,lf_cp_RData_tif))
+  #lf_cp <- c(unlist(c(lf_cp_day,lf_cp_month)),list_raster_obj_files[tile_nb])
+  filenames_NEX <- paste(lf_cp,collapse=" ")
+  #filenames_NEX <- paste(list_tif_files_dates[[1]][[6]],list_tif_files_dates[[244]][[6]],lf_covar_tif[6]) #to get first date and tile 6 prediction mod1
+  cmd_str <- paste("scp -p",filenames_NEX,paste(Atlas_hostname,Atlas_dir,sep=":"), sep=" ")
+  system(cmd_str)
+}
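The scp pattern above (paste the file list, build the command, call system) repeats several times in this last part; note also that the inner date and month loops reuse the index i of the outer tile loop. A small helper, sketched here as a suggestion rather than part of the commit, keeps the copy step in one place:

#Sketch only: copy a set of files to a directory on Atlas via scp.
copy_to_atlas <- function(filenames, atlas_dir,
                          atlas_hostname="parmentier@atlas.nceas.ucsb.edu"){
  cmd_str <- paste("scp -p", paste(filenames, collapse=" "),
                   paste(atlas_hostname, atlas_dir, sep=":"))
  system(cmd_str)
}
#e.g. copy_to_atlas(lf_cp, Atlas_dir)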

-#Copy specific tiles info back...
-#tile_6: Oregon (45.0_-120.0)
-system("scp -p ./*.txt ./*.tif parmentier@atlas.nceas.ucsb.edu:/data/project/layers/commons/NEX_data/output_run2_05122014/output/45.0_-120.0")
-#tile_8: Californi-Arizona
-system("scp -p ./*.txt ./*.tif parmentier@atlas.nceas.ucsb.edu:/data/project/layers/commons/NEX_data/output_run2_05122014/output/45.0_-120.0")
-df_tile_processed$path_NEX
-list_raster_obj_files[6]
-list_raster_obj_files[8]
-Atlas_dir <- "/data/project/layers/commons/NEX_data/output_run2_05122014/output/45.0_-120.0"
-Atlas_hostname <- "parmentier@atlas.nceas.ucsb.edu"
+#### COPY SHAPEFILES, TIF MOSAIC, COMBINED TEXT FILES etc...

-#Oregon tile
-filenames_NEX <- list_raster_obj_files[6] #copy raster prediction object
+#copy shapefiles defining regions
+Atlas_dir <- file.path("/data/project/layers/commons/NEX_data/",basename(out_dir),"output/subset/shapefiles")
+Atlas_hostname <- "parmentier@atlas.nceas.ucsb.edu"
+lf_cp_shp <- list.files(in_dir_shp, ".shp",full.names=T)
+filenames_NEX <- paste(lf_cp_shp,collapse=" ") #shapefiles to copy
cmd_str <- paste("scp -p",filenames_NEX,paste(Atlas_hostname,Atlas_dir,sep=":"), sep=" ")
system(cmd_str)
-#Now copy back tif for specific dates and tile (date 1 and date 244)
-date_selected <- c("20100101","20100901")
-date_index <- c(1,244)
-tile_nb <- 6
-nb_mod <- 3+1
-lf_cp <- vector("list",length=length(date_selected))
-for(i in 1:length(date_selected)){
-  index <- date_index[i]
-  #get all predicted tmax files for all models and specific date, tile
-  lf_cp[[i]] <- unlist(lapply(1:nb_mod,FUN=function(x){lf_pred_tif[[x]][[index]][[tile_nb]]}))
-}

-lf_clim_tiff[[]]
+#Copy summary text files and mosaic back to Atlas

-filenames_NEX <- paste(list_tif_files_dates[[1]][[6]],list_tif_files_dates[[244]][[6]],lf_covar_tif[6]) #to get first date and tile 6 prediction mod1
+Atlas_dir <- file.path("/data/project/layers/commons/NEX_data/",basename(out_dir))#,"output/subset/shapefiles")
+Atlas_hostname <- "parmentier@atlas.nceas.ucsb.edu"
+lf_cp_f <- list.files(out_dir,full.names=T) #copy all files, can filter later
+filenames_NEX <- paste(lf_cp_f,collapse=" ") #files to copy back
cmd_str <- paste("scp -p",filenames_NEX,paste(Atlas_hostname,Atlas_dir,sep=":"), sep=" ")
system(cmd_str)

-#California-Arizona tile
-Atlas_dir <- "/data/project/layers/commons/NEX_data/output_run2_05122014/output/35.0_-115.0"
-filenames_NEX <- paste(list_raster_obj_files[8],lf_covar_obj[8]) #copy raster prediction object
-cmd_str <- paste("scp -p",filenames_NEX,paste(Atlas_hostname,Atlas_dir,sep=":"), sep=" ")
-system(cmd_str)
-#Now copy back tif for specific dates and tile (date 1 and date 244)
-filenames_NEX <- paste(list_tif_files_dates[[1]][[8]],list_tif_files_dates[[244]][[8]],lf_covar_tif[8]) #to get first date and tile 6 prediction mod1
-cmd_str <- paste("scp -p",filenames_NEX,paste(Atlas_hostname,Atlas_dir,sep=":"), sep=" ")
-system(cmd_str)
+system("scp -p ./*.txt parmentier@atlas.nceas.ucsb.edu:/data/project/layers/commons/NEX_data/output_run2_global_analyses_05122014")
+system("scp -p ./*.txt ./*.tif parmentier@atlas.nceas.ucsb.edu:/data/project/layers/commons/NEX_data/output_run2_global_analyses_05122014")
+
+system("scp -p /nobackupp4/aguzman4/climateLayers/output4/subset/shapefiles/* parmentier@atlas.nceas.ucsb.edu:/data/project/layers/commons/NEX_data/shapefiles")

##################### END OF SCRIPT ######################
Script for NEX run assessment, part 1: major changes to mosaic delta, deviation and monthly predictions, and to copy results back.