Revision 36198b61
Added by Benoit Parmentier about 8 years ago
climate/research/oregon/interpolation/global_product_assessment_part0.R
@@ -9,7 +9,7 @@
 #
 #AUTHOR: Benoit Parmentier
 #CREATED ON: 10/27/2016
-#MODIFIED ON: 11/07/2016
+#MODIFIED ON: 11/09/2016
 #Version: 1
 #PROJECT: Environmental Layers project
 #COMMENTS:
@@ -20,7 +20,7 @@
 #source /nobackupp6/aguzman4/climateLayers/sharedModules2/etc/environ.sh
 #
 #setfacl -Rm u:aguzman4:rwx /nobackupp6/aguzman4/climateLayers/LST_tempSpline/
-#COMMIT: fixing rasterize function and computing maximum overlap for each pixel in a region
+#COMMIT: matching extent to region for tile predicted
 
 #################################################################################################
 
@@ -128,7 +128,8 @@
 plotting_figures <- TRUE #running part2 of assessment to generate figures... # param 14
 num_cores <- 6 #number of cores used # param 13, arg 8
 #python_bin <- "/nobackupp6/aguzman4/climateLayers/sharedModules2/bin" #PARAM 30
-python_bin <- "/usr/bin" #PARAM 30
+#python_bin <- "/usr/bin" #PARAM 30, NCEAS
+python_bin <- "/nobackupp6/aguzman4/climateLayers/sharedModules2/bin" #PARAM 30
 NA_flag_val_mosaic <- -32768
 in_dir_list_filename <- NULL #if NULL, use the in_dir directory to search for info
 #countries_shp <-"/data/project/layers/commons/NEX_data/countries.shp" #Atlas
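The change above hard-codes which python_bin is active: the NCEAS path (/usr/bin) is now commented out and the NEX shared-modules path is used instead. As a purely illustrative sketch (the helper name and the hostname patterns below are assumptions, not part of the revision), the same switch could be driven by the machine name so the script does not need editing when moving between servers:

# Sketch only: choose the Python/GDAL binaries directory from the node name.
# The two paths are the ones already used in the script; the patterns are guesses.
select_python_bin <- function() {
  host <- Sys.info()[["nodename"]]
  if (grepl("pfe|nobackup|bridge", host)) {
    "/nobackupp6/aguzman4/climateLayers/sharedModules2/bin"  # NEX/Pleiades modules
  } else {
    "/usr/bin"                                               # NCEAS server default
  }
}
python_bin <- select_python_bin()  #PARAM 30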
@@ -140,7 +141,7 @@
 #parent output dir for the current script analyses
 y_var_name <- "dailyTmax" #PARAM1
 out_suffix <- "predictions_assessment_reg6_10302016"
-file_format <- ".rst" #PARAM 9
+#file_format <- ".rst" #PARAM 9
 NA_value <- -9999 #PARAM10
 NA_flag_val <- NA_value
 #multiple_region <- TRUE #PARAM 12
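NA_value and NA_flag_val set above (-9999) are the nodata codes carried through the workflow when rasters are written out later. A minimal, self-contained reminder of how such a flag is typically passed to raster::writeRaster (the toy raster and output filename are made up for illustration):

library(raster)

NA_flag_val <- -9999                               # as set in the script (PARAM10)
r <- raster(nrows=10, ncols=10, vals=runif(100))   # toy raster
r[r > 0.8] <- NA                                   # introduce some missing pixels
writeRaster(r, filename="tmax_example.tif", format="GTiff",
            NAflag=NA_flag_val, overwrite=TRUE)    # nodata code recorded in the output file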
@@ -375,10 +376,13 @@
 tile_coord <- basename(in_dir_reg[1])
 date_val <- df_missing$date[1]
 
-function_product_assessment_part0_functions <- "global_product_assessment_part0_functions_11052016.R"
-source(file.path(script_path,function_product_assessment_part0_functions)) #source all functions used in this script
+### use rasterize
+spdf_tiles <- do.call(bind, shps_tiles) #bind all tiles together in one shapefile
 
-undebug(rasterize_tile_day)
+#function_product_assessment_part0_functions <- "global_product_assessment_part0_functions_11052016.R"
+#source(file.path(script_path,function_product_assessment_part0_functions)) #source all functions used in this script
+
+#undebug(rasterize_tile_day)
 list_predicted <- rasterize_tile_day(1,
                                      list_spdf=shps_tiles,
                                      df_missing=df_missing,
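In the hunk above, the tile outlines are bound into a single SpatialPolygonsDataFrame and rasterize_tile_day() (defined in the sourced functions file, not shown in this diff) is called. The sketch below only illustrates the underlying idea of counting how many tile footprints cover each pixel of a reference grid; shps_tiles and infile_mask are taken from the script, everything else is illustrative:

library(sp)
library(raster)   # provides bind() and rasterize() for Spatial* objects

r_ref <- raster(infile_mask)               # reference grid (the region mask, assumption)
spdf_tiles <- do.call(bind, shps_tiles)    # all tile footprints in one object, as in the script

# Rasterize each tile to a 1/0 layer on the reference grid and sum the layers:
# the result is the number of tiles overlapping each pixel.
tile_layers <- lapply(shps_tiles, function(shp) rasterize(shp, r_ref, field=1, background=0))
r_overlap <- sum(stack(tile_layers))
plot(r_overlap, main="Number of overlapping tiles per pixel")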
@@ -461,13 +465,42 @@
 plot(r_table,col=my_col,legend=F,box=F,axes=F)
 legend(x='topright', legend =rat$legend,fill = my_col,cex=0.8)
 
-r <- raster(matrix(runif(20),5,4))
-r[r>.5] <- NA
-g <- as(r, 'SpatialGridDataFrame')
-p <- as(r, 'SpatialPixels')
-p_spdf <- as(r_overlap_m,"SpatialPointsDataFrame")
-plot(r)
-points(p)
+#r <- raster(matrix(runif(20),5,4))
+#r[r>.5] <- NA
+#g <- as(r, 'SpatialGridDataFrame')
+#p <- as(r, 'SpatialPixels')
+#p_spdf <- as(r_overlap_m,"SpatialPointsDataFrame")
+#plot(r)
+#points(p)
+
+### now assign id and match extent for tiles
+
+lf_files <- unlist(list_predicted)
+rast_ref_name <- infile_mask
+rast_ref <- rast_ref_name
+
+##Matching resolution is probably only necessary for the r mosaic function
+#Modify later to take into account option R or python...
+list_param_raster_match <- list(lf_files,rast_ref,file_format,python_bin,out_suffix_str_tmp,out_dir_str)
+names(list_param_raster_match) <- c("lf_files","rast_ref","file_format","python_bin","out_suffix","out_dir_str")
+
+#undebug(raster_match)
+#r_test <- raster_match(1,list_param_raster_match)
+#r_test <- raster(raster_match(1,list_param_raster_match))
+
+list_tiles_predicted_m <- unlist(mclapply(1:length(lf_files),
+                                          FUN=raster_match,list_param=list_param_raster_match,
+                                          mc.preschedule=FALSE,mc.cores = num_cores))
+
+#r_stack <- stack(list_tiles_predicted_m)
+list_mask_out_file_name <- lf_files
+mclapply(1:length(list_tiles_predicted_m),
+         FUN=function(i){mask(raster(list_tiles_predicted_m[i]),r_mask,filename=list_mask_out_file_name[i])},
+         mc.preschedule=FALSE,mc.cores = num_cores)
+#r_stack_masked <- mask(r, m2) #, maskvalue=TRUE)
+
+##Now loop through every day; if missing, then generate a raster showing a map of the number of predictions
+
 #http://stackoverflow.com/questions/19586945/how-to-legend-a-raster-using-directly-the-raster-attribute-table-and-displaying
 #
 #http://gis.stackexchange.com/questions/148398/how-does-spatial-polygon-over-polygon-work-to-when-aggregating-values-in-r
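raster_match() comes from the sourced functions file and is run here through mclapply to force every predicted tile onto the reference grid before masking and mosaicking; its internals (R or gdal/python driven via python_bin) are not shown in this diff. The sketch below is one way the same match-then-mask step could be written in plain R with resample() and mask(), assuming lf_files, infile_mask, out_suffix and num_cores as defined in the script; the helper name is hypothetical:

library(raster)
library(parallel)

r_ref  <- raster(infile_mask)   # target grid: the region mask (assumption)
r_mask <- r_ref                 # same layer also used to clip to the region

match_and_mask <- function(i, lf_files, r_ref, r_mask, out_suffix) {
  r_in   <- raster(lf_files[i])
  r_m    <- resample(r_in, r_ref, method="ngb")   # snap extent/resolution to the reference grid
  r_out  <- mask(r_m, r_mask)                     # drop pixels outside the region
  out_fn <- file.path(dirname(lf_files[i]),
                      paste0("match_", out_suffix, "_", basename(lf_files[i])))
  writeRaster(r_out, filename=out_fn, overwrite=TRUE)
  out_fn
}

list_tiles_predicted_m <- unlist(mclapply(seq_along(lf_files),
                                          FUN=match_and_mask,
                                          lf_files=lf_files, r_ref=r_ref, r_mask=r_mask,
                                          out_suffix=out_suffix,
                                          mc.preschedule=FALSE, mc.cores=num_cores))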
@@ -478,55 +511,6 @@
 #4. generate a table? for each pixel it can say if is part of a specific tile
 #5. workout a formula to generate the number of predictions for each pixel based on tile predicted for each date!!
 
-### use rasterize
-spdf_tiles <- do.call(bind, shps_tiles) #bind all tiles together in one shapefile
-spdf_tiles <- do.call(intersect, shps_tiles)
-
-### Now use intersect to retain actual overlap
-
-for(i in 1:length(shps_tiles)){
-  overlap_intersect <- intersect(shps_tiles[[1]],shps_tiles[[i]])
-}
-
-overlap_intersect <- lapply(1:length(shps_tiles),FUN=function(i){intersect(shps_tiles[[1]],shps_tiles[[i]])})
-#test <- overlap_intersect <- intersect(shps_tiles[[1]],shps_tiles[[2]]))
-
-names(overlap_intersect) <- basename(in_dir_reg)
-shp_selected <- unlist(lapply(1:length(overlap_intersect),function(i){!is.null(overlap_intersect[[i]])}))
-test_list <- overlap_intersect[shp_selected]
-spdf_tiles_test <- do.call(bind, test_list) #combines all intersect!!
-#ll <- ll[ ! sapply(ll, is.null) ]
-test <- overlap_intersect[!lapply(overlap_intersect,is.null)]
-spdf_tiles_test <- do.call(bind, test) #combines all intersect!!
-#ll <- ll[ ! sapply(ll, is.null) ]
-spdf_tiles <- do.call(bind, overlap_intersect[1:4]) #combines all intersect!!
-spdf_tiles_test <- do.call(bind, test) #combines all intersect!!
-
-plot(spdf_tiles_test,add=T,border="green",usePolypath = FALSE) #added usePolypath following error on brige and NEX
-
-matrix_overlap%*%df_missing[1,1:26]
-
-
-## For each day can do overalp matrix* prediction
-## if prediction and overlap then 1 else 0, if no-overlap is then NA
-## then for each tile compute the number of excepted predictions taken into account in a tile
-
-#combine polygon
-#http://gis.stackexchange.com/questions/155328/merging-multiple-spatialpolygondataframes-into-1-spdf-in-r
-
-#http://gis.stackexchange.com/questions/116388/count-overlapping-polygons-in-single-shape-file-with-r
-
-#### Use the predictions directory
-#By region
-#For each polygon/tile find polygon overlapping with count and ID (like list w)
-#for each polygon/tile and date find if there is a prediction using the tif (multiply number)
-#for each date of year report data in table.
-
-#go through table and hsow if there are missing data (no prediction) or report min predictions for tile set?
-
-#for each polygon find you overlap!!
-#plot number of overlap
-#for specific each find prediction...
 
 ########################
 #### Step 3: combine overlap information and number of predictions by day
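The block removed above (and kept, commented out, after the end of the script in the last hunk) explores the bookkeeping described in its comments: build a tile-by-tile overlap matrix, then for each date multiply it by a 0/1 vector recording which tiles were actually predicted, giving the number of predictions available to each tile. A self-contained toy version of that matrix product (the name matrix_overlap and the 0/1 flags mirror the script's objects, but the values here are invented):

# Toy overlap matrix for 3 tiles: tiles 1 and 2 overlap each other, tile 3 stands alone.
matrix_overlap <- matrix(c(1, 1, 0,
                           1, 1, 0,
                           0, 0, 1),
                         nrow=3, byrow=TRUE,
                         dimnames=list(paste0("tile", 1:3), paste0("tile", 1:3)))

# 0/1 flags for one date: was a prediction produced for each tile?
predicted_day <- c(tile1=1, tile2=1, tile3=0)

# Predictions available to each tile on that date (itself plus predicted overlapping neighbours);
# a zero flags a tile/date combination with no prediction at all.
matrix_overlap %*% predicted_day
#       [,1]
# tile1    2
# tile2    2
# tile3    0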
@@ -538,3 +522,51 @@
 }
 
 ############################ END OF SCRIPT ##################################
+
+# spdf_tiles <- do.call(intersect, shps_tiles)
+#
+# ### Now use intersect to retain actual overlap
+#
+# for(i in 1:length(shps_tiles)){
+#   overlap_intersect <- intersect(shps_tiles[[1]],shps_tiles[[i]])
+# }
+#
+# overlap_intersect <- lapply(1:length(shps_tiles),FUN=function(i){intersect(shps_tiles[[1]],shps_tiles[[i]])})
+# #test <- overlap_intersect <- intersect(shps_tiles[[1]],shps_tiles[[2]]))
+#
+# names(overlap_intersect) <- basename(in_dir_reg)
+# shp_selected <- unlist(lapply(1:length(overlap_intersect),function(i){!is.null(overlap_intersect[[i]])}))
+# test_list <- overlap_intersect[shp_selected]
+# spdf_tiles_test <- do.call(bind, test_list) #combines all intersect!!
+# #ll <- ll[ ! sapply(ll, is.null) ]
+# test <- overlap_intersect[!lapply(overlap_intersect,is.null)]
+# spdf_tiles_test <- do.call(bind, test) #combines all intersect!!
+# #ll <- ll[ ! sapply(ll, is.null) ]
+# spdf_tiles <- do.call(bind, overlap_intersect[1:4]) #combines all intersect!!
+# spdf_tiles_test <- do.call(bind, test) #combines all intersect!!
+#
+# plot(spdf_tiles_test,add=T,border="green",usePolypath = FALSE) #added usePolypath following error on brige and NEX
+#
+# matrix_overlap%*%df_missing[1,1:26]
+#
+#
+# ## For each day can do overalp matrix* prediction
+# ## if prediction and overlap then 1 else 0, if no-overlap is then NA
+# ## then for each tile compute the number of excepted predictions taken into account in a tile
+#
+# #combine polygon
+# #http://gis.stackexchange.com/questions/155328/merging-multiple-spatialpolygondataframes-into-1-spdf-in-r
+#
+# #http://gis.stackexchange.com/questions/116388/count-overlapping-polygons-in-single-shape-file-with-r
+#
+# #### Use the predictions directory
+# #By region
+# #For each polygon/tile find polygon overlapping with count and ID (like list w)
+# #for each polygon/tile and date find if there is a prediction using the tif (multiply number)
+# #for each date of year report data in table.
+#
+# #go through table and hsow if there are missing data (no prediction) or report min predictions for tile set?
+#
+# #for each polygon find you overlap!!
+# #plot number of overlap
+# #for specific each find prediction...
matching extent to region for tile predicted