Revision 768334c2
Added by Benoit Parmentier about 8 years ago
climate/research/oregon/interpolation/global_product_assessment_part0.R | ||
---|---|---|
9 | 9 |
# |
10 | 10 |
#AUTHOR: Benoit Parmentier |
11 | 11 |
#CREATED ON: 10/27/2016 |
12 |
#MODIFIED ON: 11/20/2016
|
|
12 |
#MODIFIED ON: 11/21/2016
|
|
13 | 13 |
#Version: 1 |
14 | 14 |
#PROJECT: Environmental Layers project |
15 | 15 |
#COMMENTS: |
... | ... | |
20 | 20 |
#source /nobackupp6/aguzman4/climateLayers/sharedModules2/etc/environ.sh |
21 | 21 |
# |
22 | 22 |
#setfacl -Rm u:aguzman4:rwx /nobackupp6/aguzman4/climateLayers/LST_tempSpline/ |
23 |
#COMMIT: debugging function of number of predictions for day with missing tiles
|
|
23 |
#COMMIT: moving function of number of predictions for day with missing tiles to function script
|
|
24 | 24 |
|
25 | 25 |
################################################################################################# |
26 | 26 |
|
... | ... | |
86 | 86 |
source(file.path(script_path,function_assessment_part3)) #source all functions used in this script |
87 | 87 |
|
88 | 88 |
#Product assessment |
89 |
function_product_assessment_part0_functions <- "global_product_assessment_part0_functions_11152016b.R"
|
|
89 |
function_product_assessment_part0_functions <- "global_product_assessment_part0_functions_11212016.R"
|
|
90 | 90 |
source(file.path(script_path,function_product_assessment_part0_functions)) #source all functions used in this script |
91 | 91 |
##Don't load part 1 and part2, mosaic package does not work on NEX |
92 | 92 |
#function_product_assessment_part1_functions <- "global_product_assessment_part1_functions_09192016b.R" |
... | ... | |
174 | 174 |
|
175 | 175 |
|
176 | 176 |
##### prepare list of parameters for call of function |
177 |
function_product_assessment_part0_functions <- "global_product_assessment_part0_functions_11212016b.R" |
|
178 |
source(file.path(script_path,function_product_assessment_part0_functions)) #source all functions used in this script |
|
177 | 179 |
|
178 | 180 |
list_param_predictions_tiles_missing <- list(in_dir1,region_name,y_var_name,interpolation_method,out_suffix,out_dir, |
179 | 181 |
create_out_dir_param,proj_str,list_year_predicted,file_format,NA_flag_val, |
... | ... | |
187 | 189 |
#"threshold_missing_day", |
188 | 190 |
"pred_mod_name","metric_name") |
189 | 191 |
|
190 |
debug(predictions_tiles_missing_fun) |
|
192 |
#debug(predictions_tiles_missing_fun)
|
|
191 | 193 |
predictions_tiles_missing_fun(1,list_param=list_param_predictions_tiles_missing) |
192 | 194 |
|
193 | 195 |
#### Function to check missing tiles and estimate potential gaps |
194 |
#### Function to check missing tiles and estimate potential gaps in predictions
#
# For the year selected by index `i` into list_param$list_year_predicted, this:
#   Step 1: inventories predicted tif files per tile and flags missing dates,
#           writing two tables (files by tile, missing flags by tile).
#   Step 2: reads the tile shapefiles, computes pairwise tile overlap, rasterizes
#           each tile footprint and mosaics them into an overlap-count raster.
#   Step 3: combines overlap and per-day missingness to generate rasters of the
#           number of predictions per pixel for each day with missing tiles.
#
# Args:
#   i          : integer index into list_param$list_year_predicted (year to process).
#   list_param : named list of parameters (in_dir1, region_name, y_var_name,
#                interpolation_method, out_suffix, out_dir, create_out_dir_param,
#                proj_str, list_year_predicted, file_format, NA_flag_val, num_cores,
#                plotting_figures, item_no, day_to_mosaic_range, countries_shp,
#                pred_mod_name, metric_name).
#
# Returns: list with df_missing, df_lf_tiles_time_series, matrix_overlap,
#          the masked overlap raster filename, and the per-day prediction objects.
#
# NOTE(review): depends on helper functions sourced by the calling script
# (create_dir_fun, check_missing, centroids_shp_fun, remove_errors_list,
# rasterize_tile_day, raster_match, mosaic_python_merge,
# generate_raster_number_of_prediction_by_day) and on globals infile_mask and
# python_bin, plus packages raster/sp/rgdal/parallel loaded by the caller.
predictions_tiles_missing_fun <- function(i, list_param){

  ##############################
  #### Parameters and constants

  in_dir1 <- list_param$in_dir1
  region_name <- list_param$region_name #e.g. "reg4", run only for one region
  y_var_name <- list_param$y_var_name #e.g. "dailyTmax" #PARAM3
  #FIX: these four were read from unrelated list objects
  #(list_param_run_assessment_prediction, list_param_run) instead of list_param
  interpolation_method <- list_param$interpolation_method #e.g. "gam_CAI" #PARAM4
  out_suffix <- list_param$out_suffix #output suffix #PARAM5
  out_dir <- list_param$out_dir #PARAM6
  create_out_dir_param <- list_param$create_out_dir_param #if TRUE output dir created #PARAM7
  proj_str <- list_param$proj_str #e.g. CRS("+proj=longlat +ellps=WGS84 ...") #PARAM8
  list_year_predicted <- list_param$list_year_predicted #e.g. 1984:2004
  file_format <- list_param$file_format #e.g. ".tif", format for mosaiced files #PARAM10
  NA_flag_val <- list_param$NA_flag_val #e.g. -9999, no-data value #PARAM11
  num_cores <- list_param$num_cores #number of cores used #PARAM13
  plotting_figures <- list_param$plotting_figures #if TRUE generate png for missing date

  ##for plotting assessment function
  item_no <- list_param$item_no #position of date in parsed filenames #PARAM14
  day_to_mosaic_range <- list_param$day_to_mosaic_range #PARAM15
  countries_shp <- list_param$countries_shp #PARAM17
  #threshold_missing_day <- list_param$threshold_missing_day #PARAM20
  pred_mod_name <- list_param$pred_mod_name #e.g. "mod1"
  metric_name <- list_param$metric_name #kept for interface consistency (unused here)

  ########################## START SCRIPT #########################################

  out_dir <- in_dir1 #FIX: was `in_dir` (undefined in this scope)
  if (create_out_dir_param == TRUE){
    out_dir <- create_dir_fun(out_dir, out_suffix)
    setwd(out_dir)
  } else {
    setwd(out_dir) #use previously defined directory
  }
  setwd(out_dir)

  #FIX: was list_param_run_assessment_prediction$list_year_predicted[i]
  year_predicted <- list_param$list_year_predicted[i]

  in_dir1_reg <- file.path(in_dir1, region_name)

  list_outfiles <- vector("list", length = 14) #collect names of output files

  in_dir_list <- list.dirs(path = in_dir1_reg, recursive = FALSE) #tiles processed for this run
  in_dir_list_all <- in_dir_list
  #directory holding the tile shapefiles
  in_dir_subset <- in_dir_list_all[grep("subset", basename(in_dir_list_all), invert = FALSE)]
  in_dir_shp <- file.path(in_dir_subset, "shapefiles")

  #select only directories used for predictions (tile dirs named by coordinates)
  in_dir_reg <- in_dir_list[grep(".*._.*.", basename(in_dir_list), invert = FALSE)]
  in_dir_list <- in_dir_reg
  in_dir_list <- in_dir_list[grep("bak", basename(in_dir_list), invert = TRUE)] #drop backup dirs

  #list of shapefiles used to define tiles
  in_dir_shp_list <- list.files(in_dir_shp, ".shp", full.names = TRUE)

  ##raster_prediction object: contains testing and training stations with RMSE and model object
  in_dir_list_tmp <- file.path(in_dir_list, year_predicted)
  list_raster_obj_files <- mclapply(in_dir_list_tmp,
                                    FUN = function(x){
                                      list.files(path = x,
                                                 pattern = "^raster_prediction_obj.*.RData",
                                                 full.names = TRUE)},
                                    mc.preschedule = FALSE, mc.cores = num_cores)
  #TODO: add stop message here if no raster object found in any tile

  list_names_tile_coord <- lapply(list_raster_obj_files, FUN = function(x){basename(dirname(x))})
  list_names_tile_id <- paste("tile", seq_along(list_raster_obj_files), sep = "_")
  names(list_raster_obj_files) <- list_names_tile_id

  #FIX: pattern was hard-coded "gam_CAI_dailyTmax"; generalized to the parameters
  #e.g. gam_CAI_dailyTmax_predicted_mod1_0_1_20001231_30_1-39.7_165.1.tif
  pattern_predicted <- paste0(interpolation_method, "_", y_var_name,
                              "_predicted_", pred_mod_name, ".*.tif")
  list_lf_raster_tif_tiles <- mclapply(in_dir_list_tmp,
                                       FUN = function(x){
                                         list.files(path = x, pattern = pattern_predicted,
                                                    full.names = TRUE)},
                                       mc.preschedule = FALSE, mc.cores = num_cores)
  list_names_tile_coord <- lapply(list_lf_raster_tif_tiles,
                                  FUN = function(x){basename(dirname(dirname(x)))})
  list_names_tile_id <- paste("tile", seq_along(list_lf_raster_tif_tiles), sep = "_")
  names(list_lf_raster_tif_tiles) <- list_names_tile_id

  #determine the range of dates to screen for missing predictions
  year_processed <- year_predicted
  if (is.null(day_to_mosaic_range)){
    start_date <- paste0(year_processed, "0101") #default: full calendar year
    end_date <- paste0(year_processed, "1231")
  } else {
    start_date <- day_to_mosaic_range[1]
    end_date <- day_to_mosaic_range[2]
  }
  day_to_mosaic <- seq(as.Date(strptime(start_date, "%Y%m%d")),
                       as.Date(strptime(end_date, "%Y%m%d")), "day")
  day_to_mosaic <- format(day_to_mosaic, "%Y%m%d") #back to file-name date format

  in_dir_tiles_tmp <- in_dir1

  ########################
  #### Step 1: flag missing dates per tile
  #FIX: inner index renamed (was `i`, shadowing the function argument)
  test_missing <- try(lapply(seq_along(list_lf_raster_tif_tiles), function(k){
    check_missing(lf = list_lf_raster_tif_tiles[[k]],
                  pattern_str = NULL,
                  in_dir = out_dir,
                  date_start = start_date,
                  date_end = end_date,
                  item_no = item_no, #9 for predicted tiles
                  out_suffix = out_suffix,
                  num_cores = num_cores,
                  out_dir = ".")}))

  df_time_series <- test_missing[[1]]$df_time_series

  #####
  #combine the per-tile file lists in one table
  list_lf <- lapply(seq_along(test_missing),
                    FUN = function(k){as.character(test_missing[[k]]$df_time_series$lf)})
  df_lf_tiles_time_series <- as.data.frame(do.call(cbind, list_lf))
  names(df_lf_tiles_time_series) <- unlist(basename(in_dir_reg))
  filename_df_lf_tiles <- paste0("df_files_by_tiles_predicted_tif_", region_name, "_",
                                 pred_mod_name, "_", out_suffix)
  write.table(df_lf_tiles_time_series, file = filename_df_lf_tiles)

  ###combine the per-tile missing flags in one table
  list_missing <- lapply(seq_along(test_missing),
                         FUN = function(k){test_missing[[k]]$df_time_series$missing})
  df_missing <- as.data.frame(do.call(cbind, list_missing))
  names(df_missing) <- unlist(basename(in_dir_reg))
  df_missing$tot_missing <- rowSums(df_missing, na.rm = FALSE, dims = 1) #NA propagates on purpose
  df_missing$reg <- region_name
  df_missing$date <- day_to_mosaic

  filename_df_missing <- paste0("df_missing_by_tiles_predicted_tif_", region_name, "_",
                                pred_mod_name, "_", out_suffix)
  write.table(df_missing, file = filename_df_missing)

  ########################
  #### Step 2: examine overlap between tiles

  path_to_shp <- dirname(countries_shp)
  layer_name <- sub(".shp", "", basename(countries_shp))
  reg_layer <- readOGR(path_to_shp, layer_name)

  #collect info: read in all tile shapefiles and their centroids
  #(dropped the redundant single debug call that was immediately overwritten)
  obj_centroids_shp <- mclapply(seq_along(in_dir_shp_list),
                                FUN = centroids_shp_fun,
                                list_shp_reg_files = in_dir_shp_list,
                                mc.preschedule = FALSE,
                                mc.cores = num_cores)

  centroids_pts <- lapply(obj_centroids_shp, FUN = function(x){x$centroid})
  shps_tiles <- lapply(obj_centroids_shp, FUN = function(x){x$spdf})

  #remove try-error polygons: we lose tiles that extend beyond -180 deg
  tmp <- shps_tiles
  shps_tiles <- remove_errors_list(shps_tiles)
  n_tiles_with_error <- length(tmp) - length(shps_tiles) #number of tiles with error message

  tmp_pts <- centroids_pts
  centroids_pts <- remove_errors_list(centroids_pts)

  r_mask <- raster(infile_mask)
  plot(r_mask) #FIX: was plot(r), `r` undefined in this scope
  plot(shps_tiles[[1]], add = TRUE, border = "blue", usePolypath = FALSE) #usePolypath: avoids error on bridge and NEX

  ## find pairwise overlap between tiles
  #http://gis.stackexchange.com/questions/156660/loop-to-check-multiple-polygons-overlap-in-r
  #FIX: loop indices renamed (was i/j; `i` clobbered the function argument)
  matrix_overlap <- matrix(data = NA, nrow = length(shps_tiles), ncol = length(shps_tiles))
  for (ti in seq_along(shps_tiles)){
    for (tj in seq_along(shps_tiles)){
      matrix_overlap[ti, tj] <- as.numeric(over(shps_tiles[[ti]], shps_tiles[[tj]]))
    }
  }

  names(shps_tiles) <- basename(unlist(in_dir_reg))
  r_ref <- raster(list_lf_raster_tif_tiles[[1]][1])
  list_r_ref <- lapply(seq_along(in_dir_reg),
                       function(k){raster(list_lf_raster_tif_tiles[[k]][1])})
  tile_spdf <- shps_tiles[[1]]
  tile_coord <- basename(in_dir_reg[1])
  date_val <- df_missing$date[1]

  ### use rasterize: bind all tiles together in one shapefile
  spdf_tiles <- do.call(bind, shps_tiles)

  #rasterize each tile footprint at its reference raster resolution
  #(dropped the redundant single debug call that was immediately overwritten)
  list_predicted <- mclapply(seq_along(shps_tiles),
                             FUN = rasterize_tile_day,
                             list_spdf = shps_tiles,
                             df_missing = df_missing,
                             list_r_ref = list_r_ref,
                             col_name = "overlap",
                             date_val = df_missing$date[1],
                             mc.preschedule = FALSE,
                             mc.cores = num_cores)

  ##check that everything is correct:
  plot(r_mask)
  plot(raster(list_predicted[[1]]), add = TRUE)
  plot(spdf_tiles, add = TRUE, border = "green", usePolypath = FALSE) #FIX: was spdf_tiles_test (undefined)

  ### Make a list of files to mosaic
  out_suffix_str_tmp <- paste0(region_name, "_", out_suffix)
  out_dir_str <- out_dir
  filename_list_predicted <- file.path(out_dir_str,
                                       paste("list_to_mosaics_", out_suffix_str_tmp, ".txt", sep = ""))
  writeLines(unlist(list_predicted), con = filename_list_predicted) #files to mosaic

  out_mosaic_name_predicted_m <- file.path(out_dir_str,
                                           paste("r_overlap_sum_m_", out_suffix_str_tmp, ".tif", sep = ""))

  rast_ref_name <- infile_mask
  mosaic_python <- "/nobackupp6/aguzman4/climateLayers/sharedCode/"
  #sum-merge the tile footprints: pixel value = number of overlapping tiles
  mosaic_overlap_tiles_obj <- mosaic_python_merge(NA_flag_val = NA_flag_val,
                                                  module_path = mosaic_python,
                                                  module_name = "gdal_merge_sum.py",
                                                  input_file = filename_list_predicted,
                                                  out_mosaic_name = out_mosaic_name_predicted_m,
                                                  raster_ref_name = rast_ref_name) #if NA, not taken into account
  r_overlap_raster_name <- mosaic_overlap_tiles_obj$out_mosaic_name
  cmd_str1 <- mosaic_overlap_tiles_obj$cmd_str

  r_overlap <- raster(r_overlap_raster_name)
  r_mask <- raster(infile_mask)

  out_mosaic_name_overlap_masked <- file.path(out_dir_str,
                                              paste("r_overlap_sum_masked_", out_suffix_str_tmp, ".tif", sep = ""))
  r_overlap_m <- mask(r_overlap, r_mask, filename = out_mosaic_name_overlap_masked, overwrite = TRUE)

  ### Figure: maximum overlap (number of predictions assuming no missing tiles)
  r_table <- ratify(r_overlap_m) #build the Raster Attribute Table
  rat <- levels(r_table)[[1]] #unique cell values from the attribute table
  tb_freq <- as.data.frame(freq(r_table))
  rat$legend <- tb_freq$value
  levels(r_table) <- rat

  res_pix <- 800
  col_mfrow <- 1
  row_mfrow <- 1

  png_filename <- file.path(out_dir,
                            paste("Figure_maximum_overlap_", region_name, "_", out_suffix, ".png", sep = ""))
  #FIX: was `variable_name` (not defined in this function); use the y_var_name parameter
  title_str <- paste("Maximum overlap: Number of predicted pixels for ", y_var_name, sep = "")

  png(filename = png_filename, width = col_mfrow * res_pix, height = row_mfrow * res_pix)
  my_col <- rainbow(length(tb_freq$value))
  plot(r_table, col = my_col, legend = FALSE, box = FALSE, axes = FALSE, main = title_str)
  legend(x = "topright", legend = rat$legend, fill = my_col, cex = 0.8)
  dev.off()

  ### now assign id and match extent for tiles
  lf_files <- unlist(list_predicted)
  rast_ref_name <- infile_mask
  rast_ref <- rast_ref_name

  ##Matching resolution is probably only necessary for the R mosaic function
  #NOTE(review): python_bin is a global from the calling script — confirm it is set
  list_param_raster_match <- list(lf_files, rast_ref, file_format, python_bin,
                                  out_suffix_str_tmp, out_dir_str)
  names(list_param_raster_match) <- c("lf_files", "rast_ref", "file_format", "python_bin",
                                      "out_suffix", "out_dir_str")

  list_tiles_predicted_m <- unlist(mclapply(seq_along(lf_files),
                                            FUN = raster_match,
                                            list_param = list_param_raster_match,
                                            mc.preschedule = FALSE, mc.cores = num_cores))

  extension_str <- extension(lf_files)
  raster_name_tmp <- gsub(extension_str, "", basename(lf_files))
  out_suffix_str <- paste0(region_name, "_", out_suffix)
  raster_name <- file.path(out_dir_str,
                           paste(raster_name_tmp, "_", "masked_", out_suffix_str, file_format, sep = ""))

  #mask each matched tile raster to the region mask
  list_mask_out_file_name <- raster_name
  list_tiles_predicted_masked <- unlist(mclapply(seq_along(list_tiles_predicted_m),
                                                 FUN = function(k){
                                                   mask(raster(list_tiles_predicted_m[k]), r_mask,
                                                        filename = list_mask_out_file_name[k])},
                                                 mc.preschedule = FALSE, mc.cores = num_cores))

  ########################
  #### Step 3: combine overlap information and number of predictions by day
  ##For every day with missing tiles, generate a raster of the number of predictions

  names(list_tiles_predicted_masked) <- basename(in_dir_reg)
  df_missing_tiles_day <- subset(df_missing, tot_missing > 0) #only days with missing tiles
  names_tiles <- basename(in_dir_reg)

  list_param_generate_raster_number_pred <- list(list_tiles_predicted_masked,
                                                 df_missing_tiles_day, r_overlap_m,
                                                 num_cores, region_name,
                                                 NA_flag_val, out_suffix, out_dir)
  names(list_param_generate_raster_number_pred) <- c("list_tiles_predicted_masked",
                                                     "df_missing_tiles_day", "r_overlap_m",
                                                     "num_cores", "region_name",
                                                     "NA_flag_val", "out_suffix", "out_dir")

  #FIX: was mcapply (typo, undefined) — use parallel::mclapply
  obj_number_pix_predictions <- mclapply(seq_len(nrow(df_missing_tiles_day)),
                                         FUN = generate_raster_number_of_prediction_by_day,
                                         list_param = list_param_generate_raster_number_pred,
                                         mc.preschedule = FALSE,
                                         mc.cores = num_cores)

  #FIX: was bare return() — return the products so callers can inspect them
  #(backward compatible: previous callers discarded the NULL return value)
  obj_predictions_tiles_missing <- list(df_missing = df_missing,
                                        df_lf_tiles_time_series = df_lf_tiles_time_series,
                                        matrix_overlap = matrix_overlap,
                                        r_overlap_masked_filename = out_mosaic_name_overlap_masked,
                                        obj_number_pix_predictions = obj_number_pix_predictions)
  return(obj_predictions_tiles_missing)
}
|
592 | 196 |
|
593 | 197 |
############################ END OF SCRIPT ################################## |
594 | 198 |
|
Also available in: Unified diff
moving function of number of predictions for day with missing tiles to function script