Revision 63f27fec
Added by Benoit Parmentier about 12 years ago
climate/research/oregon/modis-lst/LST_downloading_and_climatology.py
```diff
 ## proj4='+proj=sinu +R=6371007.181 +nadgrids=@null +wktext')


-#### Added by Benoit on 18 October 2012
+#### MOdified by Benoit in October 2012 and January 2013

 #Parameters
-tiles = ['h11v08','h11v07','h12v08'] #These tiles correspond to Venezuela.
+tiles = ['h11v08','h11v07','h12v07','h12v08','h10v07','h10v08'] #These tiles correspond to Venezuela.
 start_year = 2001
 end_year = 2010
 end_month=12
 start_month=1
 hdfdir = '/home/parmentier/Data/benoit_test' #destination file where hdf files are stored locally after download.
+night=0 # if 1 then produce night climatology
+out_suffix="_01252013"
+download=1 # if 1 then download files
```
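Note on the parameters above: the download loop in the next hunk iterates over year_list, which is defined earlier in the script and does not appear in this diff (only the commented test value year_list=[2001,2002] is visible). A minimal sketch of how such a list could be derived from start_year and end_year; the construction is an assumption, not part of this revision:

```python
# Hypothetical sketch (not in this revision): build the list of years to process
# from the start_year/end_year parameters defined above.
year_list = range(start_year, end_year + 1)  # 2001..2010 (a list under Python 2)
```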
```diff

 ################## First step: download data ###################
 # Added tile loop
...
 #for testing...
 #year_list=[2001,2002]
 #hdfdir = '/home/parmentier/Data/benoit_test2'
-for tile in tiles:
-    for year in year_list:
-        start_doy = 1
-        end_doy = 365
-        if calendar.isleap(year):
-            end_doy=366
-
-        hdfs = download_mod11a1(hdfdir, tile, start_doy, end_doy, year)
+#tiles = ['h10v07','h10v08','h12v07']
+if download==1:
+    for tile in tiles:
+        for year in year_list:
+            start_doy = 1
+            end_doy = 365
+            if calendar.isleap(year):
+                end_doy=366
+
+            hdfs = download_mod11a1(hdfdir, tile, start_doy, end_doy, year)

 # modify loop to take into account "hdfs", list of hdf files needed in the next steps…
-
```
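The kept comment above ("modify loop to take into account 'hdfs'...") flags a pending change: as committed, hdfs is overwritten on each tile/year iteration and the climatology step re-globs hdfdir rather than reusing the returned file lists. A hedged sketch of one possible hand-off, reusing the download_mod11a1 call from this revision; the hdfs_by_tile dictionary is hypothetical and assumes download_mod11a1 returns a list of hdf file names, as the comment suggests:

```python
# Hypothetical sketch (not in this revision): collect the file names returned by
# download_mod11a1 per tile so the climatology step can reuse them instead of
# re-globbing hdfdir. Assumes download_mod11a1 returns a list of hdf paths.
hdfs_by_tile = {}
if download == 1:
    for tile in tiles:
        hdfs_by_tile[tile] = []
        for year in year_list:
            end_doy = 366 if calendar.isleap(year) else 365
            hdfs_by_tile[tile] += download_mod11a1(hdfdir, tile, 1, end_doy, year)
```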
```diff
 ################# Second step: compute climatology #################
 ## Process tile by tile...
-
-##NEED TO LOOP OVER DIFFERENT TILES!!!
 ##start loop for tile
-#load a file that will define the region of interest and location at the same time
-
-#for tile in tiles:
 #tile= 'h12v08'
-tile=tiles[2]
-
-# Set up a temporary GRASS data base
-
-tmp_location = 'tmp' + str(os.getpid())  # Name of the temporary GRASS data base
-orig_location = gs.gisenv()['LOCATION_NAME']
-orig_mapset = gs.gisenv()['MAPSET']
-gs.os.environ['GRASS_OVERWRITE'] = '1'  # Allow for overwrite?? GRASS data base
+#tiles = ['h12v07','h12v08']
+var_name = ['LST_Day_1km','LST_Night_1km']
+if night==1:
+    lst_var = var_name[1]
+if night==0:
+    lst_var = var_name[0]

-path_file = hdfdir
-os.chdir(path_file) #set working directory
-os.getcwd() #get current working directory
-listfiles_wd2 = glob.glob('*'+tile+'*'+'.hdf') #list the all the LST files of interest
-name_of_file = listfiles_wd2[0]
-#name_of_file = 'MOD11A1.A2003364.h12v08.005.2008163211649.hdf'
-# first load daytime LST, by accessing specific layers in the hdd??
-#SUBDATASET_1_NAME=HDF4_EOS:EOS_GRID:"MOD11A1.A2003364.h12v08.005.2008163211649.hdf":MODIS_Grid_Daily_1km_LST:LST_Day_1km
-#path_file = os.getcwd()
-#Create the name of the layer to extract from the hdf file used for definition of the database
-lst1day = 'HDF4_EOS:EOS_GRID:%s/%s:%s' % (
-    path_file,
-    name_of_file,
-    'MODIS_Grid_Daily_1km_LST:LST_Day_1km')
-#change the above line later to allow night LST for tmin
-
-#now import one image and set the location, mapset and database !!create name per tile
-gs.run_command('r.in.gdal', input=lst1day, output='LST_1day_'+tile,
-    location=tmp_location)
-
-# Now that the new location has been create, switch to new location
-gs.run_command('g.gisenv', set='LOCATION_NAME=%s' % tmp_location)
-gs.run_command('g.gisenv', set='MAPSET=PERMANENT')
-
-# set GRASS the projection system and GRASS region to match the extent of the file
-gs.run_command('g.proj', flags='c',
-    proj4='+proj=sinu +a=6371007.181 +b=6371007.181 +ellps=sphere')
-gs.run_command('g.region', rast='LST_1day_'+tile)
-
-#generate monthly pixelwise mean & count of high-quality daytime LST
-gs.os.environ['GRASS_OVERWRITE'] = '1'
-
-#Provide a list of file "hdfs" to process…this should be in a loop...
-#tile= 'h12v08'
-fileglob = 'MOD11A1.A*.%s*hdf' % (tile)
-pathglob = os.path.join(path_file, fileglob)
-files = glob.glob(pathglob)
-hdfs = files
-### LST values in images must be masked out if they have low quality: this is done using QC flags from the layer subset in hdf files
-LST = [load_qc_adjusted_lst(hdf) for hdf in hdfs]
-### LST is a list that contains the name of the new lst files in the GRASS format. The files are stored in the temporary GRASS database.
-list_maps_month=list_raster_per_month(LST)
-list_maps_name=['jan','feb','mar','apr','may','jun','jul','aug','sep','oct','nov','dec']
-##Now calculate clim per month, do a test for year1
-nb_month = 12
-for i in range(1,nb_month+1):
-    clims = calc_clim(list_maps_month[i-1],tile+'_'+list_maps_name[i-1]+'_'+str(i-1))
-    for j in range(1, len(clims))
-        gs.run_command('r.out.gdal', input= clims[0], output=clims[0]+'.tif', type='Float64')
-#clims = calc_clim(LST, 'LST_%s_%d_%02d' % (tile, year, month))
+#start = time.clock()
+for tile in tiles:
+    # Set up a temporary GRASS data base
+    tmp_location = 'tmp' + "_"+ tile + "_"+ str(os.getpid())  # Name of the temporary GRASS data base
+    orig_location = gs.gisenv()['LOCATION_NAME']
+    orig_mapset = gs.gisenv()['MAPSET']
+    gs.os.environ['GRASS_OVERWRITE'] = '1'  # Allow for overwrite?? GRASS data base
+
+    path_file = hdfdir
+    os.chdir(path_file) #set working directory
+    os.getcwd() #get current working directory
+    listfiles_wd2 = glob.glob('*'+tile+'*'+'.hdf') #list the all the LST files of interest
+    name_of_file = listfiles_wd2[0]
+    #Create the name of the layer to extract from the hdf file used for definition of the database
+    lst1day = 'HDF4_EOS:EOS_GRID:%s/%s:%s' % (
+        path_file,
+        name_of_file,
+        'MODIS_Grid_Daily_1km_LST:'+ lst_var)
+    #'LST_Night_1km'
+    #change the above line later to allow night LST for tmin
+
+    #now import one image and set the location, mapset and database !!create name per tile
+    gs.run_command('r.in.gdal', input=lst1day, output='LST_1day_'+tile,
+        location=tmp_location)

+    # Now that the new location has been create, switch to new location
+    gs.run_command('g.gisenv', set='LOCATION_NAME=%s' % tmp_location)
+    gs.run_command('g.gisenv', set='MAPSET=PERMANENT')

-#Now write out files: added on 01/22/2013
-gs.run_command('r.out.gdal', input= clims[0], output=clims[0]+'.tif', type='Float64')
-# clean up if necessary
-gs.run_command('g.remove', rast=','.join(LST))
-gs.os.environ['GRASS_OVERWRITE'] = '0'
+    # set GRASS the projection system and GRASS region to match the extent of the file
+    gs.run_command('g.proj', flags='c',
+        proj4='+proj=sinu +a=6371007.181 +b=6371007.181 +ellps=sphere')
+    gs.run_command('g.region', rast='LST_1day_'+tile)

-# clean up
-gs.run_command('g.gisenv', set='LOCATION_NAME=%s' % orig_location)
-gs.run_command('g.gisenv', set='MAPSET=%s' % orig_mapset)
-shutil.rmtree(os.path.join(gs.gisenv()['GISDBASE'], tmp_location))
+    #generate monthly pixelwise mean & count of high-quality daytime LST
+    gs.os.environ['GRASS_OVERWRITE'] = '1'
+
+    #Provide a list of file "hdfs" to process…this should be in a loop...
+    #tile= 'h12v08'
+    fileglob = 'MOD11A1.A*.%s*hdf' % (tile)
+    pathglob = os.path.join(path_file, fileglob)
+    files = glob.glob(pathglob)
+    hdfs = files
+    ### LST values in images must be masked out if they have low quality, determined from QC flags stored in hdf files
+    LST = [load_qc_adjusted_lst(hdf) for hdf in hdfs]
+    ### LST is a list that contains the name of the new lst files in the GRASS format. The files are stored in the temporary GRASS database.
+    list_maps_month=list_raster_per_month(LST)
+    list_maps_name=['jan','feb','mar','apr','may','jun','jul','aug','sep','oct','nov','dec']
+    ##Now calculate clim per month, do a test for year1
+    nb_month = 12
+    for i in range(1,nb_month+1):
+        clims = calc_clim(list_maps_month[i-1],tile+'_'+list_maps_name[i-1]+'_'+str(i-1))
+        for j in range(1, len(clims)+1):
+            if j-1 ==0:
+                gs.run_command('r.out.gdal', input= clims[j-1], output=clims[j-1]+ out_suffix +'.tif', type='Float32')
+            if j-1 ==1:
+                gs.mapcalc(' clim_rescaled = ('+ clims[j-1 ]+ ' * 0.02) -273.15')
+                gs.run_command('r.out.gdal', input= 'clim_rescaled', output=clims[j-1]+ out_suffix+'.tif', type='Float32')
+    #clims = calc_clim(LST, 'LST_%s_%d_%02d' % (tile, year, month))
+    # clean up if necessary
+
+    #gs.run_command('g.remove', rast=','.join(LST))
+    #gs.os.environ['GRASS_OVERWRITE'] = '0'
+
+    # clean up
+    #gs.run_command('g.gisenv', set='LOCATION_NAME=%s' % orig_location)
+    #gs.run_command('g.gisenv', set='MAPSET=%s' % orig_mapset)
+    #shutil.rmtree(os.path.join(gs.gisenv()['GISDBASE'], tmp_location))

 return None
```
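In the export loop above, the map written for j-1 == 1 is first rescaled with gs.mapcalc: raw MOD11A1 LST digital numbers are multiplied by the product's 0.02 scale factor to obtain Kelvin, and 273.15 is subtracted to obtain degrees Celsius. A stand-alone illustration of that conversion; the helper name and the sample value are illustrative only and not part of the script:

```python
# Illustration only (not in the script): the rescaling applied by the
# gs.mapcalc() call above, for a single MOD11A1 LST digital number (DN).
def rescale_lst(dn):
    # DN * 0.02 gives Kelvin; subtracting 273.15 converts Kelvin to Celsius.
    return dn * 0.02 - 273.15

print(rescale_lst(14800))  # 14800 DN -> 296.0 K -> about 22.85 degrees Celsius
```

The map exported for j-1 == 0 is written out as-is, without this rescaling.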
LST climatology, added tile loop, downloading and calculation of climatology for 6 tiles in Venezuela