Revision bb5e3f36
Added by Benoit Parmentier about 12 years ago
climate/research/oregon/modis-lst/LST_downloading_and_climatology.py
 end_month=12
 start_month=1
 hdfdir = '/home/parmentier/Data/benoit_test' #destination file where hdf files are stored locally after download.
+
 ################## First step: download data ###################
 # Added tile loop
 year_list=range(start_year,end_year+1) #list of year to loop through
-year_list=[2001,2002]
-for tile in tiles:
+#for testing...
+#year_list=[2001,2002]
+#hdfdir = '/home/parmentier/Data/benoit_test2'
+for tile in tiles:
     for year in year_list:
-        if year==end_year:
-            start_doy, end_doy = get_doy_range(year, end_month)
-            start_doy=1 #Fix problem later
-            hdfs = download_mod11a1(hdfdir, tile, start_doy, end_doy, year)
-
-        elif year==start_year:
-            start_doy, end_doy = get_doy_range(year, start_month)
-            end_doy=365
-            if calendar.isleap(year):
-                end_doy=366
-            hdfs = download_mod11a1(hdfdir, tile, start_doy, end_doy, year)
-
-        elif (year!=start_year and year!=end_year):
-            start_doy=1
-            end_doy=365
-            if calendar.isleap(year):
-                end_doy=366
-            hdfs = download_mod11a1(hdfdir, tile, start_doy, end_doy, year)
+        start_doy = 1
+        end_doy = 365
+        if calendar.isleap(year):
+            end_doy=366
+
+        hdfs = download_mod11a1(hdfdir, tile, start_doy, end_doy, year)

 # modify loop to take into account "hdfs", list of hdf files needed in the next steps…

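The three removed branches tried to clip the first and last year to start_month/end_month via get_doy_range but were short-circuited ("Fix problem later"); the new code simply downloads whole years. A minimal sketch, using only the standard calendar and datetime modules, of how the month bounds could be honored again; doy_range_for_year is a hypothetical stand-in, while download_mod11a1 and the loop variables are the script's own.

import calendar
from datetime import date

def doy_range_for_year(year, start_year, end_year, start_month, end_month):
    # Hypothetical helper: whole calendar years in the middle of the period,
    # clipped to start_month in the first year and end_month in the last.
    start_doy = 1
    end_doy = 366 if calendar.isleap(year) else 365
    if year == start_year:
        start_doy = date(year, start_month, 1).timetuple().tm_yday
    if year == end_year:
        last_day = calendar.monthrange(year, end_month)[1]  # number of days in end_month
        end_doy = date(year, end_month, last_day).timetuple().tm_yday
    return start_doy, end_doy

# usage mirroring the loop above:
# for tile in tiles:
#     for year in year_list:
#         start_doy, end_doy = doy_range_for_year(year, start_year, end_year, start_month, end_month)
#         hdfs = download_mod11a1(hdfdir, tile, start_doy, end_doy, year)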
 ################# Second step: compute climatology #################
+## Process tile by tile...
+
+##NEED TO LOOP OVER DIFFERENT TILES!!!
+##start loop for tile
+#load a file that will define the region of interest and location at the same time
+
+#for tile in tiles:
+#tile= 'h12v08'
+tile=tiles[2]

 # Set up a temporary GRASS data base

...
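The lines just below save the current GRASS location and mapset before the temporary, per-tile database is created. A hedged sketch of the full bracket, assuming grass.script is imported as gs (as elsewhere in the script) and that tmp_location is the per-tile location created by r.in.gdal further down; g.mapset is the standard GRASS command for switching back.

import grass.script as gs

# remember where GRASS started so the temporary per-tile location can be left cleanly
orig_location = gs.gisenv()['LOCATION_NAME']
orig_mapset = gs.gisenv()['MAPSET']
gs.os.environ['GRASS_OVERWRITE'] = '1'  # allow maps to be overwritten while testing

# ... per-tile import and climatology in tmp_location go here ...

# switch back to the original location/mapset once the tile is done
gs.run_command('g.mapset', mapset=orig_mapset, location=orig_location)
gs.os.environ['GRASS_OVERWRITE'] = '0'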
 orig_location = gs.gisenv()['LOCATION_NAME']
 orig_mapset = gs.gisenv()['MAPSET']
 gs.os.environ['GRASS_OVERWRITE'] = '1' # Allow for overwrite?? GRASS data base
-
-##NEED TO LOOP OVER DIFFERENT TILES!!!
-##start loop for tile
-#load a file that will define the region of interest and location at the same time

-tile= 'h12v08'
-tile=tiles[2]
-#tile= 'h12v08'
-
-path_file = '/home/parmentier/Data/benoit_test'
+path_file = hdfdir
 os.chdir(path_file) #set working directory
 os.getcwd() #get current working directory
 listfiles_wd2 = glob.glob('*'+tile+'*'+'.hdf') #list the all the LST files of interest
...
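name_of_file, passed to the subdataset call whose arguments open the next hunk (the call that produces lst1day), has to come from the list just built. A small sketch of one plausible choice, picking a single granule of the tile purely to define the temporary location's projection and extent; this is an assumption, not necessarily what the omitted lines do.

listfiles_wd2.sort()             # deterministic order
name_of_file = listfiles_wd2[0]  # any granule of the tile would do for setting up the location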
                          path_file,
                          name_of_file,
                          'MODIS_Grid_Daily_1km_LST:LST_Day_1km')
+#change the above line later to allow night LST for tmin

 #now import one image and set the location, mapset and database !!create name per tile
-gs.run_command('r.in.gdal', input=lst1day, output='LST_1day_h12v08',
+gs.run_command('r.in.gdal', input=lst1day, output='LST_1day_'+tile,
                location=tmp_location)

 # Now that the new location has been create, switch to new location
...
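The subdataset name 'MODIS_Grid_Daily_1km_LST:LST_Day_1km' is hard-coded above, and the new comment flags that night LST will be needed later for tmin. A sketch of how the full GDAL subdataset string for a MOD11A1 HDF4 granule could be parameterized by band; the helper name is hypothetical, and the HDF4_EOS prefix follows GDAL's naming convention for HDF4-EOS grids.

def mod11a1_subdataset(hdf_path, band='LST_Day_1km'):
    # 'LST_Day_1km' for daytime LST (tmax climatology),
    # 'LST_Night_1km' for nighttime LST (tmin climatology)
    return 'HDF4_EOS:EOS_GRID:"%s":MODIS_Grid_Daily_1km_LST:%s' % (hdf_path, band)

# e.g. lst1day = mod11a1_subdataset(os.path.join(path_file, name_of_file))
# which would then feed straight into the r.in.gdal import shown above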
 # set GRASS the projection system and GRASS region to match the extent of the file
 gs.run_command('g.proj', flags='c',
                proj4='+proj=sinu +a=6371007.181 +b=6371007.181 +ellps=sphere')
-gs.run_command('g.region', rast='LST_1day_h12v08')
+gs.run_command('g.region', rast='LST_1day_'+tile)

-
 #generate monthly pixelwise mean & count of high-quality daytime LST
-
 gs.os.environ['GRASS_OVERWRITE'] = '1'

-#Provide a list of file "hdfs" to process…
+#Provide a list of file "hdfs" to process…this should be in a loop...
 #tile= 'h12v08'
 fileglob = 'MOD11A1.A*.%s*hdf' % (tile)
 pathglob = os.path.join(path_file, fileglob)
...
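glob.glob returns matches in arbitrary order, and fileglob above matches every MOD11A1 granule of the tile regardless of date. A short sketch, assuming the standard MOD11A1.AYYYYDDD.* filename pattern, of listing the granules and sorting them chronologically before import:

import glob, os

files = glob.glob(pathglob)

def acq_date(path):
    # 'MOD11A1.A2001032.h12v08....hdf' -> 'A2001032' -> (2001, 32)
    token = os.path.basename(path).split('.')[1]
    return int(token[1:5]), int(token[5:8])

files.sort(key=acq_date)  # chronological: by year, then day of year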
 list_maps_month=list_raster_per_month(LST)
 list_maps_name=['jan','feb','mar','apr','may','jun','jul','aug','sep','oct','nov','dec']
 ##Now calculate clim per month, do a test for year1
-nb_month=12
+nb_month = 12
 for i in range(1,nb_month+1):
-    #clims = calc_clim(LST, 'LST_%s_%d_%02d' % (tile, year, month))
-    clims = calc_clim(list_maps_month[i-1],list_maps_name[i-1]+'_'+str(i-1))
-
+    clims = calc_clim(list_maps_month[i-1],tile+'_'+list_maps_name[i-1]+'_'+str(i-1))
+    for j in range(1, len(clims)):
+        gs.run_command('r.out.gdal', input= clims[0], output=clims[0]+'.tif', type='Float64')
     #clims = calc_clim(LST, 'LST_%s_%d_%02d' % (tile, year, month))

+
+#Now write out files: added on 01/22/2013
+gs.run_command('r.out.gdal', input= clims[0], output=clims[0]+'.tif', type='Float64')
 # clean up if necessary
 gs.run_command('g.remove', rast=','.join(LST))
 gs.os.environ['GRASS_OVERWRITE'] = '0'
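list_raster_per_month and calc_clim are defined earlier in the script and not shown in this revision; from the comments, calc_clim presumably wraps r.series to produce the monthly mean (and count) rasters and returns their names. The added write-out always exports clims[0]; a hedged sketch of the same loop exporting every raster calc_clim returned, under that assumption about its return value:

for i in range(1, nb_month + 1):
    # assumption: calc_clim returns the list of output raster names for the month
    clims = calc_clim(list_maps_month[i-1], tile + '_' + list_maps_name[i-1] + '_' + str(i-1))
    for clim in clims:
        gs.run_command('r.out.gdal', input=clim, output=clim + '.tif',
                       format='GTiff', type='Float64')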
LST climatology, added loop to produce monthly mean and write out tif from GRASS database