Revision b9e1a7f9
Added by Jim Regetz almost 13 years ago
- ID b9e1a7f9626579672b1b54d7367b6cbb104b56df

wrote function to look up local HDF files for given tile/year/day-range
climate/extra/aggregate-daily-lst.py

```diff
--- a/climate/extra/aggregate-daily-lst.py
+++ b/climate/extra/aggregate-daily-lst.py
@@ -21,7 +21,7 @@
 # NCEAS
 # Created on 16-May-2012
 
-import os
+import os, glob
 import datetime
 import ftplib
 import grass.script as gs
@@ -48,6 +48,36 @@
                     for m in maplist])
     gs.mapcalc('nobs_%s = %s' % (name, denominator), overwrite=overwrite)
 
+def get_hdf_paths(hdfdir, tile, start_doy, end_doy, year):
+    """Look in supplied directory to find the MODIS 11A1 HDF files
+    matching a given tile, year, and day range. If for some unexpected
+    reason there are multiple matches for a given day, only the first is
+    used. If no matches, the day is skipped with a warning message.
+
+    Arguments:
+    hdfdir -- path to directory containing the desired HDFs
+    tile -- tile identifier (e.g., 'h08v04')
+    start_doy -- integer start of day range (0-366)
+    end_doy -- integer end of day range (0-366)
+    year -- integer year (>=2000)
+
+    Returns list of absolute paths to the located HDF files.
+    """
+    hdfs = list()
+    for doy in range(start_doy, end_doy+1):
+        fileglob = 'MOD11A1.A%d%03d.%s*hdf' % (year, doy, tile)
+        pathglob = os.path.join(hdfdir, fileglob)
+        files = glob.glob(pathglob)
+        if len(files)==0:
+            # no file found -- print message and move on
+            print 'cannot access %s: no such file' % pathglob
+            continue
+        elif 1<len(files):
+            # multiple files found! -- just use the first...
+            print 'multiple hdfs found for tile', tile, 'on', doy
+        hdfs.append(files[0])
+    return hdfs
+
 # create raster of pixelwise means for a list of input rasters
 def calc_mean(maplist, name, overwrite=False):
     numerator = '(%s)' % '+'.join(['if(isnull(%s), 0, %s)' % (m, m)
@@ -150,49 +180,22 @@
 ftp.quit()
 
 
-#-----------------------------------------------------------------------
-gs.os.environ['GRASS_OVERWRITE'] = '1'
-# load and qc-adjust all daily LSTs, return list of the GRASS map names
-LST = [load_qc_adjusted_lst(hdf) for hdf in hdfs]
-# calculate mean and nobs rasters
-calc_clim(LST, 'LST_%s_%d_%03d_%03d' % (tile, year, start_doy, end_doy))
-# clean up
-gs.run_command('g.remove', rast=','.join(LST))
-gs.os.environ['GRASS_OVERWRITE'] = '0'
-#-----------------------------------------------------------------------
-
-
 #
 # test 31-day mean with local files
 #
 
-# Note that some statements below are redundant with (and possibly
-# slightly different) from code above -- still need to consolidate
-
 start_doy = 122
 end_doy = 152
+year = 2005
+hdfdir = '/home/layers/data/climate/MOD11A1.004-OR-orig'
+
+hdfs = get_hdf_paths(hdfdir, tile, start_doy, end_doy, year)
 
 gs.os.environ['GRASS_OVERWRITE'] = '1'
-ordir = '/home/layers/data/climate/MOD11A1.004-OR-orig-2000'
-LST = list()
-for doy in range(start_doy, end_doy+1):
-    fileglob = 'MOD11A1.A%d%03d.%s*hdf' % (year, doy, tile)
-    try:
-        hdfpath = glob.glob(os.path.join(ordir, fileglob))[0]
-    except:
-        print '*** problem finding', fileglob, '***'
-        continue
-    hdfname = os.path.basename(hdfpath)
-    LST.append('LST_' + '_'.join(hdfname.split('.')[1:3]))
-    # TODO: handle failures here...
-    gs.run_command('r.in.gdal',
-        input='HDF4_EOS:EOS_GRID:%s:MODIS_Grid_Daily_1km_LST:LST_Day_1km' % hdfpath,
-        output=LST[-1])
-gs.os.environ['GRASS_OVERWRITE'] = '0'
-gs.run_command('g.region', rast=LST[0])
-#numerator = '(%s)' % '+'.join(['if(isnull(%s), 0, %s)' % (lst, lst) for lst in LST])
-#denominator = '(%s)' % '+'.join(['if(!isnull(%s))' % lst for lst in LST])
-#gs.mapcalc('LST_mean_%s = %s / %s' % (tile, numerator, denominator))
-calc_mean(LST, 'LST_mean_%s_%d_%03d_%03d' % (tile, year, start_doy, end_doy))
+# load and qc-adjust all daily LSTs, return list of the GRASS map names
+LST = [load_qc_adjusted_lst(hdf) for hdf in hdfs]
+# calculate mean and nobs rasters
+calc_clim(LST, 'LST_%s_%d_%03d_%03d' % (tile, year, start_doy, end_doy))
 # clean up
 gs.run_command('g.remove', rast=','.join(LST))
+gs.os.environ['GRASS_OVERWRITE'] = '0'
```
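For reference, here is a minimal sketch of how the new get_hdf_paths() helper might be exercised on its own. The directory, tile, year, and day-range values mirror the test block in the diff above, but they are illustrative assumptions about a local MOD11A1 archive rather than part of this revision, and the snippet presumes it runs where get_hdf_paths() is defined (for example, pasted into aggregate-daily-lst.py).

```python
# Hypothetical usage sketch of get_hdf_paths(); values mirror the test
# block in this revision and assume a matching local MOD11A1 archive.
hdfdir = '/home/layers/data/climate/MOD11A1.004-OR-orig'  # assumed local path
tile = 'h08v04'                  # example tile from the docstring
year = 2005
start_doy, end_doy = 122, 152    # 31-day window (2-May-2005 through 1-Jun-2005)

# each day expands to a glob such as MOD11A1.A2005122.h08v04*hdf
hdfs = get_hdf_paths(hdfdir, tile, start_doy, end_doy, year)
print '%d of %d days located' % (len(hdfs), end_doy - start_doy + 1)
for path in hdfs[:3]:
    print path
```

Days with no matching HDF are skipped with a printed warning, so the returned list can be shorter than the requested window.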