
Revision 7a06885b

Added by Jim Regetz over 12 years ago

  • ID 7a06885b334538aadc833759175e9d707eeae44e

numerous comment, format, and variable name changes


terrain/research/oregon/arcgis/v2/multiscalesmooth.py
 #!/usr/bin/env python
 
 ############################################################################
-# MODULE:   r.plane for GRASS 5.7; based on r.plane for GRASS 5
+# MODULE:   r.multiscalesmooth for GRASS
 # AUTHOR(S):    Original algorithm and AML implementation by John
 #               Gallant. Translation to GRASS/Python by Jim Regetz.
 # REFERENCES:
......
 # create set to store names of temporary maps to be deleted upon exit
 tmp_rast = set()
 
-#@atexit.register
 def cleanup():
     gs.message("Removing temporary files...", flag='i')
     for rast in tmp_rast:
         gs.run_command("g.remove", rast=rast, quiet=True)
 
-def report():
-    gs.debug("raster report...")
-    for rast in tmp_rast:
-        info = gs.raster_info(rast)
-        gs.debug('%s: %s (%f-%f)' % (rast, info['datatype'],
-            info['min'], info['max']))
-
 def coarsen_region(factor=3):
     gs.run_command('g.region',
         rows=gs.region()['rows']/3,
......
     chisqb = (-5.871 - 3.675 * math.log10(alpha) + 4.690 *
         math.pow(alpha, 0.3377))
 
-    # set number of aggregation levels and neighborhood size
-    NUM_LEVELS = 4
-    NUM_CELLS = 3
-
     gs.message("Preparing initial grids", flag='i')
 
     # create copy of initial grid of values using current region
     gs.mapcalc('z0 = ${input}', input=input, quiet=True)
     tmp_rast.add('z0')
 
+    # set number of aggregation levels and neighborhood size
+    NUM_LEVELS = 4
+    NUM_CELLS = 3
+
     # expand region to accommodate integer number of cells at coarsest
-    # level, by adding cells to all sides (with one extra on top/right
-    # if an odd number of cells needs to be added)
+    # level, by adding roungly equal number of cells on either side
+    # (with one extra on top/right if an odd number of cells is needed)
     max_size = NUM_CELLS**NUM_LEVELS
     region = gs.region()
     extra = region['cols'] % max_size
......
     else:
         addy = 0.0
     gs.run_command('g.region', flags='a',
-        w = region['w']-math.floor(addx)*region['ewres'],
-        e = region['e']+math.ceil(addx)*region['ewres'],
-        s = region['s']-math.floor(addy)*region['nsres'],
-        n = region['n']+math.ceil(addy)*region['nsres'])
+        w = region['w'] - math.floor(addx) * region['ewres'],
+        e = region['e'] + math.ceil(addx) * region['ewres'],
+        s = region['s'] - math.floor(addy) * region['nsres'],
+        n = region['n'] + math.ceil(addy) * region['nsres'])
     gs.debug('\n'.join(gs.parse_command('g.region', 'up').keys()))
 
     # create initial grid of variances; sd can be a raster or a constant
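
As an editorial aside on the region padding in the hunk above: the region is grown to a whole number of coarsest-level cells (NUM_CELLS**NUM_LEVELS columns and rows), and the extra cells are split roughly evenly between the two sides, with the odd cell going to the top/right. A minimal standalone sketch of that split, with a hypothetical helper name and example size (not part of this revision):

import math

def pad_counts(ncells, num_cells=3, num_levels=4):
    # number of cells to add on the low and high sides so that ncells
    # becomes a multiple of num_cells**num_levels (81 for the defaults)
    max_size = num_cells**num_levels
    extra = ncells % max_size
    add = (max_size - extra) / 2.0 if extra else 0.0
    return int(math.floor(add)), int(math.ceil(add))

# a 1000-column region needs 53 extra columns to reach 1053 = 13 * 81;
# they are split as 26 on one side and 27 on the other
print(pad_counts(1000))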
......
             vm = '(1.0/w)', mv = '(n/w)', quiet=True)
         tmp_rast.add('v%d' % i)
 
-    # arbitrary value used to fill null variance cells
+    # get arbitrarily large value to fill null variance cells
     bigvar = gs.raster_info('v0')['max'] * 10
 
     # prep for refinement phase
......
             overwrite=True, quiet=True)
 
         # create smoothed higher resolution versions of z and v
-        #TODO: is this the same as circle with radius 2 in arc?
-        #TODO: what's happening at the edges here???
-        #TODO: is order of ops faithful to aml w.r.t resolution changes?
+        # using weight matrix equivalent to ArcGIS circle with radius 2
         gs.run_command('r.neighbors', flags='c', input=smooth,
             output='zs', method='average', size=5, overwrite=True,
             quiet=True)
......
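
Aside: the new comment above replaces the TODO by stating that the 5x5 averaging window (r.neighbors with flags='c', size=5) is equivalent to an ArcGIS circle of radius 2. The shape of that neighborhood can be reproduced from the Euclidean-distance rule; an illustrative NumPy snippet (not part of this revision):

import numpy as np

offsets = np.arange(-2, 3)
dx, dy = np.meshgrid(offsets, offsets)
# keep cells whose centers lie within distance 2 of the central cell
mask = ((dx**2 + dy**2) <= 2**2).astype(int)
print(mask)

The printed 0/1 pattern is the same as the circle2 weight matrix added to the R script further down.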
         gs.mapcalc('v.smooth = 1 / (1/${v_c} + 1/vs)', v_c = v_c,
             quiet=True)
 
-    if (1 <= gs.debug_level):
-        report()
-
     cleanup()
     return None
 
  
terrain/research/oregon/arcgis/v2/multiscalesmooth9a_clean.R
-# run in GRID, not ARC
-# &run .aml ingrid sd prob bbox
-# sd 0.0001 prob 0.05
+# Multiscale raster smoother function designed for use with noisy DEMs.
+# Original algorithm and AML implementation by John Gallant. Translation
+# to R (relying on raster package functionality) by Jim Regetz, based on
+# AML script 'multiscalesmooth9a_clean.aml'.
 #
-# 9a - limit to 4 steps, see if that has any significant deterioration of smoothing performance. Should fix
-# the problem with islands and headlands - turns out also need to remove water except
-# for one cell adjacent to land, and give that a higher uncertainty. See cluster_multiscalesmooth9a_clean_216
-#
-# Version 9:
-# Focus on implementing identical algorithm to directsmooth2 using multiscale method i.e. aggregating by factor of 3
-# from already aggregated data, rather than from original resolution each time.
-#
-# Version 8:
-# WARNING: have not yet checked that the additional weighting of the gaussian smoothing is not messing with
-# the calculations of variance etc.
-# Replaced simple 3x3 aggregation with gaussian smoothing
-# Kernel is chosen to give appropriate level of smoothing and produce a fairly accurate approximation of the smoothed
-# surface by interpolation of the coarser scale smoothed values
-# Details in gaussian.xls on john's laptop
-#
-# Version 7:
-# Further reworking of how the final values are selected - a mean is a candidate if its associated variance
-# is less than the mean sample uncertainty, and the mean with the lowest variance among the candidates is the chosen one.
-# Implement that in the nested sense by taking the lowest group variance divided by the chi^2 value, and its associated mean variance,
-# and if that is lower than the data point variance the
-#
-# approximate critical value of chi^2/N with N degrees of freedom at 5% level as 1 + 2.45/sqrt(N) + 0.55/N
-# for 1% level use 1 + 3.4/sqrt(N) + 2.4/N
-#
-# Version 6:
-# Done from scratch after careful working through of theory.
-# ingrid is the (potentially sparse) grid of data to be smoothed and
-# interpolated, which can be of different extent to the output
-# (resolution is assumed to be the same, could adapt this to relax that)
-#
-# var can be a constant or a grid
-#
-# bbox can be either a grid name or the 'xmin ymin xmax ymax' parameters
-# for setwindow
-
-# REGETZ NOTES
-# - it looks like Ming used sd=0.001 (based on the Arc 'log' file in the
-#   topo experimental directory)
+# Jim Regetz
+# NCEAS
+# Created on 22-Mar-2012
 
 library(raster)
 
-#&type NB - using standard deviation as noise specification now, not variance!
-
-multiscalesmooth <- function(ingrid, sd=0.0001 , prob=0.05, bbox) {
-    # ingrid: grid to smooth
-    # sd: stdev
-    # prob: prob
+multiscalesmooth <- function(ingrid, sd=0.0001, alpha=0.05, bbox) {
+    # ingrid: RasterLayer to smooth
+    # sd: numeric constant or RasterLayer
+    # alpha: alpha level for chi-square critical value
     # bbox: optional extent object used to subset ingrid
 
     # subset ingrid if desired
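
Aside: the removed header above quotes a closed-form approximation for the critical value of chi^2/N with N degrees of freedom at the 5% level, 1 + 2.45/sqrt(N) + 0.55/N. A quick numerical check of that approximation against the exact upper-tail value (illustrative only, assumes SciPy is available; not part of this revision):

import math
from scipy.stats import chi2

alpha = 0.05
for n in (4, 10, 50, 100):
    exact = chi2.isf(alpha, n) / n              # exact upper-tail critical value of chi^2/N
    approx = 1 + 2.45 / math.sqrt(n) + 0.55 / n # approximation from the old header
    print(n, round(exact, 4), round(approx, 4))

The two values agree closely over this range.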
......
         ingrid <- crop(ingrid, bbox)
     }
 
-    # set number of levels of aggregation
+    # set number of aggregation levels and neighborhood size
     NUM.LEVELS <- 4
-    # set aggregation factor
-    AGG.FACTOR <- 3
+    NUM.CELLS <- 3
 
     # expand grid to accommodate integer number of cells at coarsest
     # level, by adding roungly equal number of cells on either side
-    # (with one extra on top/right if an odd number of cells needs to be
-    # added)
-    max.size <- AGG.FACTOR^NUM.LEVELS
+    # (with one extra on top/right if an odd number of cells is needed)
+    max.size <- NUM.CELLS^NUM.LEVELS
     addx <- if (0 < (extra<-ncol(ingrid)%%max.size)) {
             (max.size-extra)/2
         } else {
......
             0
         }
     full.extent <- extent(
-        xmin(ingrid)-floor(addx)*xres(ingrid),
-        xmax(ingrid)+ceiling(addx)*xres(ingrid),
-        ymin(ingrid)-floor(addy)*yres(ingrid),
-        ymax(ingrid)+ceiling(addy)*yres(ingrid)
+        xmin(ingrid) - floor(addx) * xres(ingrid),
+        xmax(ingrid) + ceiling(addx) * xres(ingrid),
+        ymin(ingrid) - floor(addy) * yres(ingrid),
+        ymax(ingrid) + ceiling(addy) * yres(ingrid)
         )
 
     # create grids
 
-    # NB - only calculating sample variances here, not variances of estimated means.
-    # Also note that v0_bg is an uncertainty, not a sample variance
-    # and v1_bg is total variances, but both are labelled as "between-group" to simplify the smoothing
-
     # create lists to hold the series of successively coarsened grids of
     # values and variances, respectively; the first element of each is
     # the grid at the original input resolution
......
 
     # set initial "group variance" to individual msmt variance (noise)
     v.g = v[[1]]
-    # weighting for aggregation, based on total variance
+    # weights for aggregation, based on total variance
     w <- calc(v[[1]], function(x) ifelse(is.na(x), 0, 1/x))
+    # squared weights
     wsq = w^2
     # effective number of measurements
     n <- calc(z[[1]], function(x) ifelse(!is.na(x), 1, 0))
 
-    bigvar <- cellStats(v[[1]], max)
-
-    #setcell minof
-
     # aggregate to broader scales
     for (i in 1+seq.int(NUM.LEVELS)) {
 
......
         # calc variance-weighted neighborhood mean
         z[[i]] <- aggregate(w.prev * z[[i-1]], 3, sum) / w
         # calc between-cell variance, taken over neighborhood
-        hdiff <- z[[i-1]] - disaggregate(z[[i]], 3)
-        v.bg <- aggregate(w.prev * hdiff^2, 3, sum) / w
+        zdiff <- z[[i-1]] - disaggregate(z[[i]], 3)
+        v.bg <- aggregate(w.prev * zdiff^2, 3, sum) / w
         # calc wtd avg of within-cell variance, taken over neighborhood
         if (i==2) {
-            v.wg <- n - n # zero, but with window and cell size set for us
+            v.wg <- n - n # zero, but with correct window and cell size
         } else {
             v.wg <- aggregate(w.prev * v.g.prev, 3, sum) / w
         }
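
Aside: the aggregation step above computes, for each 3x3 block, a variance-weighted mean (weights 1/variance, zero where data are missing) and a between-cell variance from the squared deviations about that mean. A toy NumPy version of a single block, with made-up values (not part of this revision):

import numpy as np

z = np.array([[1.0, 2.0, np.nan],
              [2.0, 2.5, 3.0],
              [np.nan, 1.5, 2.0]])       # values; NaN marks missing cells
v = np.full(z.shape, 0.0001**2)          # per-cell variance (sd = 0.0001)

w = np.where(np.isnan(z), 0.0, 1.0 / v)  # weights: 1/variance, 0 where missing
W = w.sum()                              # aggregated weight for the block
z_coarse = np.nansum(w * z) / W                    # variance-weighted mean
v_between = np.nansum(w * (z - z_coarse)**2) / W   # between-cell variance
print(z_coarse, v_between)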
......
         mv <- n / w
 
         # calc chisq critical values
-        chisq <- calc(n.eff, function(n) qchisq(0.05, n-1,
+        chisq <- calc(n.eff, function(n) qchisq(alpha, n-1,
             lower=FALSE)/(n-1))
         # set coarsened cell variances: if group variance is small
         # relative to noise variance, use variance of the mean instead
......
 
     }
 
-    bigvar  <- bigvar * 10
-
-    #copy z[NUM.LEVELS] hs[NUM.LEVELS]
-    #copy v[NUM.LEVELS] vs[NUM.LEVELS]
-    #kill z[NUM.LEVELS]
-    #kill v[NUM.LEVELS]
-    #setcell hs[NUM.LEVELS]
-    #setwindow hs[NUM.LEVELS]
-
-    # smooth, refine and combine each layer in turn
-    #hs <- z[[NUM.LEVELS+1]]
-    #vs <- v[[NUM.LEVELS+1]]
-    hs <- z
-    vs <- v
-    #todo: is this the same as circle with radius 2 in arc???
-    #circle2 <- matrix(c(0,1,0, 1,1,1, 0,1,0), nrow=3)
-    circle2 <- matrix(c(0,0,1,0,0, 0,1,1,1,0, 1,1,1,1,1, 0,1,1,1,0, 0,0,1,0,0), nrow=5)
+    # get arbitrarily large value to fill null variance cells
+    bigvar <- cellStats(v[[1]], max) * 10
+
+    # prep for refinement phase
+    z.smooth <- z[[NUM.LEVELS+1]]
+    v.smooth <- v[[NUM.LEVELS+1]]
+    # create weight matrix equivalent to ArcGIS circle with radius 2
+    circle2 <- matrix(c(0,0,1,0,0,
+                        0,1,1,1,0,
+                        1,1,1,1,1,
+                        0,1,1,1,0,
+                        0,0,1,0,0), nrow=5)
     circle2[circle2==0] <- NA
 
+    # refine, smooth, and combine each layer in turn
     for (j in 1+rev(seq.int(NUM.LEVELS))) {
 
-        message("Refine from ", j, " to ", j-1)
+        message("Refining from ", j, " to ", j-1)
 
-        # for the first stage where the coarser grid is refined and smoothed, set window to the coarse grid
-        #setcell z[j-1]
-        #setwindow maxof
-
-        # create smoothed higher resolution versions of z and v_bg, hopefully with no nulls!
-        hs.tmp <- focal(disaggregate(hs[[j]], 3), w=circle2, fun=mean,
+        # create smoothed higher resolution versions of z and v
+        zs <- focal(disaggregate(z.smooth, 3), w=circle2, fun=mean,
+            pad=TRUE, padValue=NA, na.rm=TRUE)
+        vs <- focal(disaggregate(v.smooth, 3), w=circle2, fun=mean,
             pad=TRUE, padValue=NA, na.rm=TRUE)
-        vs.tmp <- focal(disaggregate(vs[[j]], 3), w=circle2, fun=mean,
-            pad=TRUE, padValue=NA, na.rm=TRUE)
 
         # create no-null version of finer z and v
-        h_c <- calc(z[[j-1]], function(x) ifelse(is.na(x), 0, x))
+        z_c <- calc(z[[j-1]], function(x) ifelse(is.na(x), 0, x))
         v_c <- calc(v[[j-1]], function(x) ifelse(is.na(x), bigvar, x))
 
-        # combine two values using least variance
-        hs[[j-1]] <- (h_c/v_c + hs.tmp/vs.tmp ) / (1/v_c + 1/vs.tmp)
-        vs[[j-1]] <- 1 / (1/v_c + 1/vs.tmp)
+        # explicitly clean up, in case it helps
+        z[[j-1]] <- NULL
+        v[[j-1]] <- NULL
 
-#todo: mimic all the AML setwindow/setcell stuff???
-#        hs[[j-1]] <- expand(hs[[j-1]], 4)
-#        vs[[j-1]] <- expand(vs[[j-1]], 4)
+        # combine two values using least variance
+        z.smooth <- (z_c/v_c + zs/vs ) / (1/v_c + 1/vs)
+        v.smooth <- 1 / (1/v_c + 1/vs)
 
     }
 
-    result <- crop(stack(hs[[1]], vs[[1]]), extent(ingrid))
-    layerNames(result) <- c("hs", "vs")
+    result <- crop(stack(z.smooth, v.smooth), extent(ingrid))
+    layerNames(result) <- c("zs", "vs")
     return(result)
 
 }
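
Aside on the refinement step shared by both scripts: at each level the fine-scale value z_c (variance v_c) and the smoothed coarser-scale value zs (variance vs) are merged by inverse-variance weighting, so the combination leans toward whichever input is more certain. A minimal numeric illustration with made-up numbers (not part of this revision):

# fine-scale value and its variance
z_c, v_c = 103.0, 4.0
# smoothed coarser-scale value and its variance
zs, vs = 100.0, 1.0

# "combine two values using least variance"
z_smooth = (z_c / v_c + zs / vs) / (1.0 / v_c + 1.0 / vs)
v_smooth = 1.0 / (1.0 / v_c + 1.0 / vs)
print(z_smooth, v_smooth)

Here the combined value lands close to the lower-variance input, and the combined variance is smaller than either input variance, which is the point of the least-variance combination.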
