kb2d.py
import math # for trig and constants
import numpy as np # for array operations
from numpy import linalg # for the matrix inverse used in ksol_numpy
import scipy.spatial as sp # for fast nearest neighbour search
from numba import jit # for precompile speed up
# GSLIB's KB2D program (Deutsch and Journel, 1998) converted from the original Fortran to Python
# translated by Michael Pyrcz, the University of Texas at Austin (Jan, 2019)
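# A minimal sketch of the variogram dictionary kb2d expects (these are the keys
# read in the "load the variogram" block below; the numeric values are
# illustrative assumptions only):
#
#   vario = {'nst':1, 'nug':0.0,
#            'cc1':1.0, 'it1':1, 'azi1':0.0, 'hmaj1':500.0, 'hmin1':500.0}
#
# The it codes follow cova2 below: 1 = spherical, 2 = exponential, 3 = Gaussian, 4 = power.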
def kb2d(df,xcol,ycol,vcol,tmin,tmax,nx,xmn,xsiz,ny,ymn,ysiz,nxdis,nydis,
ndmin,ndmax,radius,ktype,skmean,vario):
# Constants
UNEST = -999.
EPSLON = 1.0e-10
VERSION = 2.907
first = True
PMX = 9999.0
MAXSAM = ndmax + 1
MAXDIS = nxdis * nydis
MAXKD = MAXSAM + 1
MAXKRG = MAXKD * MAXKD
# load the variogram
nst = vario['nst']
cc = np.zeros(nst); aa = np.zeros(nst); it = np.zeros(nst)
ang = np.zeros(nst); anis = np.zeros(nst)
c0 = vario['nug'];
cc[0] = vario['cc1']; it[0] = vario['it1']; ang[0] = vario['azi1'];
aa[0] = vario['hmaj1']; anis[0] = vario['hmin1']/vario['hmaj1'];
if nst == 2:
cc[1] = vario['cc2']; it[1] = vario['it2']; ang[1] = vario['azi2'];
aa[1] = vario['hmaj2']; anis[1] = vario['hmin2']/vario['hmaj2'];
# Allocate the needed memory:
xdb = np.zeros(MAXDIS)
ydb = np.zeros(MAXDIS)
xa = np.zeros(MAXSAM)
ya = np.zeros(MAXSAM)
vra = np.zeros(MAXSAM)
dist = np.zeros(MAXSAM)
nums = np.zeros(MAXSAM)
r = np.zeros(MAXKD)
rr = np.zeros(MAXKD)
s = np.zeros(MAXKD)
a = np.zeros(MAXKRG)
    kmap = np.zeros((ny,nx)) # (ny,nx) to match the kmap[ny-iy-1,ix] indexing below
    vmap = np.zeros((ny,nx))
# Load the data
df_extract = df.loc[(df[vcol] >= tmin) & (df[vcol] <= tmax)] # trim values outside tmin and tmax
nd = len(df_extract)
ndmax = min(ndmax,nd)
x = df_extract[xcol].values
y = df_extract[ycol].values
vr = df_extract[vcol].values
# Make a KDTree for fast search of nearest neighbours
dp = list((y[i], x[i]) for i in range(0,nd))
data_locs = np.column_stack((y,x))
tree = sp.cKDTree(data_locs, leafsize=16, compact_nodes=True, copy_data=False, balanced_tree=True)
# Summary statistics for the data after trimming
avg = vr.mean()
stdev = vr.std()
ss = stdev**2.0
vrmin = vr.min()
vrmax = vr.max()
# Set up the discretization points per block. Figure out how many
# are needed, the spacing, and fill the xdb and ydb arrays with the
# offsets relative to the block center (this only gets done once):
ndb = nxdis * nydis
if ndb > MAXDIS:
print('ERROR KB2D: Too many discretization points ')
print(' Increase MAXDIS or lower n[xy]dis')
        return kmap, vmap
xdis = xsiz / max(float(nxdis),1.0)
ydis = ysiz / max(float(nydis),1.0)
xloc = -0.5*(xsiz+xdis)
i = -1 # accounting for 0 as lowest index
for ix in range(0,nxdis):
xloc = xloc + xdis
yloc = -0.5*(ysiz+ydis)
for iy in range(0,nydis):
yloc = yloc + ydis
i = i+1
xdb[i] = xloc
ydb[i] = yloc
# Initialize accumulators:
cbb = 0.0
rad2 = radius*radius
# Calculate Block Covariance. Check for point kriging.
rotmat, maxcov = setup_rotmat(c0,nst,it,cc,ang,PMX)
cov = cova2(xdb[0],ydb[0],xdb[0],ydb[0],nst,c0,PMX,cc,aa,it,ang,anis,rotmat,maxcov)
# Keep this value to use for the unbiasedness constraint:
unbias = cov
first = False
if ndb <= 1:
cbb = cov
else:
for i in range(0,ndb):
for j in range(0,ndb):
cov = cova2(xdb[i],ydb[i],xdb[j],ydb[j],nst,c0,PMX,cc,aa,it,ang,anis,rotmat,maxcov)
if i == j:
cov = cov - c0
cbb = cbb + cov
        cbb = cbb/float(ndb*ndb)
# MAIN LOOP OVER ALL THE BLOCKS IN THE GRID:
nk = 0
ak = 0.0
vk = 0.0
for iy in range(0,ny):
yloc = ymn + (iy-0)*ysiz
for ix in range(0,nx):
xloc = xmn + (ix-0)*xsiz
current_node = (yloc,xloc)
# Find the nearest samples within each octant: First initialize
# the counter arrays:
na = -1 # accounting for 0 as first index
dist.fill(1.0e+20)
nums.fill(-1)
dist, nums = tree.query(current_node,ndmax) # use kd tree for fast nearest data search
na = len(dist) - 1
# Is there enough samples?
if na + 1 < ndmin: # accounting for min index of 0
est = UNEST
estv = UNEST
print('UNEST at ' + str(ix) + ',' + str(iy))
else:
# Put coordinates and values of neighborhood samples into xa,ya,vra:
for ia in range(0,na+1):
jj = int(nums[ia])
xa[ia] = x[jj]
ya[ia] = y[jj]
vra[ia] = vr[jj]
# Handle the situation of only one sample:
if na == 0: # accounting for min index of 0 - one sample case na = 0
cb1 = cova2(xa[0],ya[0],xa[0],ya[0],nst,c0,PMX,cc,aa,it,ang,anis,rotmat,maxcov)
xx = xa[0] - xloc
yy = ya[0] - yloc
# Establish Right Hand Side Covariance:
if ndb <= 1:
cb = cova2(xx,yy,xdb[0],ydb[0],nst,c0,PMX,cc,aa,it,ang,anis,rotmat,maxcov)
else:
cb = 0.0
for i in range(0,ndb):
cb = cb + cova2(xx,yy,xdb[i],ydb[i],nst,c0,PMX,cc,aa,it,ang,anis,rotmat,maxcov)
                            dx = xx - xdb[i]
                            dy = yy - ydb[i]
                            if (dx*dx+dy*dy) < EPSLON:
                                cb = cb - c0
                        cb = cb / float(ndb)
if ktype == 0:
s[0] = cb/cbb
est = s[0]*vra[0] + (1.0-s[0])*skmean
estv = cbb - s[0] * cb
else:
est = vra[0]
estv = cbb - 2.0*cb + cb1
else:
# Solve the Kriging System with more than one sample:
neq = na + 1 + ktype # accounting for first index of 0
nn = (neq + 1)*neq/2
# Set up kriging matrices: fill the full neq x neq left hand side row by row
# so the unbiasedness row and column land in the correct place when ktype = 1:
                    for j in range(0,na+1):
# Establish Left Hand Side Covariance Matrix:
                        for i in range(0,na+1): # was j - want full matrix
                            a[j*neq+i] = cova2(xa[i],ya[i],xa[j],ya[j],nst,c0,PMX,cc,aa,it,ang,anis,rotmat,maxcov)
xx = xa[j] - xloc
yy = ya[j] - yloc
# Establish Right Hand Side Covariance:
if ndb <= 1:
cb = cova2(xx,yy,xdb[0],ydb[0],nst,c0,PMX,cc,aa,it,ang,anis,rotmat,maxcov)
else:
cb = 0.0
for j1 in range(0,ndb):
cb = cb + cova2(xx,yy,xdb[j1],ydb[j1],nst,c0,PMX,cc,aa,it,ang,anis,rotmat,maxcov)
dx = xx - xdb[j1]
dy = yy - ydb[j1]
if (dx*dx+dy*dy) < EPSLON:
cb = cb - c0
                            cb = cb / float(ndb)
r[j] = cb
rr[j] = r[j]
# Set the unbiasedness constraint (last row and last column of the system):
                    if ktype == 1:
                        for i in range(0,na+1):
                            a[(na+1)*neq+i] = unbias
                            a[i*neq+na+1] = unbias
                        a[(na+1)*neq+na+1] = 0.0
                        r[na+1] = unbias
                        rr[na+1] = r[na+1]
# Solve the Kriging System:
s = ksol_numpy(neq,a,r)
                    ising = 0 # ksol_numpy raises numpy.linalg.LinAlgError on a singular matrix, so no flag is set here
# Write a warning if the matrix is singular:
if ising != 0:
print('WARNING KB2D: singular matrix')
print(' for block' + str(ix) + ',' + str(iy)+ ' ')
est = UNEST
estv = UNEST
else:
# Compute the estimate and the kriging variance:
est = 0.0
estv = cbb
sumw = 0.0
if ktype == 1:
                            estv = estv - s[na+1]*unbias
for i in range(0,na+1):
sumw = sumw + s[i]
est = est + s[i]*vra[i]
estv = estv - s[i]*rr[i]
if ktype == 0:
est = est + (1.0-sumw)*skmean
kmap[ny-iy-1,ix] = est
vmap[ny-iy-1,ix] = estv
if est > UNEST:
nk = nk + 1
ak = ak + est
vk = vk + est*est
# END OF MAIN LOOP OVER ALL THE BLOCKS:
if nk >= 1:
ak = ak / float(nk)
vk = vk/float(nk) - ak*ak
print(' Estimated ' + str(nk) + ' blocks ')
print(' average ' + str(ak) + ' variance ' + str(vk))
return kmap, vmap
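# Note on output orientation: kmap and vmap are filled with row index ny-iy-1,
# so row 0 corresponds to the block row with the largest y coordinate
# (ymn + (ny-1)*ysiz).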
@jit(nopython=True) # all NumPy array operations included in this function for precompile with NumBa
def setup_rotmat(c0,nst,it,cc,ang,PMX):
DTOR=3.14159265/180.0; EPSLON=0.000000; PI=3.141593
# The first time around, re-initialize the cosine matrix for the
# variogram structures:
rotmat = np.zeros((4,nst))
maxcov = c0
for js in range(0,nst):
azmuth = (90.0-ang[js])*DTOR
rotmat[0,js] = math.cos(azmuth)
rotmat[1,js] = math.sin(azmuth)
rotmat[2,js] = -1*math.sin(azmuth)
rotmat[3,js] = math.cos(azmuth)
if it[js] == 4:
maxcov = maxcov + PMX
else:
maxcov = maxcov + cc[js]
return rotmat, maxcov
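# setup_rotmat returns, per structure, the 2x2 rotation that aligns coordinates
# with the variogram azimuth, plus maxcov, the total sill used by cova2 for
# coincident points (nugget plus each cc, with PMX substituted for power-model
# structures).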
@jit(nopython=True) # all NumPy array operations included in this function for precompile with NumBa
def cova2(x1,y1,x2,y2,nst,c0,PMX,cc,aa,it,ang,anis,rotmat,maxcov):
    DTOR=3.14159265/180.0; EPSLON=1.0e-20; PI=3.141593 # small positive EPSLON so coincident points take the maxcov shortcut below
# Check for very small distance:
dx = x2-x1
dy = y2-y1
# print(dx,dy)
if (dx*dx+dy*dy) < EPSLON:
cova2 = maxcov
return cova2
# Non-zero distance, loop over all the structures:
cova2 = 0.0
for js in range(0,nst):
# print(js)
# print(rotmat)
# Compute the appropriate structural distance:
dx1 = (dx*rotmat[0,js] + dy*rotmat[1,js])
dy1 = (dx*rotmat[2,js] + dy*rotmat[3,js])/anis[js]
h = math.sqrt(max((dx1*dx1+dy1*dy1),0.0))
if it[js] == 1:
# Spherical model:
hr = h/aa[js]
if hr < 1.0:
cova2 = cova2 + cc[js]*(1.-hr*(1.5-.5*hr*hr))
elif it[js] == 2:
# Exponential model:
cova2 = cova2 + cc[js]*np.exp(-3.0*h/aa[js])
elif it[js] == 3:
# Gaussian model:
hh=-3.0*(h*h)/(aa[js]*aa[js])
cova2 = cova2 +cc[js]*np.exp(hh)
elif it[js] == 4:
# Power model:
cov1 = PMX - cc[js]*(h**aa[js])
cova2 = cova2 + cov1
return cova2
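# For a single spherical structure (it = 1), for example, cova2 returns maxcov
# (= c0 + cc) at zero separation via the shortcut above and decays to 0.0 once
# the anisotropy-corrected separation h reaches the range aa.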
def ksol_numpy(neq,a,r): # using Numpy methods
a = a[0:neq*neq] # trim the array
a = np.reshape(a,(neq,neq)) # reshape to 2D
    ainv = linalg.inv(a) # invert matrix (raises numpy.linalg.LinAlgError if the kriging matrix is singular)
r = r[0:neq] # trim the array
s = np.matmul(ainv,r) # matrix multiplication
return s
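
# A minimal usage sketch; the file name, column names, and parameter values
# below are illustrative assumptions rather than values shipped with this module.
if __name__ == "__main__":
    import pandas as pd # assumed available; kb2d expects a pandas DataFrame

    df = pd.read_csv("sample_data.csv") # hypothetical file with X, Y and Porosity columns
    vario = {'nst':1, 'nug':0.0,
             'cc1':1.0, 'it1':1, 'azi1':0.0, 'hmaj1':500.0, 'hmin1':500.0}
    kmap, vmap = kb2d(df, xcol='X', ycol='Y', vcol='Porosity',
                      tmin=-9999.0, tmax=9999.0,
                      nx=100, xmn=5.0, xsiz=10.0,
                      ny=100, ymn=5.0, ysiz=10.0,
                      nxdis=1, nydis=1,
                      ndmin=1, ndmax=10, radius=400.0,
                      ktype=1, skmean=0.0, vario=vario)
    print(kmap.shape, vmap.shape) # (ny, nx) arrays of kriging estimates and variances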