Editing branch #23

Open
wants to merge 19 commits into base: master
239 changes: 229 additions & 10 deletions geostatspy/geostats.py
@@ -38,7 +38,7 @@ def backtr(df,vcol,vr,vrg,zmin,zmax,ltail,ltpar,utail,utpar):
    :param utpar: upper tail extrapolation parameter
    :return: TODO
    """

    # small tolerance constant
    EPSLON=1.0e-20
    nd = len(df); nt = len(vr) # number of data to transform and number of data in table
    backtr = np.zeros(nd)
@@ -2196,6 +2196,222 @@ def nscore(

    return ns, vr, wt_ns

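For context on the parameters below, here is a minimal sketch of the 3D variogram dictionary that kt3d reads; the key names are taken from the loading code further down, while the values and the single spherical structure are purely illustrative assumptions:

# hypothetical 3D variogram dictionary for kt3d (key names from the code below, values illustrative only)
vario_3d = {
    'nug': 0.0,      # nugget effect contribution
    'nst': 1,        # number of nested structures (only 1 or 2 are handled)
    'it1': 1,        # structure type (1 = spherical, GSLIB convention)
    'cc1': 1.0,      # variance contribution of structure 1
    'azi1': 45.0,    # azimuth of the major horizontal direction (degrees)
    'dip1': 0.0,     # dip of the major direction (degrees)
    'hmax1': 500.0,  # major horizontal range
    'hmed1': 200.0,  # minor horizontal range
    'hmin1': 50.0,   # vertical range
}
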
# *** all parameters have been copied over from kb2d, with a respective z parameter added where necessary
# we can probably delete a couple of these if we don't do block kriging
def kt3d(
    df,
    xcol,
    ycol,
    zcol,
    vcol,
    tmin,
    tmax,
    nx,
    xmn,
    xsiz,
    ny,
    ymn,
    ysiz,
    nz,
    zmn,
    zsiz,
    nxdis,
    nydis,
    nzdis,
    ndmin,
    ndmax,
    radius,
    ktype,
    skmean,
    vario,
):
    UNEST = -999.
    EPSLON = 1.0e-10
    PMX = 9999.0
    MAXSAM = ndmax + 1
    MAXDIS = nxdis * nydis * nzdis
    MAXKD = MAXSAM + 1
    MAXKRG = MAXKD * MAXKD

    # Allocate the needed memory:

    # used for block kriging displacements
    xdb = np.zeros(MAXDIS)
    ydb = np.zeros(MAXDIS)
    zdb = np.zeros(MAXDIS)
    # temp arrays to hold nearest neighbor search results
    xa = np.zeros(MAXSAM)
    ya = np.zeros(MAXSAM)
    za = np.zeros(MAXSAM)
    vra = np.zeros(MAXSAM)
    # result grid meshes for the estimates and the estimation variance,
    # ordered (z, y, x) to match the indexing in the main loop below
    kmap = np.zeros((nz,ny,nx))
    vmap = np.zeros((nz,ny,nx))

    # trim values outside tmin and tmax
    df_extract = df.loc[(df[vcol] >= tmin) & (df[vcol] <= tmax)]
    nd = len(df_extract)
    ndmax = min(ndmax,nd)
    x = df_extract[xcol].values
    y = df_extract[ycol].values
    z = df_extract[zcol].values
    vr = df_extract[vcol].values

    # set up a KD-tree for fast nearest neighbor search
    dp = list((z[i], y[i], x[i]) for i in range(0,nd))
    data_locs = np.column_stack((z, y, x))
    tree = sp.cKDTree(data_locs, leafsize=16, compact_nodes=True, copy_data=False, balanced_tree=True)

    # Summary statistics for the data after trimming
    avg = vr.mean()
    stdev = vr.std()
    ss = stdev**2.0 # variance
    vrmin = vr.min()
    vrmax = vr.max()

    # load the variogram
    nst = vario['nst']      # number of nested structures
    cc = np.zeros(nst)      # variance contribution
    aa = np.zeros(nst)      # major range
    it = np.zeros(nst)      # variogram structure type
    ang_azi = np.zeros(nst) # azimuth
    ang_dip = np.zeros(nst) # dip
    anis = np.zeros(nst)    # anisotropy ratio w/ minor range
    anis_v = np.zeros(nst)  # anisotropy ratio w/ vertical range

    # only works w/ nst == 1 or nst == 2
    c0 = vario['nug']
    it[0] = vario['it1']
    cc[0] = vario['cc1']
    ang_azi[0] = vario['azi1']
    ang_dip[0] = vario['dip1']
    aa[0] = vario['hmax1']
    anis[0] = vario['hmed1']/vario['hmax1']
    anis_v[0] = vario['hmin1']/vario['hmax1']
    if nst == 2:
        cc[1] = vario['cc2']
        it[1] = vario['it2']
        ang_azi[1] = vario['azi2']
        ang_dip[1] = vario['dip2']
        aa[1] = vario['hmax2']
        anis[1] = vario['hmed2']/vario['hmax2']
        anis_v[1] = vario['hmin2']/vario['hmax2']

    rotmat, maxcov = setup_rotmat_3D(c0, nst, it, cc, ang_azi, ang_dip, PMX)
    cbb = maxcov
    unbias = maxcov

    # limited to simple kriging for now
    ktype = 0

    # TODO Set up discretization points per block (block kriging)

    # main loop over all points:
    nk = 0
    ak = 0.0
    vk = 0.0
    for iz in range(0,nz):
        zloc = zmn + (iz-0)*zsiz
        for iy in range(0,ny):
            yloc = ymn + (iy-0)*ysiz
            for ix in range(0,nx): # loop over all grid cells and estimate at each cell centroid
                xloc = xmn + (ix-0)*xsiz
                current_node = (zloc,yloc,xloc) # (z, y, x) centroid of the current cell, ordered to match the KD-tree

                # Find the nearest samples:
                dist, nums = tree.query(current_node,ndmax) # use kd tree for fast nearest data search
                # remove any data outside the search radius
                nums = nums[dist<radius]
                dist = dist[dist<radius]
                na = len(dist)

                # Are there enough samples?
                if na < ndmin:
                    est = UNEST
                    estv = UNEST
                    # not enough samples, mark as "unestimated"
                    print('UNEST at ' + str(ix) + ',' + str(iy) + ',' + str(iz))
                else:
                    # Put coordinates and values of neighborhood samples into xa,ya,za,vra:
                    for ia in range(0,na):
                        jj = int(nums[ia])
                        xa[ia] = x[jj]
                        ya[ia] = y[jj]
                        za[ia] = z[jj]
                        vra[ia] = vr[jj]

                    # Handle the situation of only one sample:
                    if na == 1:
                        cb1 = cova3(xa[0],ya[0],za[0],xa[0],ya[0],za[0],nst,c0,PMX,cc,aa,it,anis,anis_v,rotmat,maxcov)

                        xx = xa[0] - xloc
                        yy = ya[0] - yloc
                        zz = za[0] - zloc

                        # Establish Right Hand Side Covariance:
                        cb = cova3(xx,yy,zz,xdb[0],ydb[0],zdb[0],nst,c0,PMX,cc,aa,it,anis,anis_v,rotmat,maxcov)

                        if ktype == 0:
                            s = np.zeros(1) # single simple kriging weight
                            s[0] = cb/cbb
                            est = s[0]*vra[0] + (1.0-s[0])*skmean
                            estv = cbb - s[0] * cb
                        # else:
                        #     est = vra[0]
                        #     estv = cbb - 2.0*cb + cb1
                    else:
                        # Solve the Kriging System with more than one sample:
                        neq = na + ktype
                        # print('NEQ' + str(neq))
                        nn = (neq + 1)*neq/2

                        # Set up kriging matrices:
                        a = np.zeros((na,na))
                        r = np.zeros(na)
                        rr = np.zeros(na)

                        # Establish Left Hand Side Covariance Matrix:

                        for j in range(0,na):
                            for i in range(0,na):
                                # add covariance into left-hand square matrix
                                a[j][i] = cova3(xa[i],ya[i],za[i],xa[j],ya[j],za[j],nst,c0,PMX,cc,aa,it,anis,anis_v,rotmat,maxcov)
                            # add covariance into right-hand column vector
                            r[j] = cova3(xloc,yloc,zloc,xa[j],ya[j],za[j],nst,c0,PMX,cc,aa,it,anis,anis_v,rotmat,maxcov)
                        rr = r.copy() # keep a copy of the right hand side for the variance calculation

                        # solve the kriging system
                        s = ksol_numpy(neq,a,r)
                        ising = 0

                        # Write a warning if the matrix is singular:
                        if ising != 0:
                            print('WARNING KT3D: singular matrix')
                            est = UNEST
                            estv = UNEST
                        else:
                            # Compute the estimate and the kriging variance:
                            est = 0.0
                            estv = cbb
                            sumw = 0.0
                            # if ktype == 1:
                            #     estv = estv - (s[na])*unbias
                            for i in range(0,na):
                                sumw = sumw + s[i]
                                est = est + s[i]*vra[i]
                                estv = estv - s[i]*rr[i]
                            if ktype == 0:
                                est = est + (1.0-sumw)*skmean
                # add the estimated value and the estimation variance to the output maps
                kmap[nz-iz-1, ny-iy-1, ix] = est
                vmap[nz-iz-1, ny-iy-1, ix] = estv
                if est > UNEST:
                    nk = nk + 1
                    ak = ak + est
                    vk = vk + est*est

    return kmap, vmap
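
A hedged usage sketch of the new function: it assumes a DataFrame df with 'X', 'Y', 'Z' and 'Porosity' columns and the vario_3d dictionary sketched above; all grid, search, and mean values are illustrative only:

# illustrative call only - column names, grid definition and search settings are assumptions
kmap, vmap = kt3d(
    df, 'X', 'Y', 'Z', 'Porosity',
    tmin=-9999.9, tmax=9999.9,
    nx=100, xmn=5.0, xsiz=10.0,
    ny=100, ymn=5.0, ysiz=10.0,
    nz=10, zmn=0.5, zsiz=1.0,
    nxdis=1, nydis=1, nzdis=1,    # point kriging; block discretization is still a TODO
    ndmin=1, ndmax=10, radius=300.0,
    ktype=0, skmean=0.10,         # simple kriging is forced internally for now
    vario=vario_3d,
)
# kmap and vmap are (nz, ny, nx) grids of estimates and kriging variances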

def kb2d(
    df,
@@ -2258,15 +2474,19 @@ def kb2d(

    # load the variogram
    nst = vario['nst']
    cc = np.zeros(nst); aa = np.zeros(nst); it = np.zeros(nst)
    ang = np.zeros(nst); anis = np.zeros(nst)
    cc = np.zeros(nst)
    aa = np.zeros(nst)
    it = np.zeros(nst)
    ang = np.zeros(nst)
    anis = np.zeros(nst)

    c0 = vario['nug'];
    cc[0] = vario['cc1']; it[0] = vario['it1']; ang[0] = vario['azi1'];
    aa[0] = vario['hmaj1']; anis[0] = vario['hmin1']/vario['hmaj1'];
    if nst == 2:
        cc[1] = vario['cc2']; it[1] = vario['it2']; ang[1] = vario['azi2'];
        aa[1] = vario['hmaj2']; anis[1] = vario['hmin2']/vario['hmaj2'];


    # Allocate the needed memory:
    xdb = np.zeros(MAXDIS)
@@ -2299,7 +2519,7 @@ def kb2d(
    # Summary statistics for the data after trimming
    avg = vr.mean()
    stdev = vr.std()
    ss = stdev**2.0
    ss = stdev**2.0 # variance
    vrmin = vr.min()
    vrmax = vr.max()

@@ -2324,6 +2544,7 @@ def kb2d(
            xdb[i] = xloc
            ydb[i] = yloc


    # Initialize accumulators:
    cbb = 0.0
    rad2 = radius*radius
@@ -2353,7 +2574,7 @@ def kb2d(
        yloc = ymn + (iy-0)*ysiz
        for ix in range(0,nx):
            xloc = xmn + (ix-0)*xsiz
            current_node = (yloc,xloc)
            current_node = (yloc,xloc) # xloc, yloc centroid of cell

            # Find the nearest samples within each octant: First initialize
            # the counter arrays:
@@ -2421,16 +2642,17 @@ def kb2d(
                    for i in range(0,na): # was j - want full matrix
                        iin = iin + 1
                        a[iin] = cova2(xa[i],ya[i],xa[j],ya[j],nst,c0,PMX,cc,aa,it,ang,anis,rotmat,maxcov)
                    if ktype == 1:
                        iin = iin + 1
                        a[iin] = unbias
                    xx = xa[j] - xloc
                    yy = ya[j] - yloc

                    # Establish Right Hand Side Covariance:
                    if ndb <= 1:
                        # ndb <= 1 -> use single value
                        cb = cova2(xx,yy,xdb[0],ydb[0],nst,c0,PMX,cc,aa,it,ang,anis,rotmat,maxcov)
                    else:
                        cb = 0.0
                        for j1 in range(0,ndb):
                            cb = cb + cova2(xx,yy,xdb[j1],ydb[j1],nst,c0,PMX,cc,aa,it,ang,anis,rotmat,maxcov)
Expand Down Expand Up @@ -4470,9 +4692,6 @@ def cova3(x1, y1, z1, x2, y2, z2, nst, c0, pmx, cc, aa, it, anis, anis_v, rotmat
        cova3_ = cova3_ + cov1
    return cova3_





def gamv_3D(
    df,