
Commit

Merge branch 'dev' of https://github.com/mbakker7/timml into dev
mbakker7 committed Feb 13, 2024
2 parents dfe8dea + 9c261b7 commit 9f447f2
Showing 2 changed files with 45 additions and 9 deletions.
45 changes: 40 additions & 5 deletions tests/test_besselaes.py
@@ -22,7 +22,14 @@
 # @pytest.mark.skip(reason="no fortran extension by default")
 def potbesldho(x):
     pot = besselaesnew.potbesldho(
-        2.0, 1.0, complex(-3.0, -1.0), complex(2.0, 2.0), [0.0, 2.0, 11.0], x, 1, 3
+        2.0,
+        1.0,
+        complex(-3.0, -1.0),
+        complex(2.0, 2.0),
+        np.array([0.0, 2.0, 11.0]),
+        x,
+        1,
+        3,
     )
     return pot

@@ -36,7 +43,14 @@ def test_potbesldho():
 # @pytest.mark.skip(reason="no fortran extension by default")
 def test_potbesldv():
     potv = besselaesnew.potbesldv(
-        2.0, 1.0, complex(-3.0, -1.0), complex(2.0, 2.0), [0.0, 2.0, 11.0], 1, 1, 3
+        2.0,
+        1.0,
+        complex(-3.0, -1.0),
+        complex(2.0, 2.0),
+        np.array([0.0, 2.0, 11.0]),
+        1,
+        1,
+        3,
     )
     assert_allclose(potv[0], np.array([-0.31055947, -0.23498503, -0.30327438]))
     assert_allclose(potv[1], np.array([-0.17694283, -0.15257055, -0.17583515]))
@@ -45,13 +59,27 @@ def test_potbesldv():
 # @pytest.mark.skip(reason="no fortran extension by default")
 def test_disbesldho():
     qxqy_zero = besselaesnew.disbesldho(
-        2.0, 1.0, complex(-3.0, -1.0), complex(2.0, 2.0), [0.0, 2.0, 11.0], 0, 1, 3
+        2.0,
+        1.0,
+        complex(-3.0, -1.0),
+        complex(2.0, 2.0),
+        np.array([0.0, 2.0, 11.0]),
+        0,
+        1,
+        3,
     )
     assert_allclose(qxqy_zero[0], np.array([-0.170131146, -0.18423853, -0.173157849]))
     assert_allclose(qxqy_zero[1], np.array([0.0274405074, 0.0888068675, 0.0342656083]))

     qxqy_one = besselaesnew.disbesldho(
-        2.0, 1.0, complex(-3.0, -1.0), complex(2.0, 2.0), [0.0, 2.0, 11.0], 1, 1, 3
+        2.0,
+        1.0,
+        complex(-3.0, -1.0),
+        complex(2.0, 2.0),
+        np.array([0.0, 2.0, 11.0]),
+        1,
+        1,
+        3,
     )
     assert_allclose(qxqy_one[0], np.array([-0.10412493, -0.1084466406, -0.104477618]))
     assert_allclose(qxqy_one[1], np.array([0.106176131, 0.1162738781, 0.1067421121]))
@@ -60,7 +88,14 @@ def test_disbesldho():
 # @pytest.mark.skip(reason="no fortran extension by default")
 def test_disbesldv():
     qxqyv = besselaesnew.disbesldv(
-        2.0, 1.0, complex(-3.0, -1.0), complex(2.0, 2.0), [0.0, 2.0, 11.0], 1, 1, 3
+        2.0,
+        1.0,
+        complex(-3.0, -1.0),
+        complex(2.0, 2.0),
+        np.array([0.0, 2.0, 11.0]),
+        1,
+        1,
+        3,
     )
     assert_allclose(
         qxqyv[0],
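Every call into the compiled besselaesnew routines in this file now passes the leakage factors as a NumPy array rather than a plain Python list. A minimal sketch of the new calling convention, reusing the argument values from test_potbesldv (how besselaesnew itself is imported is not shown in these hunks and is assumed to be available):

import numpy as np

lab = np.array([0.0, 2.0, 11.0])  # previously the plain list [0.0, 2.0, 11.0]
potv = besselaesnew.potbesldv(
    2.0, 1.0, complex(-3.0, -1.0), complex(2.0, 2.0), lab, 1, 1, 3
)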
9 changes: 5 additions & 4 deletions timml/well.py
@@ -466,6 +466,7 @@ def initialize(self):
     def setparams(self, sol):
         self.parameters[:, 0] = sol

+
 class LargeDiameterWell(WellBase, MscreenWellNoflowEquation):
     """
     Experimental class for radial flow to large diameter well
@@ -529,7 +530,7 @@ def __init__(
             yc=yc,
         )
         self.Qc = float(Qw)
-        self.screened = layers # layers where well is screened
+        self.screened = layers  # layers where well is screened
         self.nscreened = len(self.screened)
         if self.nlayers == 1:
             self.nunknowns = 0
@@ -557,7 +558,7 @@ def potinf(self, x, y, aq=None):
             else:
                 pot[:] = -k0(r / aq.lab) / (2 * np.pi) / k0(self.rw / aq.lab)
             rv[:] = self.aq.coef[self.layers] * pot
-            #rv[:] = pot
+            # rv[:] = pot
         return rv

     def disvecinf(self, x, y, aq=None):
@@ -579,11 +580,11 @@ def disvecinf(self, x, y, aq=None):
             if aq.ilap:
                 qx[0] = -1 / (2 * np.pi) * xminxw / rsq
                 qy[0] = -1 / (2 * np.pi) * yminyw / rsq
-                kone = k1(r / aq.lab[1:]) / k0(self.rw / aq.lab[1:])
+                kone = k1(r / aq.lab[1:]) / k0(self.rw / aq.lab[1:])
                 qx[1:] = -kone * xminxw / (r * aq.lab[1:]) / (2 * np.pi)
                 qy[1:] = -kone * yminyw / (r * aq.lab[1:]) / (2 * np.pi)
             else:
-                kone = k1(r / aq.lab) / k0(self.rw / aq.lab)
+                kone = k1(r / aq.lab) / k0(self.rw / aq.lab)
                 qx[:] = -kone * xminxw / (r * aq.lab) / (2 * np.pi)
                 qy[:] = -kone * yminyw / (r * aq.lab) / (2 * np.pi)
             rv[0] = self.aq.coef[self.layers] * qx
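For reference, the expressions touched in potinf and disvecinf above are the modified-Bessel-function solution for radial flow to a well of finite radius rw: the potential influence is proportional to K0(r/λ)/K0(rw/λ) and the discharge involves K1(r/λ)/K0(rw/λ). A standalone numerical sketch of just these two expressions, with arbitrary example values (k0 and k1 are taken to be scipy.special's modified Bessel functions, matching the names used in well.py):

import numpy as np
from scipy.special import k0, k1  # modified Bessel functions K0 and K1

rw = 0.3                       # well radius (arbitrary example value)
lab = 100.0                    # leakage factor lambda (arbitrary example value)
xminxw, yminyw = 20.0, 15.0    # offsets from the well centre (arbitrary)
r = np.hypot(xminxw, yminyw)   # distance from the well centre, here 25.0

# potential influence, mirroring LargeDiameterWell.potinf
pot = -k0(r / lab) / (2 * np.pi) / k0(rw / lab)

# discharge components, mirroring LargeDiameterWell.disvecinf
kone = k1(r / lab) / k0(rw / lab)
qx = -kone * xminxw / (r * lab) / (2 * np.pi)
qy = -kone * yminyw / (r * lab) / (2 * np.pi)

print(pot, qx, qy)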
