Commit

Implemented Generalized Gamma RV
Kyle Caron committed Jul 18, 2022
1 parent cac73ba commit 052447b
Showing 2 changed files with 92 additions and 35 deletions.

aesara/tensor/random/basic.py (56 changes: 29 additions & 27 deletions)
@@ -331,19 +331,13 @@ def rng_fn(cls, rng, mean, cov, size):
             # Neither SciPy nor NumPy implement parameter broadcasting for
             # multivariate normals (or any other multivariate distributions),
             # so we need to implement that here
+            mean, cov = broadcast_params([mean, cov], cls.ndims_params)
+            size = tuple(size or ())
+
-            size = tuple(size or ())
             if size:
-                if (
-                    0 < mean.ndim - 1 <= len(size)
-                    and size[-mean.ndim + 1 :] != mean.shape[:-1]
-                ):
-                    raise ValueError(
-                        "shape mismatch: objects cannot be broadcast to a single shape"
-                    )
                 mean = np.broadcast_to(mean, size + mean.shape[-1:])
                 cov = np.broadcast_to(cov, size + cov.shape[-2:])
-            else:
-                mean, cov = broadcast_params([mean, cov], cls.ndims_params)
 
             res = np.empty(mean.shape)
             for idx in np.ndindex(mean.shape[:-1]):
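
A note on the hunk above: the hand-rolled shape check is gone, so an incompatible size/parameter combination now surfaces as NumPy's own error from np.broadcast_to (the updated test_mvnormal_default_args further down matches on that message). A minimal standalone sketch of the failure mode, using the same shapes as the updated test; it is illustrative only and not part of the commit:

import numpy as np
import pytest

# A mean with batch shape (3,) cannot be broadcast against size=(4,), so
# np.broadcast_to raises "operands could not be broadcast together ..."
# instead of the old hand-rolled "shape mismatch" ValueError.
mean = np.zeros((3, 2))
with pytest.raises(ValueError, match="operands could not be broadcast together"):
    np.broadcast_to(mean, (4,) + mean.shape[-1:])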

@@ -374,22 +368,12 @@ def rng_fn(cls, rng, alphas, size):
             size = tuple(np.atleast_1d(size))
 
             if size:
-                if (
-                    0 < alphas.ndim - 1 <= len(size)
-                    and size[-alphas.ndim + 1 :] != alphas.shape[:-1]
-                ):
-                    raise ValueError(
-                        "shape mismatch: objects cannot be broadcast to a single shape"
-                    )
-                samples_shape = size + alphas.shape[-1:]
-            else:
-                samples_shape = alphas.shape
+                alphas = np.broadcast_to(alphas, size + alphas.shape[-1:])
 
+            samples_shape = alphas.shape
             samples = np.empty(samples_shape)
-            alphas_bcast = np.broadcast_to(alphas, samples_shape)
 
             for index in np.ndindex(*samples_shape[:-1]):
-                samples[index] = rng.dirichlet(alphas_bcast[index])
+                samples[index] = rng.dirichlet(alphas[index])
 
             return samples
         else:
Expand Down Expand Up @@ -585,6 +569,26 @@ def rng_fn_scipy(cls, rng, n, a, b, size):
betabinom = BetaBinomialRV()


class GenGammaRV(ScipyRandomVariable):
name = "gengamma"
ndim_supp = 0
ndims_params = [0, 0, 0]
dtype = "floatX"
_print_name = ("GG", "\\operatorname{GG}")

def __call__(self, alpha=1.0, p=1.0, lambd=1.0, size=None, **kwargs):
return super().__call__(alpha, p, lambd, size=size, **kwargs)

@classmethod
def rng_fn_scipy(cls, rng, alpha, p, lambd, size):
return stats.gengamma.rvs(
alpha / p, p, scale=lambd, size=size, random_state=rng
)


gengamma = GenGammaRV()


class MultinomialRV(RandomVariable):
"""A Multinomial random variable type.

@@ -608,16 +612,13 @@ def _supp_shape_from_params(self, dist_params, rep_param_idx=1, param_shapes=None):
     @classmethod
     def rng_fn(cls, rng, n, p, size):
         if n.ndim > 0 or p.ndim > 1:
+            n, p = broadcast_params([n, p], cls.ndims_params)
             size = tuple(size or ())
 
             if size:
-                if 0 < p.ndim - 1 <= len(size) and size[-p.ndim + 1 :] != p.shape[:-1]:
-                    raise ValueError(
-                        "shape mismatch: objects cannot be broadcast to a single shape"
-                    )
                 n = np.broadcast_to(n, size)
                 p = np.broadcast_to(p, size + p.shape[-1:])
-            else:
-                n, p = broadcast_params([n, p], cls.ndims_params)
 
             res = np.empty(p.shape, dtype=cls.dtype)
             for idx in np.ndindex(p.shape[:-1]):

@@ -813,4 +814,5 @@ def __call__(self, x, **kwargs):
     "uniform",
     "standard_normal",
     "negative_binomial",
+    "gengamma",
 ]

tests/tensor/random/test_basic.py (71 changes: 63 additions & 8 deletions)
@@ -28,6 +28,7 @@
     dirichlet,
     exponential,
     gamma,
+    gengamma,
     geometric,
     gumbel,
     halfcauchy,

@@ -578,9 +579,9 @@ def test_mvnormal_samples(mu, cov, size):
 def test_mvnormal_default_args():
     compare_sample_values(multivariate_normal, test_fn=mvnormal_test_fn)
 
-    with pytest.raises(ValueError, match="shape mismatch.*"):
+    with pytest.raises(ValueError, match="operands could not be broadcast together "):
         multivariate_normal.rng_fn(
-            None, np.zeros((1, 2)), np.ones((1, 2, 2)), size=(4,)
+            None, np.zeros((3, 2)), np.ones((3, 2, 2)), size=(4,)
         )



@@ -654,11 +655,17 @@ def dirichlet_test_fn(mean=None, cov=None, size=None, random_state=None):
 def test_dirichlet_rng():
     alphas = np.array([[100, 1, 1], [1, 100, 1], [1, 1, 100]], dtype=config.floatX)
 
-    with pytest.raises(ValueError, match="shape mismatch.*"):
-        # The independent dimension's shape is missing from size (i.e. should
-        # be `(10, 2, 3)`)
+    with pytest.raises(ValueError, match="operands could not be broadcast together"):
+        # The independent dimension's shape cannot be broadcasted from (3,) to (10, 2)
         dirichlet.rng_fn(None, alphas, size=(10, 2))
 
+    with pytest.raises(
+        ValueError, match="input operand has more dimensions than allowed"
+    ):
+        # One of the independent dimension's shape is missing from size
+        # (i.e. should be `(1, 3)`)
+        dirichlet.rng_fn(None, np.broadcast_to(alphas, (1, 3, 3)), size=(3,))
 
 
 M_at = iscalar("M")
 M_at.tag.test_value = 3
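
The two match strings in test_dirichlet_rng above correspond to two distinct NumPy failure modes in the simplified rng_fn: a genuine broadcast conflict, and an attempt to broadcast an array to a target with fewer dimensions, which np.broadcast_to never allows. A standalone illustration of the second case, not part of the commit:

import numpy as np
import pytest

# np.broadcast_to can prepend dimensions but never drop them, so a (1, 3, 3)
# alphas array cannot be broadcast to the (3,) + (3,) target implied by
# size=(3,); NumPy reports this as an axis-remapping error.
alphas = np.broadcast_to(np.ones((3, 3)), (1, 3, 3))
with pytest.raises(ValueError, match="input operand has more dimensions than allowed"):
    np.broadcast_to(alphas, (3, 3))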

@@ -1087,6 +1094,48 @@ def test_betabinom_samples(M, a, p, size):
     )
 
 
+@pytest.mark.parametrize(
+    "alpha, p, lambd, size",
+    [
+        (
+            np.array(2, dtype=config.floatX),
+            np.array(3, dtype=config.floatX),
+            np.array(5, dtype=config.floatX),
+            None,
+        ),
+        (
+            np.array(1, dtype=config.floatX),
+            np.array(1, dtype=config.floatX),
+            np.array(10, dtype=config.floatX),
+            [],
+        ),
+        (
+            np.array(2, dtype=config.floatX),
+            np.array(2, dtype=config.floatX),
+            np.array(10, dtype=config.floatX),
+            [2, 3],
+        ),
+        (
+            np.full((1, 2), 2, dtype=config.floatX),
+            np.array(2, dtype=config.floatX),
+            np.array(10, dtype=config.floatX),
+            None,
+        ),
+    ],
+)
+def test_gengamma_samples(alpha, p, lambd, size):
+    compare_sample_values(
+        gengamma,
+        alpha,
+        p,
+        lambd,
+        size=size,
+        test_fn=lambda *args, size=None, random_state=None, **kwargs: gengamma.rng_fn(
+            random_state, *(args + (size,))
+        ),
+    )
+
+
 @pytest.mark.parametrize(
     "M, p, size, test_fn",
     [
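
Beyond the compare_sample_values check in test_gengamma_samples above, here is a hypothetical end-to-end sketch of drawing from the new Op in an Aesara graph. It assumes the usual RandomVariable calling convention (explicit rng and size keyword arguments, a shared NumPy Generator); the seed, parameter values, and shapes are made up for illustration and are not part of the commit:

import numpy as np
import aesara
from aesara.tensor.random.basic import gengamma

# Shared NumPy Generator passed explicitly to the Op.
rng = aesara.shared(np.random.default_rng(20220718))
x = gengamma(2.0, 3.0, 5.0, size=(1000,), rng=rng)

draw = aesara.function([], x)
samples = draw()
print(samples.shape, samples.mean())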

@@ -1146,11 +1195,17 @@ def test_multinomial_rng():
     test_M = np.array([10, 20], dtype=np.int64)
     test_p = np.array([[0.999, 0.001], [0.001, 0.999]], dtype=config.floatX)
 
-    with pytest.raises(ValueError, match="shape mismatch.*"):
-        # The independent dimension's shape is missing from size (i.e. should
-        # be `(1, 2)`)
+    with pytest.raises(ValueError, match="operands could not be broadcast together"):
+        # The independent dimension's shape cannot be broadcasted from (2,) to (1,)
         multinomial.rng_fn(None, test_M, test_p, size=(1,))
 
+    with pytest.raises(
+        ValueError, match="input operand has more dimensions than allowed"
+    ):
+        # One of the independent dimension's shape is missing from size
+        # (i.e. should be `(5, 2)`)
+        multinomial.rng_fn(None, np.broadcast_to(test_M, (5, 2)), test_p, size=(2,))
 
 
 @pytest.mark.parametrize(
     "p, size, test_fn",