Skip to content

Commit

Permalink
Use a wider default init_dist in GaussianRandomWalk and AR, and raise a UserWarning when it is not explicitly defined
Browse files Browse the repository at this point in the history
  • Loading branch information
ricardoV94 committed May 31, 2022
1 parent 7e72067 commit 9149a3e
Show file tree
Hide file tree
Showing 3 changed files with 21 additions and 11 deletions.
24 changes: 17 additions & 7 deletions pymc/distributions/timeseries.py
Original file line number Diff line number Diff line change
Expand Up @@ -227,7 +227,6 @@ class GaussianRandomWalk(distribution.Continuous):
sigma > 0, innovation standard deviation, defaults to 1.0
init_dist : unnamed distribution
Univariate distribution of the initial value, created with the `.dist()` API.
Defaults to a unit Normal.
.. warning:: init will be cloned, rendering them independent of the ones passed as input.
Expand Down Expand Up @@ -274,7 +273,12 @@ def dist(

# If no scalar distribution is passed then initialize with a Normal of same mu and sigma
if init_dist is None:
init_dist = Normal.dist(0, 1)
warnings.warn(
"Initial distribution not specified, defaulting to `Normal.dist(0, 100)`."
"You can specify an init_dist manually to suppress this warning.",
UserWarning,
)
init_dist = Normal.dist(0, 100)
else:
if not (
isinstance(init_dist, at.TensorVariable)
Expand Down Expand Up @@ -369,10 +373,10 @@ class AR(SymbolicDistribution):
constant: bool, optional
Whether the first element of rho should be used as a constant term in the AR
process. Defaults to False
init_dist: unnamed distribution, optional
Scalar or vector distribution for initial values. Defaults to a unit Normal.
Distribution should be created via the `.dist()` API, and have dimension
(*size, ar_order). If not, it will be automatically resized.
init_dist: unnamed distribution
Scalar or vector distribution for initial values. Distribution should be
created via the `.dist()` API, and have dimension (*size, ar_order). If not,
it will be automatically resized.
.. warning:: init_dist will be cloned, rendering it independent of the one passed as input.
Expand Down Expand Up @@ -461,7 +465,13 @@ def dist(
f"got ndim_supp={init_dist.owner.op.ndim_supp}.",
)
else:
init_dist = Normal.dist(0, 1, size=(*sigma.shape, ar_order))
warnings.warn(
"Initial distribution not specified, defaulting to "
"`Normal.dist(0, 100, shape=...)`. You can specify an init_dist "
"manually to suppress this warning.",
UserWarning,
)
init_dist = Normal.dist(0, 100, size=(*sigma.shape, ar_order))

# Tell Aeppl to ignore init_dist, as it will be accounted for in the logp term
init_dist = ignore_logprob(init_dist)
Expand Down
2 changes: 1 addition & 1 deletion pymc/tests/test_distributions.py
Original file line number Diff line number Diff line change
Expand Up @@ -2610,7 +2610,7 @@ def test_gaussianrandomwalk(self):
def ref_logp(value, mu, sigma, steps):
# Relying on fact that init will be normal by default
return (
scipy.stats.norm.logpdf(value[0])
scipy.stats.norm.logpdf(value[0], 0, 100) # default init_dist has a scale 100
+ scipy.stats.norm.logpdf(np.diff(value), mu, sigma).sum()
)

Expand Down
6 changes: 3 additions & 3 deletions pymc/tests/test_distributions_timeseries.py
Original file line number Diff line number Diff line change
Expand Up @@ -310,11 +310,11 @@ def test_batched_rhos(self):
y_tp = np.random.randn(batch_size, steps)
with Model() as t0:
beta = Normal("beta", 0.0, 1.0, shape=(batch_size, ar_order), initval=beta_tp)
AR("y", beta, sigma=1.0, shape=(batch_size, steps), initval=y_tp)
AR("y", beta, sigma=1.0, init_dist=Normal.dist(0, 1), shape=(batch_size, steps), initval=y_tp)
with Model() as t1:
beta = Normal("beta", 0.0, 1.0, shape=(batch_size, ar_order), initval=beta_tp)
for i in range(batch_size):
AR(f"y_{i}", beta[i], sigma=1.0, shape=steps, initval=y_tp[i])
AR(f"y_{i}", beta[i], init_dist=Normal.dist(0, 1), sigma=1.0, shape=steps, initval=y_tp[i])

np.testing.assert_allclose(
t0.compile_logp()(t0.initial_point()),
Expand Down Expand Up @@ -379,7 +379,7 @@ def test_batched_init_dist(self):
beta_tp = aesara.shared(np.random.randn(ar_order), shape=(3,))
y_tp = np.random.randn(batch_size, steps)
with Model() as t0:
init_dist = Normal.dist(0.0, 1.0, size=(batch_size, ar_order))
init_dist = Normal.dist(0.0, 100.0, size=(batch_size, ar_order))
AR("y", beta_tp, sigma=0.01, init_dist=init_dist, steps=steps, initval=y_tp)
with Model() as t1:
for i in range(batch_size):
Expand Down

0 comments on commit 9149a3e

Please sign in to comment.