Skip to content

Always add values from drawn dict to givens #3792

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Closed
wants to merge 2 commits into from
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions RELEASE-NOTES.md
Original file line number Diff line number Diff line change
Expand Up @@ -9,6 +9,7 @@
+ Enable documentation generation via ReadTheDocs for upcoming v3 releases. (see [#4805](https://github.com/pymc-devs/pymc3/pull/4805)).
+ Remove `float128` dtype support (see [#4834](https://github.com/pymc-devs/pymc3/pull/4834)).
+ Use `to_tuple` function in `pm.fast_sample_posterior_predictive` to pass shape assertions (see [#4927](https://github.com/pymc-devs/pymc3/pull/4927)).
+ Fixed [bug in `draw_values`](https://github.com/pymc-devs/pymc3/issues/3789), in which values that had been drawn in a separate `_DrawValuesContext` were not added to the `givens` dictionary and led to `ValueError: Cannot resolve inputs for ...` exceptions (see [#3792](https://github.com/pymc-devs/pymc3/pull/3792)).

### New Features
+ Generalized BART, bounded distributions like Binomial and Poisson can now be used as likelihoods (see [#4675](https://github.com/pymc-devs/pymc3/pull/4675), [#4709](https://github.com/pymc-devs/pymc3/pull/4709) and
Expand Down
2 changes: 1 addition & 1 deletion pymc3/distributions/dist_math.py
Original file line number Diff line number Diff line change
Expand Up @@ -457,7 +457,7 @@ def incomplete_beta_cfe(a, b, x, small):
qkm1 = one
r = one

def _step(i, pkm1, pkm2, qkm1, qkm2, k1, k2, k3, k4, k5, k6, k7, k8, r):
def _step(_i, pkm1, pkm2, qkm1, qkm2, k1, k2, k3, k4, k5, k6, k7, k8, r):
xk = -(x * k1 * k2) / (k3 * k4)
pk = pkm1 + pkm2 * xk
qk = qkm1 + qkm2 * xk
Expand Down
14 changes: 3 additions & 11 deletions pymc3/distributions/distribution.py
Original file line number Diff line number Diff line change
Expand Up @@ -754,8 +754,8 @@ def draw_values(params, point=None, size=None):
while stack:
next_ = stack.pop(0)
if (next_, size) in drawn:
# If the node already has a givens value, skip it
continue
# If the node already has a givens value, add it to givens
givens[next_.name] = (next_, drawn[(next_, size)])
elif isinstance(next_, (theano_constant, tt.sharedvar.SharedVariable)):
# If the node is a theano.tensor.TensorConstant or a
# theano.tensor.sharedvar.SharedVariable, its value will be
Expand Down Expand Up @@ -799,7 +799,7 @@ def draw_values(params, point=None, size=None):
[
node
for node in named_nodes_descendents[next_]
if node is not None and (node, size) not in drawn
if node is not None and getattr(node, "name", None) not in givens
]
)

Expand All @@ -823,14 +823,6 @@ def draw_values(params, point=None, size=None):
evaluated[param_idx] = drawn[(param, size)]
else:
try: # might evaluate in a bad order,
# Sometimes _draw_value recurrently calls draw_values.
# This may set values for certain nodes in the drawn
# dictionary, but they don't get added to the givens
# dictionary. Here, we try to fix that.
if param in named_nodes_ancestors:
for node in named_nodes_ancestors[param]:
if node.name not in givens and (node, size) in drawn:
givens[node.name] = (node, drawn[(node, size)])
value = _draw_value(param, point=point, givens=givens.values(), size=size)
evaluated[param_idx] = drawn[(param, size)] = value
givens[param.name] = (param, value)
Expand Down
35 changes: 34 additions & 1 deletion pymc3/tests/test_sampling.py
Original file line number Diff line number Diff line change
Expand Up @@ -21,6 +21,7 @@
import arviz as az
import numpy as np
import numpy.testing as npt
import pandas as pd
import pytest
import theano
import theano.tensor as tt
Expand Down Expand Up @@ -56,7 +57,7 @@ def test_parallel_sample_does_not_reuse_seed(self):
random_numbers = []
draws = []
for _ in range(2):
np.random.seed(1) # seeds in other processes don't effect main process
np.random.seed(1) # seeds in other processes don't affect main process
with self.model:
trace = pm.sample(100, tune=0, cores=cores, return_inferencedata=False)
# numpy thread mentioned race condition. might as well check none are equal
Expand Down Expand Up @@ -1108,6 +1109,38 @@ def test_potentials_warning(self):
pm.sample_prior_predictive(samples=5)


def test_prior_sampling_mixture():
    """
    Regression test: the NormalMixture distribution did not support
    component shape identification, causing prior predictive sampling to
    error out. Builds a Dirichlet-process-style mixture (stick-breaking
    weights over K components) on the Old Faithful waiting times and
    checks that prior predictive sampling runs without raising.
    """
    # Load the dataset and standardize the waiting-time column.
    df = pd.read_csv(pm.get_data("old_faithful.csv"))
    waiting = df.waiting
    df["std_waiting"] = (waiting - waiting.mean()) / waiting.std()
    N = df.shape[0]
    K = 30

    def stick_breaking(beta):
        # Fraction of the stick left before each break: [1, prod(1-beta[:k])].
        remaining = tt.concatenate([[1], tt.extra_ops.cumprod(1 - beta)[:-1]])
        weights = beta * remaining
        # Renormalize so the truncated weights sum to one.
        return weights / tt.sum(weights, axis=-1, keepdims=True)

    with pm.Model() as model:
        # Stick-breaking construction of the mixture weights.
        alpha = pm.Gamma("alpha", 1.0, 1.0)
        beta = pm.Beta("beta", 1.0, alpha, shape=K)
        w = pm.Deterministic("w", stick_breaking(beta))

        # Per-component precision and location parameters.
        tau = pm.Gamma("tau", 1.0, 1.0, shape=K)
        lambda_ = pm.Gamma("lambda_", 10.0, 1.0, shape=K)
        mu = pm.Normal("mu", 0, tau=lambda_ * tau, shape=K)
        obs = pm.NormalMixture(
            "obs", w, mu, tau=lambda_ * tau, observed=df.std_waiting.values
        )

        # The test passes if this does not raise.
        pm.sample_prior_predictive()


class TestSamplePosteriorPredictive:
def test_point_list_arg_bug_fspp(self, point_list_arg_bug_fixture):
pmodel, trace = point_list_arg_bug_fixture
Expand Down