
Commit a13240c

lucianopaz, Spaak, rpgoldman, and Michael Osthege authored and committed

Always add values from drawn dict to givens

Co-authored-by: Eelke Spaak <[email protected]>
Co-authored-by: Robert P. Goldman <[email protected]>
Co-authored-by: Michael Osthege <[email protected]>
1 parent 791c53c commit a13240c

File tree

4 files changed: +40 -14 lines changed

RELEASE-NOTES.md (+1)

```diff
@@ -9,6 +9,7 @@
 + Enable documentation generation via ReadTheDocs for upcoming v3 releases. (see [#4805](https://github.com/pymc-devs/pymc3/pull/4805)).
 + Remove `float128` dtype support (see [#4834](https://github.com/pymc-devs/pymc3/pull/4834)).
 + Use `to_tuple` function in `pm.fast_sample_posterior_predictive` to pass shape assertions (see [#4927](https://github.com/pymc-devs/pymc3/pull/4927)).
++ Fixed [bug in `draw_values`](https://github.com/pymc-devs/pymc3/issues/3789), in which values that had been drawn in a separate `_DrawValuesContext` were not added to the `givens` dictionary and led to `ValueError: Cannot resolve inputs for ...` exceptions (see [#3792](https://github.com/pymc-devs/pymc3/pull/3792)).

 ### New Features
 + Generalized BART, bounded distributions like Binomial and Poisson can now be used as likelihoods (see [#4675](https://github.com/pymc-devs/pymc3/pull/4675), [#4709](https://github.com/pymc-devs/pymc3/pull/4709) and
```
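
The release note above describes the failure mode this commit fixes: prior predictive sampling of mixture models could raise `ValueError: Cannot resolve inputs for ...` because values drawn in a nested `_DrawValuesContext` never reached the `givens` dictionary. The snippet below is a minimal sketch of that call pattern, adapted from the regression test added at the bottom of this commit; the synthetic `data` array and the seed are assumptions standing in for the `old_faithful.csv` column the real test loads.

```python
# Hedged sketch of the failure mode, adapted from the regression test in this
# commit. The synthetic `data` array is an assumption that stands in for the
# standardized "waiting" column of old_faithful.csv used by the real test.
import numpy as np
import pymc3 as pm
import theano.tensor as tt

data = np.random.RandomState(1234).normal(size=272)  # placeholder observations
K = 30  # number of mixture components


def stick_breaking(beta):
    # Convert Beta draws into normalized stick-breaking mixture weights.
    portion_remaining = tt.concatenate([[1], tt.extra_ops.cumprod(1 - beta)[:-1]])
    result = beta * portion_remaining
    return result / tt.sum(result, axis=-1, keepdims=True)


with pm.Model():
    alpha = pm.Gamma("alpha", 1.0, 1.0)
    beta = pm.Beta("beta", 1.0, alpha, shape=K)
    w = pm.Deterministic("w", stick_breaking(beta))

    tau = pm.Gamma("tau", 1.0, 1.0, shape=K)
    lambda_ = pm.Gamma("lambda_", 10.0, 1.0, shape=K)
    mu = pm.Normal("mu", 0, tau=lambda_ * tau, shape=K)
    pm.NormalMixture("obs", w, mu, tau=lambda_ * tau, observed=data)

    # Before this commit, draw_values could lose values drawn in a nested
    # _DrawValuesContext and raise "ValueError: Cannot resolve inputs for ...".
    pm.sample_prior_predictive()
```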

pymc3/distributions/dist_math.py (+1 -1)

```diff
@@ -457,7 +457,7 @@ def incomplete_beta_cfe(a, b, x, small):
     qkm1 = one
     r = one

-    def _step(i, pkm1, pkm2, qkm1, qkm2, k1, k2, k3, k4, k5, k6, k7, k8, r):
+    def _step(_i, pkm1, pkm2, qkm1, qkm2, k1, k2, k3, k4, k5, k6, k7, k8, r):
         xk = -(x * k1 * k2) / (k3 * k4)
         pk = pkm1 + pkm2 * xk
         qk = qkm1 + qkm2 * xk
```
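
The only change in this file renames the step function's iteration counter to `_i`, marking it as intentionally unused by the body of `_step`. A minimal, hypothetical sketch of the same convention (the function and values below are illustrative, not code from this repository):

```python
# Hypothetical illustration of the underscore-prefix convention: the leading
# underscore marks the iteration counter as deliberately unused, which keeps
# linters quiet without changing behaviour.
def _step(_i, accumulator, increment):
    # `_i` is accepted only because the caller supplies it; it is never read.
    return accumulator + increment


total = 0
for i in range(5):
    total = _step(i, total, 2)
print(total)  # prints 10
```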

pymc3/distributions/distribution.py (+4 -12)

```diff
@@ -754,8 +754,8 @@ def draw_values(params, point=None, size=None):
         while stack:
             next_ = stack.pop(0)
             if (next_, size) in drawn:
-                # If the node already has a givens value, skip it
-                continue
+                # If the node already has a givens value, add it to givens
+                givens[next_.name] = (next_, drawn[(next_, size)])
             elif isinstance(next_, (theano_constant, tt.sharedvar.SharedVariable)):
                 # If the node is a theano.tensor.TensorConstant or a
                 # theano.tensor.sharedvar.SharedVariable, its value will be
@@ -798,8 +798,8 @@ def draw_values(params, point=None, size=None):
                     stack.extend(
                         [
                             node
-                            for node in named_nodes_descendents[next_]
-                            if node is not None and (node, size) not in drawn
+                            for node in named_nodes_parents[next_]
+                            if node is not None and getattr(node, "name", None) not in givens
                         ]
                     )

@@ -823,14 +823,6 @@ def draw_values(params, point=None, size=None):
                 evaluated[param_idx] = drawn[(param, size)]
             else:
                 try:  # might evaluate in a bad order,
-                    # Sometimes _draw_value recurrently calls draw_values.
-                    # This may set values for certain nodes in the drawn
-                    # dictionary, but they don't get added to the givens
-                    # dictionary. Here, we try to fix that.
-                    if param in named_nodes_ancestors:
-                        for node in named_nodes_ancestors[param]:
-                            if node.name not in givens and (node, size) in drawn:
-                                givens[node.name] = (node, drawn[(node, size)])
                     value = _draw_value(param, point=point, givens=givens.values(), size=size)
                     evaluated[param_idx] = drawn[(param, size)] = value
                     givens[param.name] = (param, value)
```
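
The first two hunks change the ancestor traversal so that nodes whose values were already drawn, for example inside a nested `_DrawValuesContext`, are copied into `givens` instead of being skipped, and the third hunk removes the ad hoc patch-up that previously compensated for the missing entries. The sketch below is a simplified, hypothetical rendering of that traversal pattern, not the actual `draw_values` implementation; `collect_givens`, `Node`, and `leaf_nodes` are illustrative names.

```python
# Simplified, hypothetical sketch of the traversal pattern after this fix
# (not the actual pymc3 code). `drawn` caches values keyed by (node, size);
# `named_nodes_parents` maps a node to its named parent nodes.
def collect_givens(leaf_nodes, named_nodes_parents, drawn, size):
    givens = {}
    stack = list(leaf_nodes)
    while stack:
        next_ = stack.pop(0)
        if (next_, size) in drawn:
            # Old behaviour: `continue`, silently dropping the cached value.
            # New behaviour: expose it so dependent nodes can resolve inputs.
            givens[next_.name] = (next_, drawn[(next_, size)])
        else:
            # Keep walking towards the parents, skipping nodes already covered.
            stack.extend(
                node
                for node in named_nodes_parents.get(next_, [])
                if node is not None and getattr(node, "name", None) not in givens
            )
    return givens


# Tiny usage example with plain stand-in "nodes".
class Node:
    def __init__(self, name):
        self.name = name


x, y = Node("x"), Node("y")
parents = {y: [x], x: []}
drawn = {(x, None): 1.5}
print(collect_givens([y], parents, drawn, None))  # e.g. {'x': (<Node ...>, 1.5)}
```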

pymc3/tests/test_sampling.py (+34 -1)

```diff
@@ -21,6 +21,7 @@
 import arviz as az
 import numpy as np
 import numpy.testing as npt
+import pandas as pd
 import pytest
 import theano
 import theano.tensor as tt
@@ -56,7 +57,7 @@ def test_parallel_sample_does_not_reuse_seed(self):
         random_numbers = []
         draws = []
         for _ in range(2):
-            np.random.seed(1)  # seeds in other processes don't effect main process
+            np.random.seed(1)  # seeds in other processes don't affect main process
             with self.model:
                 trace = pm.sample(100, tune=0, cores=cores, return_inferencedata=False)
             # numpy thread mentioned race condition. might as well check none are equal
@@ -1108,6 +1109,38 @@ def test_potentials_warning(self):
                 pm.sample_prior_predictive(samples=5)


+def test_prior_sampling_mixture():
+    """
+    Added this test because the NormalMixture distribution did not support
+    component shape identification, causing prior predictive sampling to error out.
+    """
+    old_faithful_df = pd.read_csv(pm.get_data("old_faithful.csv"))
+    old_faithful_df["std_waiting"] = (
+        old_faithful_df.waiting - old_faithful_df.waiting.mean()
+    ) / old_faithful_df.waiting.std()
+    N = old_faithful_df.shape[0]
+    K = 30
+
+    def stick_breaking(beta):
+        portion_remaining = tt.concatenate([[1], tt.extra_ops.cumprod(1 - beta)[:-1]])
+        result = beta * portion_remaining
+        return result / tt.sum(result, axis=-1, keepdims=True)
+
+    with pm.Model() as model:
+        alpha = pm.Gamma("alpha", 1.0, 1.0)
+        beta = pm.Beta("beta", 1.0, alpha, shape=K)
+        w = pm.Deterministic("w", stick_breaking(beta))
+
+        tau = pm.Gamma("tau", 1.0, 1.0, shape=K)
+        lambda_ = pm.Gamma("lambda_", 10.0, 1.0, shape=K)
+        mu = pm.Normal("mu", 0, tau=lambda_ * tau, shape=K)
+        obs = pm.NormalMixture(
+            "obs", w, mu, tau=lambda_ * tau, observed=old_faithful_df.std_waiting.values
+        )
+
+        pm.sample_prior_predictive()
+
+
 class TestSamplePosteriorPredictive:
     def test_point_list_arg_bug_fspp(self, point_list_arg_bug_fixture):
         pmodel, trace = point_list_arg_bug_fixture
```
