refactor pareto and laplace #4691


Merged
9 commits merged on May 14, 2021
83 changes: 21 additions & 62 deletions pymc3/distributions/continuous.py
@@ -34,6 +34,7 @@
halfcauchy,
halfnormal,
invgamma,
laplace,
logistic,
lognormal,
normal,
@@ -152,10 +153,16 @@ def default_transform(cls):

def transform_params(rv_var):
_, _, _, *args = rv_var.owner.inputs
lower = args[cls.bound_args_indices[0]]
upper = args[cls.bound_args_indices[1]]

lower, upper = None, None
if cls.bound_args_indices[0] is not None:
lower = args[cls.bound_args_indices[0]]
if cls.bound_args_indices[1] is not None:
upper = args[cls.bound_args_indices[1]]

lower = at.as_tensor_variable(lower) if lower is not None else None
upper = at.as_tensor_variable(upper) if upper is not None else None

return lower, upper

return transforms.interval(transform_params)
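The point of the change above is that either bound may now be absent, so half-bounded distributions can reuse the interval-transform machinery. A minimal standalone sketch of the new lookup (`pick_bounds` is a hypothetical helper of mine, not part of the diff):

```python
# None-aware bound lookup, mirroring the refactored transform_params.
def pick_bounds(args, bound_args_indices):
    lower_idx, upper_idx = bound_args_indices
    # A None index means that side is unbounded, so no constraint is applied.
    lower = args[lower_idx] if lower_idx is not None else None
    upper = args[upper_idx] if upper_idx is not None else None
    return lower, upper

# Pareto: args are [alpha, m] once rng/size/dtype are stripped, and the
# distribution is lower-bounded by its scale `m` with no upper bound.
assert pick_bounds(["alpha", "m"], (1, None)) == ("m", None)
```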
@@ -1505,37 +1512,17 @@ class Laplace(Continuous):
b: float
Scale parameter (b > 0).
"""
rv_op = laplace

def __init__(self, mu, b, *args, **kwargs):
super().__init__(*args, **kwargs)
self.b = b = at.as_tensor_variable(floatX(b))
self.mean = self.median = self.mode = self.mu = mu = at.as_tensor_variable(floatX(mu))

self.variance = 2 * self.b ** 2
@classmethod
def dist(cls, mu, b, *args, **kwargs):
b = at.as_tensor_variable(floatX(b))
mu = at.as_tensor_variable(floatX(mu))

assert_negative_support(b, "b", "Laplace")
return super().dist([mu, b], *args, **kwargs)

def random(self, point=None, size=None):
"""
Draw random values from Laplace distribution.

Parameters
----------
point: dict, optional
Dict of variable values on which random values are to be
conditioned (uses default point if not specified).
size: int, optional
Desired size of random sample (returns one sample if not
specified).

Returns
-------
array
"""
# mu, b = draw_values([self.mu, self.b], point=point, size=size)
# return generate_samples(np.random.laplace, mu, b, dist_shape=self.shape, size=size)

def logp(self, value):
def logp(value, mu, b):
"""
Calculate log-probability of Laplace distribution at specified value.

@@ -1549,12 +1536,9 @@ def logp(self, value):
-------
TensorVariable
"""
mu = self.mu
b = self.b

return -at.log(2 * b) - abs(value - mu) / b
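As a quick sanity check (mine, not part of the diff), the closed form returned above agrees with SciPy's Laplace log-density:

```python
# Check -log(2b) - |x - mu| / b against scipy.stats.laplace.logpdf.
import numpy as np
import scipy.stats as st

mu, b = 0.5, 2.0
x = np.linspace(-5.0, 5.0, 11)
manual = -np.log(2 * b) - np.abs(x - mu) / b
np.testing.assert_allclose(manual, st.laplace.logpdf(x, loc=mu, scale=b))
```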

def logcdf(self, value):
def logcdf(value, mu, b):
"""
Compute the log of the cumulative distribution function for Laplace distribution
at the specified value.
@@ -1569,12 +1553,10 @@ def logcdf(self, value):
-------
TensorVariable
"""
a = self.mu
b = self.b
y = (value - a) / b
y = (value - mu) / b
return bound(
at.switch(
at.le(value, a),
at.le(value, mu),
at.log(0.5) + y,
at.switch(
at.gt(y, 1),
@@ -1980,7 +1962,7 @@ def logcdf(self, value):
)
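For context on the `at.gt(y, 1)` switch in the Laplace `logcdf` above (its branch bodies are collapsed in this view): switching on `y > 1` is the usual guard when evaluating log(1 - 0.5·exp(-y)), where a `log1p` form avoids cancellation for large `y`. A plain NumPy sketch of that pattern, my illustration rather than the diff's exact code:

```python
import numpy as np

def log_upper_tail(y):
    # log(1 - 0.5 * exp(-y)) for y >= 0 (i.e., value above mu).
    # For y > 1 the quantity is close to 1, so log1p keeps precision.
    y = np.asarray(y, dtype=float)
    return np.where(
        y > 1,
        np.log1p(-0.5 * np.exp(-y)),
        np.log(1.0 - 0.5 * np.exp(-y)),
    )

print(log_upper_tail([0.0, 0.5, 10.0]))  # ~[-0.693, -0.361, -2.27e-05]
```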


class Pareto(Continuous):
class Pareto(BoundedContinuous):
r"""
Pareto log-likelihood.

@@ -2026,6 +2008,7 @@ class Pareto(Continuous):
Scale parameter (m > 0).
"""
rv_op = pareto
bound_args_indices = (1, None) # lower-bounded by `m`

@classmethod
def dist(
@@ -2039,30 +2022,6 @@ def dist(

return super().dist([alpha, m], **kwargs)

def _random(self, alpha, m, size=None):
u = np.random.uniform(size=size)
return m * (1.0 - u) ** (-1.0 / alpha)
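The deleted `_random` drew Pareto samples by inverse-CDF; in v4 the draws come from the `pareto` `RandomVariable` instead. As an aside (not in the diff), the same recipe can be checked against the SciPy parametrization the new tests use (`b = alpha`, `scale = m`):

```python
# Inverse-CDF Pareto sampling, as the removed _random did, vs scipy.stats.
import numpy as np
import scipy.stats as st

alpha, m = 3.0, 2.0
u = np.random.default_rng(0).uniform(size=100_000)
draws = m * (1.0 - u) ** (-1.0 / alpha)

q = [0.1, 0.5, 0.9]
np.testing.assert_allclose(
    np.quantile(draws, q),
    st.pareto.ppf(q, b=alpha, scale=m),
    rtol=0.05,
)
```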

def random(self, point=None, size=None):
"""
Draw random values from Pareto distribution.

Parameters
----------
point: dict, optional
Dict of variable values on which random values are to be
conditioned (uses default point if not specified).
size: int, optional
Desired size of random sample (returns one sample if not
specified).

Returns
-------
array
"""
# alpha, m = draw_values([self.alpha, self.m], point=point, size=size)
# return generate_samples(self._random, alpha, m, dist_shape=self.shape, size=size)

def logp(
value: Union[float, np.ndarray, TensorVariable],
alpha: Union[float, np.ndarray, TensorVariable],
1 change: 0 additions & 1 deletion pymc3/tests/test_distributions.py
@@ -1250,7 +1250,6 @@ def test_negative_binomial_init_fail(self, mu, p, alpha, n, expected):
with pytest.raises(ValueError, match=f"Incompatible parametrization. {expected}"):
NegativeBinomial("x", mu=mu, p=p, alpha=alpha, n=n)

@pytest.mark.xfail(reason="Distribution not refactored yet")
def test_laplace(self):
self.check_logp(
Laplace,
39 changes: 26 additions & 13 deletions pymc3/tests/test_distributions_random.py
@@ -277,12 +277,6 @@ class TestKumaraswamy(BaseTestCases.BaseTestCase):
params = {"a": 1.0, "b": 1.0}


@pytest.mark.xfail(reason="This distribution has not been refactored for v4")
class TestLaplace(BaseTestCases.BaseTestCase):
distribution = pm.Laplace
params = {"mu": 1.0, "b": 1.0}


@pytest.mark.xfail(reason="This distribution has not been refactored for v4")
class TestAsymmetricLaplace(BaseTestCases.BaseTestCase):
distribution = pm.AsymmetricLaplace
@@ -449,6 +443,32 @@ def seeded_discrete_weibul_rng_fn(self):
]


class TestPareto(BaseTestDistribution):
pymc_dist = pm.Pareto
pymc_dist_params = {"alpha": 3.0, "m": 2.0}
expected_rv_op_params = {"alpha": 3.0, "m": 2.0}
reference_dist_params = {"b": 3.0, "scale": 2.0}
reference_dist = seeded_scipy_distribution_builder("pareto")
tests_to_run = [
"check_pymc_params_match_rv_op",
"check_pymc_draws_match_reference",
"check_rv_size",
]


class TestLaplace(BaseTestDistribution):
pymc_dist = pm.Laplace
pymc_dist_params = {"mu": 0.0, "b": 1.0}
expected_rv_op_params = {"mu": 0.0, "b": 1.0}
reference_dist_params = {"loc": 0.0, "scale": 1.0}
reference_dist = seeded_scipy_distribution_builder("laplace")
tests_to_run = [
"check_pymc_params_match_rv_op",
"check_pymc_draws_match_reference",
"check_rv_size",
]
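The `reference_dist_params` in the two new test classes encode the SciPy parameter mapping: `pm.Pareto(alpha, m)` corresponds to `scipy.stats.pareto(b=alpha, scale=m)`, and `pm.Laplace(mu, b)` to `scipy.stats.laplace(loc=mu, scale=b)`. A short check of those identities (illustrative, outside the test suite):

```python
import numpy as np
import scipy.stats as st

x = 2.5
# Pareto: log pdf = log(alpha) + alpha*log(m) - (alpha + 1)*log(x) for x >= m.
assert np.isclose(
    st.pareto.logpdf(x, b=3.0, scale=2.0),
    np.log(3.0) + 3.0 * np.log(2.0) - 4.0 * np.log(x),
)
# Laplace: log pdf = -log(2 * b) - |x - mu| / b.
assert np.isclose(
    st.laplace.logpdf(x, loc=0.0, scale=1.0),
    -np.log(2.0) - abs(x),
)
```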


class TestGumbel(BaseTestDistribution):
pymc_dist = pm.Gumbel
pymc_dist_params = {"mu": 1.5, "beta": 3.0}
@@ -1102,13 +1122,6 @@ def ref_rand(size, mu, lam, alpha):
ref_rand=ref_rand,
)

@pytest.mark.xfail(reason="This distribution has not been refactored for v4")
def test_laplace(self):
def ref_rand(size, mu, b):
return st.laplace.rvs(mu, b, size=size)

pymc3_random(pm.Laplace, {"mu": R, "b": Rplus}, ref_rand=ref_rand)

@pytest.mark.xfail(reason="This distribution has not been refactored for v4")
def test_laplace_asymmetric(self):
def ref_rand(size, kappa, b, mu):