Unverified commit 0456f397, authored by Fearghus and committed by GitHub

NegativeBinomial distribution sample generator wrapper (#3866)

* Added wrapper for gamma sampling in negative binomial distribution.

* Fixed typo in posterior sampling warnings.

* Drop nuts init method from pm.sample (#3863)

* Dropped nuts init method

* Dropped nuts init method from tests

* Refined doc string and added release note

* Added wrapper for gamma sampling in negative binomial distribution.

* Fixed typo in posterior sampling warnings.

* Added test for negative binomial input shape.

* Updating release notes with changes to negative binomial sampling.
Co-authored-by: Alexandre ANDORRA <andorra.alexandre@gmail.com>
parent bf8552e2
......@@ -19,6 +19,7 @@
- In named models, `pm.Data` objects now get model-relative names (see [#3843](https://github.com/pymc-devs/pymc3/pull/3843)).
- `pm.sample` now takes 1000 draws and 1000 tuning samples by default, instead of 500 previously (see [#3855](https://github.com/pymc-devs/pymc3/pull/3855)).
- Dropped the outdated 'nuts' initialization method for `pm.sample` (see [#3863](https://github.com/pymc-devs/pymc3/pull/3863)).
- Moved argument division out of `NegativeBinomial` `random` method. Fixes [#3864](https://github.com/pymc-devs/pymc3/issues/3864) in the style of [#3509](https://github.com/pymc-devs/pymc3/pull/3509).
## PyMC3 3.8 (November 29 2019)
......
......@@ -673,12 +673,24 @@ class NegativeBinomial(Discrete):
array
"""
mu, alpha = draw_values([self.mu, self.alpha], point=point, size=size)
g = generate_samples(stats.gamma.rvs, alpha, scale=mu / alpha,
g = generate_samples(self._random, mu=mu, alpha=alpha,
dist_shape=self.shape,
size=size)
g[g == 0] = np.finfo(float).eps # Just in case
return np.asarray(stats.poisson.rvs(g)).reshape(g.shape)
def _random(self, mu, alpha, size):
    """Draw gamma samples using NegativeBinomial's (mu, alpha) parametrization.

    Translates the distribution's mean/dispersion parameters into the
    shape/scale form expected by ``scipy.stats.gamma.rvs``.  By the time
    this is called, ``generate_samples`` has already broadcast ``mu`` and
    ``alpha`` properly, and ``size`` is in scipy's ``rvs`` convention.
    """
    gamma_shape = alpha
    gamma_scale = mu / alpha
    return stats.gamma.rvs(a=gamma_shape, scale=gamma_scale, size=size)
def logp(self, value):
"""
Calculate log-probability of NegativeBinomial distribution at specified value.
......
......@@ -1567,9 +1567,9 @@ def sample_posterior_predictive(
nchain = 1
if keep_size and samples is not None:
raise IncorrectArgumentsError("Should not specify both keep_size and samples argukments")
raise IncorrectArgumentsError("Should not specify both keep_size and samples arguments")
if keep_size and size is not None:
raise IncorrectArgumentsError("Should not specify both keep_size and size argukments")
raise IncorrectArgumentsError("Should not specify both keep_size and size arguments")
if samples is None:
if isinstance(trace, MultiTrace):
......
......@@ -1176,6 +1176,31 @@ class TestNestedRandom(SeededTest):
with model:
return pm.sample_prior_predictive(prior_samples)
@pytest.mark.parametrize(
    ["prior_samples", "shape", "mu", "alpha"],
    [
        # Cases cover scalar vs. vector parameters and broadcasting of
        # mu/alpha against the requested distribution shape.
        [10, (3,), (None, tuple()), (None, (3,))],
        [10, (3,), (None, (3,)), (None, tuple())],
        [10, (4, 3,), (None, (3,)), (None, (3,))],
        [10, (4, 3,), (None, (3,)), (None, (4, 3))],
    ],
    ids=str,
)
def test_NegativeBinomial(
    self,
    prior_samples,
    shape,
    mu,
    alpha,
):
    """Check prior-predictive sample shapes for NegativeBinomial.

    Regression test (pymc3 #3864): prior sampling must honour the
    declared distribution shape regardless of the shapes of ``mu``
    and ``alpha``.
    """
    # NOTE(review): sample_prior is a fixture helper defined elsewhere in
    # TestNestedRandom — presumably it builds nested RVs from
    # nested_rvs_info and returns a prior-predictive dict; confirm there.
    prior = self.sample_prior(
        distribution=pm.NegativeBinomial,
        shape=shape,
        nested_rvs_info=dict(mu=mu, alpha=alpha),
        prior_samples=prior_samples,
    )
    # Draws are stacked along a leading "samples" axis ahead of the
    # distribution's own shape.
    assert prior["target"].shape == (prior_samples,) + shape
@pytest.mark.parametrize(
["prior_samples", "shape", "psi", "mu", "alpha"],
[
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment