diff --git a/pymc3/sampling_jax.py b/pymc3/sampling_jax.py
index 22e686ff88..6e250059db 100644
--- a/pymc3/sampling_jax.py
+++ b/pymc3/sampling_jax.py
@@ -12,8 +12,8 @@
 import numpy as np
 import pandas as pd
 import theano
-import theano.sandbox.jax_linker
-import theano.sandbox.jaxify
+
+from theano.link.jax.jax_dispatch import jax_funcify
 
 import pymc3 as pm
 
@@ -46,7 +46,7 @@ def sample_tfp_nuts(
     seed = jax.random.PRNGKey(random_seed)
 
     fgraph = theano.gof.FunctionGraph(model.free_RVs, [model.logpt])
-    fns = theano.sandbox.jaxify.jax_funcify(fgraph)
+    fns = jax_funcify(fgraph)
     logp_fn_jax = fns[0]
 
     rv_names = [rv.name for rv in model.free_RVs]
@@ -131,7 +131,7 @@ def sample_numpyro_nuts(
     seed = jax.random.PRNGKey(random_seed)
 
     fgraph = theano.gof.FunctionGraph(model.free_RVs, [model.logpt])
-    fns = theano.sandbox.jaxify.jax_funcify(fgraph)
+    fns = jax_funcify(fgraph)
     logp_fn_jax = fns[0]
 
     rv_names = [rv.name for rv in model.free_RVs]
diff --git a/pymc3/tests/helpers.py b/pymc3/tests/helpers.py
index 41e132a4cf..6e56fad9d0 100644
--- a/pymc3/tests/helpers.py
+++ b/pymc3/tests/helpers.py
@@ -20,7 +20,7 @@
 import theano
 
 from theano.gradient import verify_grad as tt_verify_grad
-from theano.sandbox.rng_mrg import MRG_RandomStreams
+from theano.sandbox.rng_mrg import MRG_RandomStream as RandomStream
 
 from pymc3.theanof import set_tt_rng, tt_rng
 
@@ -35,7 +35,7 @@ def setup_class(cls):
     def setup_method(self):
         nr.seed(self.random_seed)
         self.old_tt_rng = tt_rng()
-        set_tt_rng(MRG_RandomStreams(self.random_seed))
+        set_tt_rng(RandomStream(self.random_seed))
 
     def teardown_method(self):
         set_tt_rng(self.old_tt_rng)
diff --git a/pymc3/tests/test_variational_inference.py b/pymc3/tests/test_variational_inference.py
index 48de33ada2..dd5d64275d 100644
--- a/pymc3/tests/test_variational_inference.py
+++ b/pymc3/tests/test_variational_inference.py
@@ -27,7 +27,7 @@
 
 from pymc3.tests import models
 from pymc3.tests.helpers import not_raises
-from pymc3.theanof import change_flags, intX
+from pymc3.theanof import intX
 from pymc3.variational import flows, opvi
 from pymc3.variational.approximations import (
     Empirical,
@@ -533,17 +533,20 @@ def test_scale_cost_to_minibatch_works(aux_total_size):
     sigma = 1.0
     y_obs = np.array([1.6, 1.4])
     beta = len(y_obs) / float(aux_total_size)
-    post_mu = np.array([1.88], dtype=theano.config.floatX)
-    post_sigma = np.array([1], dtype=theano.config.floatX)
 
     # TODO: theano_config
     # with pm.Model(theano_config=dict(floatX='float64')):
     # did not not work as expected
     # there were some numeric problems, so float64 is forced
-    with pm.theanof.change_flags(floatX="float64", warn_float64="ignore"):
+    with theano.config.change_flags(floatX="float64", warn_float64="ignore"):
+
+        assert theano.config.floatX == "float64"
+        assert theano.config.warn_float64 == "ignore"
+
+        post_mu = np.array([1.88], dtype=theano.config.floatX)
+        post_sigma = np.array([1], dtype=theano.config.floatX)
+
         with pm.Model():
-            assert theano.config.floatX == "float64"
-            assert theano.config.warn_float64 == "ignore"
             mu = pm.Normal("mu", mu=mu0, sigma=sigma)
             pm.Normal("y", mu=mu, sigma=1, observed=y_obs, total_size=aux_total_size)
             # Create variational gradient tensor
@@ -552,7 +555,7 @@ def test_scale_cost_to_minibatch_works(aux_total_size):
         mean_field_1.shared_params["mu"].set_value(post_mu)
         mean_field_1.shared_params["rho"].set_value(np.log(np.exp(post_sigma) - 1))
 
-        with pm.theanof.change_flags(compute_test_value="off"):
+        with theano.config.change_flags(compute_test_value="off"):
             elbo_via_total_size_scaled = -pm.operators.KL(mean_field_1)()(10000)
 
         with pm.Model():
@@ -566,7 +569,7 @@ def test_scale_cost_to_minibatch_works(aux_total_size):
         mean_field_2.shared_params["mu"].set_value(post_mu)
         mean_field_2.shared_params["rho"].set_value(np.log(np.exp(post_sigma) - 1))
 
-        with pm.theanof.change_flags(compute_test_value="off"):
+        with theano.config.change_flags(compute_test_value="off"):
             elbo_via_total_size_unscaled = -pm.operators.KL(mean_field_2)()(10000)
 
     np.testing.assert_allclose(
@@ -583,9 +586,12 @@ def test_elbo_beta_kl(aux_total_size):
     sigma = 1.0
     y_obs = np.array([1.6, 1.4])
     beta = len(y_obs) / float(aux_total_size)
-    post_mu = np.array([1.88], dtype=theano.config.floatX)
-    post_sigma = np.array([1], dtype=theano.config.floatX)
-    with pm.theanof.change_flags(floatX="float64", warn_float64="ignore"):
+
+    with theano.config.change_flags(floatX="float64", warn_float64="ignore"):
+
+        post_mu = np.array([1.88], dtype=theano.config.floatX)
+        post_sigma = np.array([1], dtype=theano.config.floatX)
+
         with pm.Model():
             mu = pm.Normal("mu", mu=mu0, sigma=sigma)
             pm.Normal("y", mu=mu, sigma=1, observed=y_obs, total_size=aux_total_size)
@@ -595,7 +601,7 @@ def test_elbo_beta_kl(aux_total_size):
         mean_field_1.shared_params["mu"].set_value(post_mu)
         mean_field_1.shared_params["rho"].set_value(np.log(np.exp(post_sigma) - 1))
 
-        with pm.theanof.change_flags(compute_test_value="off"):
+        with theano.config.change_flags(compute_test_value="off"):
             elbo_via_total_size_scaled = -pm.operators.KL(mean_field_1)()(10000)
 
         with pm.Model():
@@ -606,7 +612,7 @@ def test_elbo_beta_kl(aux_total_size):
         mean_field_3.shared_params["mu"].set_value(post_mu)
         mean_field_3.shared_params["rho"].set_value(np.log(np.exp(post_sigma) - 1))
 
-        with pm.theanof.change_flags(compute_test_value="off"):
+        with theano.config.change_flags(compute_test_value="off"):
             elbo_via_beta_kl = -pm.operators.KL(mean_field_3, beta=beta)()(10000)
 
     np.testing.assert_allclose(
@@ -1014,7 +1020,7 @@ def init_(**kw):
 def test_flow_det(flow_spec):
     z0 = tt.arange(0, 20).astype("float32")
     flow = flow_spec(dim=20, z0=z0.dimshuffle("x", 0))
-    with change_flags(compute_test_value="off"):
+    with theano.config.change_flags(compute_test_value="off"):
         z1 = flow.forward.flatten()
         J = tt.jacobian(z1, z0)
         logJdet = tt.log(tt.abs_(tt.nlinalg.det(J)))
@@ -1030,7 +1036,7 @@ def test_flow_det_local(flow_spec):
         params[k] = np.random.randn(1, *shp).astype("float32")
     flow = flow_spec(dim=12, z0=z0.reshape((1, 1, 12)), **params)
     assert flow.batched
-    with change_flags(compute_test_value="off"):
+    with theano.config.change_flags(compute_test_value="off"):
         z1 = flow.forward.flatten()
         J = tt.jacobian(z1, z0)
         logJdet = tt.log(tt.abs_(tt.nlinalg.det(J)))
diff --git a/pymc3/theanof.py b/pymc3/theanof.py
index 817488b4fd..aeeb64e5b0 100644
--- a/pymc3/theanof.py
+++ b/pymc3/theanof.py
@@ -19,7 +19,7 @@
 from theano import tensor as tt
 from theano.gof import Op
 from theano.gof.graph import inputs
-from theano.sandbox.rng_mrg import MRG_RandomStreams
+from theano.sandbox.rng_mrg import MRG_RandomStream as RandomStream
 
 from pymc3.blocking import ArrayOrdering
 from pymc3.data import GeneratorAdapter
@@ -394,7 +394,7 @@ def generator(gen, default=None):
     return GeneratorOp(gen, default)()
 
 
-_tt_rng = MRG_RandomStreams()
+_tt_rng = RandomStream()
 
 
 def tt_rng(random_seed=None):
@@ -409,14 +409,14 @@ def tt_rng(random_seed=None):
 
     Returns
     -------
-    `theano.sandbox.rng_mrg.MRG_RandomStreams` instance
-        `theano.sandbox.rng_mrg.MRG_RandomStreams`
+    `theano.tensor.random.utils.RandomStream` instance
+        `theano.tensor.random.utils.RandomStream`
         instance passed to the most recent call of `set_tt_rng`
     """
     if random_seed is None:
         return _tt_rng
     else:
-        ret = MRG_RandomStreams(random_seed)
+        ret = RandomStream(random_seed)
         return ret
 
 
@@ -426,14 +426,14 @@ def set_tt_rng(new_rng):
 
     Parameters
     ----------
-    new_rng: `theano.sandbox.rng_mrg.MRG_RandomStreams` instance
+    new_rng: `theano.tensor.random.utils.RandomStream` instance
         The random number generator to use.
     """
     # pylint: disable=global-statement
    global _tt_rng
     # pylint: enable=global-statement
     if isinstance(new_rng, int):
-        new_rng = MRG_RandomStreams(new_rng)
+        new_rng = RandomStream(new_rng)
     _tt_rng = new_rng
 
 
diff --git a/pymc3/variational/opvi.py b/pymc3/variational/opvi.py
index e964076027..b12b56110c 100644
--- a/pymc3/variational/opvi.py
+++ b/pymc3/variational/opvi.py
@@ -1078,9 +1078,9 @@ def _new_initial(self, size, deterministic, more_replacements=None):
             if deterministic:
                 return tt.ones(shape, dtype) * dist_map
             else:
-                return getattr(self._rng, dist_name)(shape)
+                return getattr(self._rng, dist_name)(size=shape)
         else:
-            sample = getattr(self._rng, dist_name)(shape)
+            sample = getattr(self._rng, dist_name)(size=shape)
             initial = tt.switch(deterministic, tt.ones(shape, dtype) * dist_map, sample)
             return initial
 
diff --git a/requirements.txt b/requirements.txt
index 1e29e212ea..6435bed13e 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -5,5 +5,5 @@ numpy>=1.13.0
 pandas>=0.18.0
 patsy>=0.5.1
 scipy>=0.18.1
-theano-pymc==1.0.12
+theano-pymc==1.0.14
 typing-extensions>=3.7.4
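
Note on the `sampling_jax.py` hunks: `jax_funcify` is now imported from `theano.link.jax.jax_dispatch` instead of `theano.sandbox.jaxify`. A minimal sketch of the pattern those hunks rely on, assuming Theano-PyMC 1.0.14 and `jax` are installed — the toy graph below is a hypothetical stand-in for `model.logpt`:

```python
import numpy as np
import theano
import theano.tensor as tt

from theano.link.jax.jax_dispatch import jax_funcify

# Hypothetical stand-in for model.logpt: an unnormalized
# standard-normal log-density graph.
x = tt.dvector("x")
logp = -0.5 * (x ** 2).sum()

# Same pattern as sample_tfp_nuts / sample_numpyro_nuts above: wrap the
# graph in a FunctionGraph, convert it, keep the first returned function.
fgraph = theano.gof.FunctionGraph([x], [logp])
fns = jax_funcify(fgraph)
logp_fn_jax = fns[0]

print(logp_fn_jax(np.zeros(3)))  # built from jax.numpy ops, jax.jit-able
```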
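Note on the remaining hunks: the diff replaces `pm.theanof.change_flags` (and the bare `change_flags` import) with the `theano.config.change_flags` context manager, imports the renamed `MRG_RandomStream` class under the `RandomStream` alias, and passes the shape to the stream's sampling methods via the `size=` keyword (the `opvi.py` change). A short sketch under those assumptions:

```python
import theano

from theano.sandbox.rng_mrg import MRG_RandomStream as RandomStream

# change_flags now lives on the config object and still works as a
# context manager; the flags are restored on exit.
with theano.config.change_flags(floatX="float64", warn_float64="ignore"):
    assert theano.config.floatX == "float64"

# MRG_RandomStream (singular) replaces the old MRG_RandomStreams name;
# the draw's shape goes through the size= keyword, as in opvi.py.
rng = RandomStream(seed=42)
draw = rng.normal(size=(3, 2))
print(draw.eval().shape)  # -> (3, 2)
```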