
Commit 405fd10

michaelosthege authored and twiecki committed
Upgrade to Theano-PyMC 1.1.0
Many theano.gof imports were changed to explicit imports from the new theano.graph modules. This commit also changes the usage of change_flags because of its pending deprecation.
1 parent e46b1b4 commit 405fd10

29 files changed (+98 -92 lines)
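
The migration follows one pattern throughout: everything that used to live under theano.gof now has an explicit home under theano.graph, and change_flags is accessed on the theano.config object instead of the theano.configparser module. A minimal sketch of the new usage (variable and function names here are illustrative, not from the codebase):

    # Old location                              New location (Theano-PyMC 1.1.0)
    # theano.gof.op.Op                      ->  theano.graph.op.Op
    # theano.gof.graph.Apply                ->  theano.graph.basic.Apply
    # theano.gof.fg.MissingInputError       ->  theano.graph.fg.MissingInputError
    # theano.configparser.change_flags(...) ->  theano.config.change_flags(...)

    import theano
    import theano.tensor as tt

    # change_flags still works both as a decorator and as a context manager;
    # only its import location changed.
    @theano.config.change_flags(compute_test_value="ignore")
    def build_sum():
        x = tt.vector("x")
        return x.sum()

    with theano.config.change_flags(compute_test_value="off"):
        total = build_sum()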

docs/source/Advanced_usage_of_Theano_in_PyMC3.rst
Lines changed: 1 addition & 1 deletion

@@ -186,7 +186,7 @@ Now, we use this to define a theano op, that also computes the gradient::
 import theano
 import theano.tensor as tt
 import theano.tests.unittest_tools
-from theano.gof.op import Op
+from theano.graph.op import Op
 
 class MuFromTheta(Op):
     itypes = [tt.dscalar]

pymc3/data.py
Lines changed: 2 additions & 2 deletions

@@ -26,7 +26,7 @@
 import theano
 import theano.tensor as tt
 
-from theano.gof.graph import Apply
+from theano.graph.basic import Apply
 
 import pymc3 as pm
 
@@ -296,7 +296,7 @@ class Minibatch(tt.TensorVariable):
 
     RNG = collections.defaultdict(list)  # type: Dict[str, List[Any]]
 
-    @theano.configparser.change_flags(compute_test_value="raise")
+    @theano.config.change_flags(compute_test_value="raise")
     def __init__(
         self,
         data,

pymc3/distributions/dist_math.py
Lines changed: 2 additions & 2 deletions

@@ -27,8 +27,8 @@
 
 from theano import scan
 from theano.compile.builders import OpFromGraph
-from theano.gof.graph import Apply
-from theano.gof.op import Op
+from theano.graph.basic import Apply
+from theano.graph.op import Op
 from theano.scalar import UnaryScalarOp, upgrade_to_float_no_complex
 from theano.scan import until
 from theano.tensor.slinalg import Cholesky

pymc3/distributions/distribution.py
Lines changed: 4 additions & 3 deletions

@@ -29,6 +29,7 @@
 
 import numpy as np
 import theano
+import theano.graph.basic
 import theano.tensor as tt
 
 from theano import function
@@ -790,7 +791,7 @@ def draw_values(params, point=None, size=None):
                 value = _draw_value(next_, point=point, givens=temp_givens, size=size)
                 givens[next_.name] = (next_, value)
                 drawn[(next_, size)] = value
-            except theano.gof.fg.MissingInputError:
+            except theano.graph.fg.MissingInputError:
                 # The node failed, so we must add the node's parents to
                 # the stack of nodes to try to draw from. We exclude the
                 # nodes in the `params` list.
@@ -833,7 +834,7 @@ def draw_values(params, point=None, size=None):
                 value = _draw_value(param, point=point, givens=givens.values(), size=size)
                 evaluated[param_idx] = drawn[(param, size)] = value
                 givens[param.name] = (param, value)
-            except theano.gof.fg.MissingInputError:
+            except theano.graph.fg.MissingInputError:
                 missing_inputs.add(param_idx)
 
     return [evaluated[j] for j in params]  # set the order back
@@ -994,7 +995,7 @@ def _draw_value(param, point=None, givens=None, size=None):
            variables = values = []
        # We only truly care if the ancestors of param that were given
        # value have the matching dshape and val.shape
-        param_ancestors = set(theano.gof.graph.ancestors([param], blockers=list(variables)))
+        param_ancestors = set(theano.graph.basic.ancestors([param], blockers=list(variables)))
        inputs = [(var, val) for var, val in zip(variables, values) if var in param_ancestors]
        if inputs:
            input_vars, input_vals = list(zip(*inputs))
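
The draw_values machinery only needs the ancestor set of a variable up to a set of blockers, which theano.graph.basic.ancestors provides directly. A minimal sketch of its behaviour (assumes Theano-PyMC 1.1.0; a, b, c and d are throwaway variables for illustration):

    import theano.graph.basic
    import theano.tensor as tt

    a = tt.scalar("a")
    b = tt.scalar("b")
    c = a + b
    d = c * 2.0

    # ancestors() walks from the outputs towards the graph inputs; blockers
    # stop the traversal, so a and b are never reached here.
    anc = set(theano.graph.basic.ancestors([d], blockers=[c]))
    print(c in anc, a in anc)  # True False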

pymc3/distributions/mixture.py
Lines changed: 1 addition & 1 deletion

@@ -253,7 +253,7 @@ def _comp_logp(self, value):
                val_shape = tuple(value.shape.eval())
            except AttributeError:
                val_shape = value.shape
-            except theano.gof.MissingInputError:
+            except theano.graph.fg.MissingInputError:
                val_shape = None
            try:
                self_shape = tuple(self.shape)
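
The relocated exception behaves exactly as before: evaluating a graph that still depends on an unfed input raises it. A minimal sketch (assumes Theano-PyMC 1.1.0; x and total are illustrative):

    import theano.graph.fg
    import theano.tensor as tt

    x = tt.vector("x")
    total = x.sum()

    try:
        # No value for x is available anywhere, so compiling/evaluating fails.
        total.eval()
    except theano.graph.fg.MissingInputError:
        print("cannot evaluate: x has no value")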

pymc3/distributions/multivariate.py
Lines changed: 3 additions & 3 deletions

@@ -23,9 +23,9 @@
 import theano.tensor as tt
 
 from scipy import linalg, stats
-from theano.gof.graph import Apply
-from theano.gof.op import Op, get_test_value
-from theano.gof.utils import TestValueError
+from theano.graph.basic import Apply
+from theano.graph.op import Op, get_test_value
+from theano.graph.utils import TestValueError
 from theano.tensor.nlinalg import det, eigh, matrix_inverse, trace
 from theano.tensor.slinalg import Cholesky

pymc3/distributions/posterior_predictive.py
Lines changed: 7 additions & 4 deletions

@@ -22,7 +22,8 @@
 )
 
 import numpy as np
-import theano
+import theano.graph.basic
+import theano.graph.fg
 import theano.tensor as tt
 
 from arviz import InferenceData
@@ -422,7 +423,7 @@ def draw_values(self) -> List[np.ndarray]:
                 assert isinstance(value, np.ndarray)
                 givens[next_.name] = (next_, value)
                 drawn[(next_, samples)] = value
-            except theano.gof.fg.MissingInputError:
+            except theano.graph.fg.MissingInputError:
                 # The node failed, so we must add the node's parents to
                 # the stack of nodes to try to draw from. We exclude the
                 # nodes in the `params` list.
@@ -467,7 +468,7 @@ def draw_values(self) -> List[np.ndarray]:
                 assert isinstance(value, np.ndarray)
                 self.evaluated[param_idx] = drawn[(param, samples)] = value
                 givens[param.name] = (param, value)
-            except theano.gof.fg.MissingInputError:
+            except theano.graph.fg.MissingInputError:
                 missing_inputs.add(param_idx)
         return [self.evaluated[j] for j in params]
 
@@ -661,7 +662,9 @@ def random_sample(
            variables = values = []
        # We only truly care if the ancestors of param that were given
        # value have the matching dshape and val.shape
-        param_ancestors = set(theano.gof.graph.ancestors([param], blockers=list(variables)))
+        param_ancestors = set(
+            theano.graph.basic.ancestors([param], blockers=list(variables))
+        )
        inputs = [
            (var, val) for var, val in zip(variables, values) if var in param_ancestors
        ]

pymc3/math.py
Lines changed: 2 additions & 2 deletions

@@ -25,8 +25,8 @@
 import theano.tensor.slinalg  # pylint: disable=unused-import
 
 from scipy.linalg import block_diag as scipy_block_diag
-from theano.gof.graph import Apply
-from theano.gof.op import Op
+from theano.graph.basic import Apply
+from theano.graph.op import Op
 
 # pylint: disable=unused-import
 from theano.tensor import (

pymc3/model.py
Lines changed: 5 additions & 4 deletions

@@ -23,12 +23,13 @@
 import numpy as np
 import scipy.sparse as sps
 import theano
+import theano.graph.basic
 import theano.sparse as sparse
 import theano.tensor as tt
 
 from pandas import Series
 from theano.compile import SharedVariable
-from theano.gof.graph import Apply
+from theano.graph.basic import Apply
 from theano.tensor.var import TensorVariable
 
 import pymc3 as pm
@@ -284,7 +285,7 @@ def __enter__(self):
        # self._theano_config is set in Model.__new__
        self._config_context = None
        if hasattr(self, "_theano_config"):
-            self._config_context = theano.change_flags(**self._theano_config)
+            self._config_context = theano.config.change_flags(**self._theano_config)
            self._config_context.__enter__()
        return self
 
@@ -1704,7 +1705,7 @@ def pandas_to_array(data):
            ret = data
        else:  # empty mask
            ret = data.filled()
-    elif isinstance(data, theano.gof.graph.Variable):
+    elif isinstance(data, theano.graph.basic.Variable):
        ret = data
    elif sps.issparse(data):
        ret = data
@@ -1795,7 +1796,7 @@ def __init__(
 
        if type is None:
            data = pandas_to_array(data)
-            if isinstance(data, theano.gof.graph.Variable):
+            if isinstance(data, theano.graph.basic.Variable):
                type = data.type
            else:
                type = TensorType(distribution.dtype, data.shape)

pymc3/model_graph.py
Lines changed: 2 additions & 2 deletions

@@ -18,7 +18,7 @@
 VarName = str
 
 from theano.compile import SharedVariable
-from theano.gof.graph import stack_search
+from theano.graph.basic import walk
 from theano.tensor import Tensor
 
 import pymc3 as pm
@@ -69,7 +69,7 @@ def _expand(node) -> Optional[Iterator[Tensor]]:
            else:
                return None
 
-        list(stack_search(deque([func]), _expand, bfs=True))
+        list(walk(deque([func]), _expand, bfs=True))
        return retval
 
    def _filter_parents(self, var, parents) -> Set[VarName]:
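
stack_search was dropped upstream; walk is its replacement and performs the same breadth-first traversal driven by an expand callback. A minimal sketch of how it behaves (assumes Theano-PyMC 1.1.0; x, y and towards_inputs are illustrative, not the model_graph code):

    from collections import deque

    import theano.tensor as tt
    from theano.graph.basic import walk

    x = tt.vector("x")
    y = (x ** 2).sum()

    def towards_inputs(node):
        # Expand a variable into the inputs of the Apply node that produced it.
        return node.owner.inputs if node.owner is not None else None

    # walk() is a generator; list() forces the full traversal, which is all
    # model_graph needs since its _expand callback works by side effect.
    visited = list(walk(deque([y]), towards_inputs, bfs=True))
    print([v.name for v in visited if v.name])  # includes 'x'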

pymc3/ode/ode.py
Lines changed: 2 additions & 2 deletions

@@ -19,8 +19,8 @@
 import theano
 import theano.tensor as tt
 
-from theano.gof.graph import Apply
-from theano.gof.op import Op, get_test_value
+from theano.graph.basic import Apply
+from theano.graph.op import Op, get_test_value
 
 from pymc3.exceptions import DtypeError, ShapeError
 from pymc3.ode import utils

pymc3/sampling_jax.py
Lines changed: 3 additions & 3 deletions

@@ -11,7 +11,7 @@
 import jax
 import numpy as np
 import pandas as pd
-import theano
+import theano.graph.fg
 
 from theano.link.jax.jax_dispatch import jax_funcify
 
@@ -45,7 +45,7 @@ def sample_tfp_nuts(
 
    seed = jax.random.PRNGKey(random_seed)
 
-    fgraph = theano.gof.FunctionGraph(model.free_RVs, [model.logpt])
+    fgraph = theano.graph.fg.FunctionGraph(model.free_RVs, [model.logpt])
    fns = jax_funcify(fgraph)
    logp_fn_jax = fns[0]
 
@@ -130,7 +130,7 @@ def sample_numpyro_nuts(
 
    seed = jax.random.PRNGKey(random_seed)
 
-    fgraph = theano.gof.FunctionGraph(model.free_RVs, [model.logpt])
+    fgraph = theano.graph.fg.FunctionGraph(model.free_RVs, [model.logpt])
    fns = jax_funcify(fgraph)
    logp_fn_jax = fns[0]
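
FunctionGraph is unchanged apart from its new home in theano.graph.fg: it still wraps explicit input and output variables, which is what jax_funcify consumes. A minimal sketch of constructing one outside of a PyMC3 model (assumes Theano-PyMC 1.1.0; x and cost are illustrative):

    import theano.graph.fg
    import theano.tensor as tt

    x = tt.vector("x")
    cost = tt.exp(x).sum()

    # The constructor clones the graph by default, so later rewrites (e.g. by
    # the JAX linker) do not mutate the caller's original variables.
    fgraph = theano.graph.fg.FunctionGraph([x], [cost])
    print(len(fgraph.inputs), len(fgraph.outputs))  # 1 1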

pymc3/step_methods/gibbs.py
Lines changed: 2 additions & 2 deletions

@@ -31,7 +31,7 @@
     searchsorted,
 )
 from numpy.random import uniform
-from theano.gof.graph import inputs
+from theano.graph.basic import graph_inputs
 from theano.tensor import add
 
 from pymc3.distributions.discrete import Categorical
@@ -80,7 +80,7 @@ def competence(var, has_grad):
 
 
 def elemwise_logp(model, var):
-    terms = [v.logp_elemwiset for v in model.basic_RVs if var in inputs([v.logpt])]
+    terms = [v.logp_elemwiset for v in model.basic_RVs if var in graph_inputs([v.logpt])]
    return model.fn(add(*terms))
 
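
graph_inputs is the renamed, explicit version of the old inputs helper: it yields the ownerless root variables that a set of outputs depends on, which is how elemwise_logp decides whether var feeds a given logp term. A minimal sketch (assumes Theano-PyMC 1.1.0; a, b and loss are illustrative):

    import theano.tensor as tt
    from theano.graph.basic import graph_inputs

    a = tt.scalar("a")
    b = tt.scalar("b")
    loss = a * b + 1.0

    # Constants such as the 1.0 above are also ownerless, hence the name filter.
    roots = list(graph_inputs([loss]))
    print(sorted(r.name for r in roots if r.name))  # ['a', 'b']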

pymc3/tests/conftest.py
Lines changed: 3 additions & 3 deletions

@@ -21,22 +21,22 @@
 
 @pytest.fixture(scope="function", autouse=True)
 def theano_config():
-    config = theano.configparser.change_flags(compute_test_value="raise")
+    config = theano.config.change_flags(compute_test_value="raise")
    with config:
        yield
 
 
 @pytest.fixture(scope="function", autouse=True)
 def exception_verbosity():
-    config = theano.configparser.change_flags(exception_verbosity="high")
+    config = theano.config.change_flags(exception_verbosity="high")
    with config:
        yield
 
 
 @pytest.fixture(scope="function", autouse=False)
 def strict_float32():
    if theano.config.floatX == "float32":
-        config = theano.configparser.change_flags(warn_float64="raise")
+        config = theano.config.change_flags(warn_float64="raise")
        with config:
            yield
    else:

pymc3/tests/test_dist_math.py
Lines changed: 5 additions & 5 deletions

@@ -167,7 +167,7 @@ def test_logp(self):
        logp = logp_f(cov_val, delta_val)
        npt.assert_allclose(logp, expect)
 
-    @theano.configparser.change_flags(compute_test_value="ignore")
+    @theano.config.change_flags(compute_test_value="ignore")
    def test_grad(self):
        np.random.seed(42)
 
@@ -190,7 +190,7 @@ def func(chol_vec, delta):
        verify_grad(func, [chol_vec_val, delta_val])
 
    @pytest.mark.skip(reason="Fix in theano not released yet: Theano#5908")
-    @theano.configparser.change_flags(compute_test_value="ignore")
+    @theano.config.change_flags(compute_test_value="ignore")
    def test_hessian(self):
        chol_vec = tt.vector("chol_vec")
        chol_vec.tag.test_value = np.array([0.1, 2, 3])
@@ -209,14 +209,14 @@ def test_hessian(self):
 
 
 class TestSplineWrapper:
-    @theano.configparser.change_flags(compute_test_value="ignore")
+    @theano.config.change_flags(compute_test_value="ignore")
    def test_grad(self):
        x = np.linspace(0, 1, 100)
        y = x * x
        spline = SplineWrapper(interpolate.InterpolatedUnivariateSpline(x, y, k=1))
        verify_grad(spline, [0.5])
 
-    @theano.configparser.change_flags(compute_test_value="ignore")
+    @theano.config.change_flags(compute_test_value="ignore")
    def test_hessian(self):
        x = np.linspace(0, 1, 100)
        y = x * x
@@ -228,7 +228,7 @@ def test_hessian(self):
 
 
 class TestI0e:
-    @theano.configparser.change_flags(compute_test_value="ignore")
+    @theano.config.change_flags(compute_test_value="ignore")
    def test_grad(self):
        verify_grad(i0e, [0.5])
        verify_grad(i0e, [-2.0])

pymc3/tests/test_math.py
Lines changed: 1 addition & 1 deletion

@@ -160,7 +160,7 @@ def setup_method(self):
        self.op_class = LogDet
        self.op = logdet
 
-    @theano.configparser.change_flags(compute_test_value="ignore")
+    @theano.config.change_flags(compute_test_value="ignore")
    def validate(self, input_mat):
        x = theano.tensor.matrix()
        f = theano.function([x], self.op(x))

pymc3/tests/test_model.py
Lines changed: 2 additions & 2 deletions

@@ -167,13 +167,13 @@ def test_observed_type(self):
 
 class TestTheanoConfig:
    def test_set_testval_raise(self):
-        with theano.configparser.change_flags(compute_test_value="off"):
+        with theano.config.change_flags(compute_test_value="off"):
            with pm.Model():
                assert theano.config.compute_test_value == "raise"
            assert theano.config.compute_test_value == "off"
 
    def test_nested(self):
-        with theano.configparser.change_flags(compute_test_value="off"):
+        with theano.config.change_flags(compute_test_value="off"):
            with pm.Model(theano_config={"compute_test_value": "ignore"}):
                assert theano.config.compute_test_value == "ignore"
                with pm.Model(theano_config={"compute_test_value": "warn"}):

pymc3/tests/test_model_helpers.py
Lines changed: 2 additions & 1 deletion

@@ -79,7 +79,8 @@ def test_pandas_to_array(self):
 
        # Check function behavior with Theano graph variable
        theano_output = func(theano_graph_input)
-        assert isinstance(theano_output, theano.gof.graph.Variable)
+        assert isinstance(theano_output, theano.graph.basic.Variable)
+        assert theano_output == theano_graph_input
        assert theano_output.owner.inputs[0].name == input_name
 
        # Check function behavior with generator data

pymc3/tests/test_random.py
Lines changed: 2 additions & 2 deletions

@@ -114,9 +114,9 @@ def test_dep_vars(self):
            ]
        )
 
-    def test_gof_constant(self):
+    def test_graph_constant(self):
        # Issue 3595 pointed out that slice(None) can introduce
-        # theano.gof.graph.Constant into the compute graph, which wasn't
+        # theano.graph.basic.Constant into the compute graph, which wasn't
        # handled correctly by draw_values
        n_d = 500
        n_x = 2

pymc3/tests/test_step.py
Lines changed: 1 addition & 1 deletion

@@ -27,7 +27,7 @@
 
 from numpy.testing import assert_array_almost_equal
 from theano.compile.ops import as_op
-from theano.gof.op import Op
+from theano.graph.op import Op
 
 from pymc3.data import Data
 from pymc3.distributions import (
