Skip to content

Commit 637bd38

Browse files
Revert "Automatically add SpecifyShape Op when full-length shape is given"
This reverts commit c99f15c.
1 parent faed5f1 commit 637bd38

File tree

5 files changed

+3
-65
lines changed

5 files changed

+3
-65
lines changed

RELEASE-NOTES.md

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -10,7 +10,7 @@
1010
### New Features
1111
- The `CAR` distribution has been added to allow for use of conditional autoregressions which often are used in spatial and network models.
1212
- The dimensionality of model variables can now be parametrized through either of `shape`, `dims` or `size` (see [#4625](https://github.com/pymc-devs/pymc3/pull/4625)):
13-
- With `shape` the length of dimensions must be given numerically or as scalar Aesara `Variables`. A `SpecifyShape` `Op` is added automatically unless `Ellipsis` is used. Using `shape` restricts the model variable to the exact length and re-sizing is no longer possible.
13+
- With `shape` the length of dimensions must be given numerically or as scalar Aesara `Variables`. Using `shape` restricts the model variable to the exact length and re-sizing is no longer possible.
1414
- `dims` keeps model variables re-sizeable (for example through `pm.Data`) and leads to well defined coordinates in `InferenceData` objects.
1515
- The `size` kwarg creates new dimensions in addition to what is implied by RV parameters.
1616
- An `Ellipsis` (`...`) in the last position of `shape` or `dims` can be used as short-hand notation for implied dimensions.

pymc3/aesaraf.py

Lines changed: 0 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -45,7 +45,6 @@
4545
from aesara.sandbox.rng_mrg import MRG_RandomStream as RandomStream
4646
from aesara.tensor.elemwise import Elemwise
4747
from aesara.tensor.random.op import RandomVariable
48-
from aesara.tensor.shape import SpecifyShape
4948
from aesara.tensor.sharedvar import SharedVariable
5049
from aesara.tensor.subtensor import AdvancedIncSubtensor, AdvancedIncSubtensor1
5150
from aesara.tensor.var import TensorVariable
@@ -147,8 +146,6 @@ def change_rv_size(
147146
Expand the existing size by `new_size`.
148147
149148
"""
150-
if isinstance(rv_var.owner.op, SpecifyShape):
151-
rv_var = rv_var.owner.inputs[0]
152149
rv_node = rv_var.owner
153150
rng, size, dtype, *dist_params = rv_node.inputs
154151
name = rv_var.name

pymc3/distributions/distribution.py

Lines changed: 1 addition & 27 deletions
Original file line numberDiff line numberDiff line change
@@ -28,7 +28,6 @@
2828

2929
from aesara.graph.basic import Variable
3030
from aesara.tensor.random.op import RandomVariable
31-
from aesara.tensor.shape import SpecifyShape, specify_shape
3231

3332
from pymc3.aesaraf import change_rv_size, pandas_to_array
3433
from pymc3.distributions import _logcdf, _logp
@@ -247,13 +246,6 @@ def __new__(
247246
rv_out = cls.dist(*args, rng=rng, testval=None, **kwargs)
248247
n_implied = rv_out.ndim
249248

250-
# The `.dist()` can wrap automatically with a SpecifyShape Op which brings informative
251-
# error messages earlier in model construction.
252-
# Here, however, the underlying RV must be used - a new SpecifyShape Op can be added at the end.
253-
assert_shape = None
254-
if isinstance(rv_out.owner.op, SpecifyShape):
255-
rv_out, assert_shape = rv_out.owner.inputs
256-
257249
# `dims` are only available with this API, because `.dist()` can be used
258250
# without a modelcontext and dims are not tracked at the Aesara level.
259251
if dims is not None:
@@ -293,15 +285,7 @@ def __new__(
293285
# Assigning the testval earlier causes trouble because the RV may not be created with the final shape already.
294286
rv_out.tag.test_value = testval
295287

296-
rv_registered = model.register_rv(
297-
rv_out, name, observed, total_size, dims=dims, transform=transform
298-
)
299-
300-
# Wrapping in specify_shape now does not break transforms:
301-
if assert_shape is not None:
302-
rv_registered = specify_shape(rv_registered, assert_shape)
303-
304-
return rv_registered
288+
return model.register_rv(rv_out, name, observed, total_size, dims=dims, transform=transform)
305289

306290
@classmethod
307291
def dist(
@@ -323,9 +307,6 @@ def dist(
323307
324308
Ellipsis (...) may be used in the last position of the tuple,
325309
and automatically expand to the shape implied by RV inputs.
326-
327-
Without Ellipsis, a `SpecifyShape` Op is automatically applied,
328-
constraining this model variable to exactly the specified shape.
329310
size : int, tuple, Variable, optional
330311
A scalar or tuple for replicating the RV in addition
331312
to its implied shape/dimensionality.
@@ -342,7 +323,6 @@ def dist(
342323
raise NotImplementedError("The use of a `.dist(dims=...)` API is not yet supported.")
343324

344325
shape, _, size = _validate_shape_dims_size(shape=shape, size=size)
345-
assert_shape = None
346326

347327
# Create the RV without specifying size or testval.
348328
# The size will be expanded later (if necessary) and only then the testval fits.
@@ -351,16 +331,13 @@ def dist(
351331
if shape is None and size is None:
352332
size = ()
353333
elif shape is not None:
354-
# SpecifyShape is automatically applied for symbolic and non-Ellipsis shapes
355334
if isinstance(shape, Variable):
356-
assert_shape = shape
357335
size = ()
358336
else:
359337
if Ellipsis in shape:
360338
size = tuple(shape[:-1])
361339
else:
362340
size = tuple(shape[: len(shape) - rv_native.ndim])
363-
assert_shape = shape
364341
# no-op conditions:
365342
# `elif size is not None` (User already specified how to expand the RV)
366343
# `else` (Unreachable)
@@ -370,9 +347,6 @@ def dist(
370347
else:
371348
rv_out = rv_native
372349

373-
if assert_shape is not None:
374-
rv_out = specify_shape(rv_out, shape=assert_shape)
375-
376350
if testval is not None:
377351
rv_out.tag.test_value = testval
378352

pymc3/tests/test_logp.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -86,7 +86,7 @@ def test_logpt_incsubtensor(indices, shape):
8686
sigma = 0.001
8787
rng = aesara.shared(np.random.RandomState(232), borrow=True)
8888

89-
a = Normal.dist(mu, sigma, rng=rng)
89+
a = Normal.dist(mu, sigma, shape=shape, rng=rng)
9090
a.name = "a"
9191

9292
a_idx = at.set_subtensor(a[indices], data)

pymc3/tests/test_shape_handling.py

Lines changed: 0 additions & 33 deletions
Original file line numberDiff line numberDiff line change
@@ -350,39 +350,6 @@ def test_dist_api_works(self):
350350
assert pm.Normal.dist(mu=mu, shape=(7, ...)).eval().shape == (7, 3)
351351
assert pm.Normal.dist(mu=mu, size=(4,)).eval().shape == (4, 3)
352352

353-
def test_auto_assert_shape(self):
354-
with pytest.raises(AssertionError, match="will never match"):
355-
pm.Normal.dist(mu=[1, 2], shape=[])
356-
357-
mu = at.vector(name="mu_input")
358-
rv = pm.Normal.dist(mu=mu, shape=[3, 4])
359-
f = aesara.function([mu], rv, mode=aesara.Mode("py"))
360-
assert f([1, 2, 3, 4]).shape == (3, 4)
361-
362-
with pytest.raises(AssertionError, match=r"Got shape \(3, 2\), expected \(3, 4\)."):
363-
f([1, 2])
364-
365-
# The `shape` can be symbolic!
366-
s = at.vector(dtype="int32")
367-
rv = pm.Uniform.dist(2, [4, 5], shape=s)
368-
f = aesara.function([s], rv, mode=aesara.Mode("py"))
369-
f(
370-
[
371-
2,
372-
]
373-
)
374-
with pytest.raises(
375-
AssertionError,
376-
match=r"Got 1 dimensions \(shape \(2,\)\), expected 2 dimensions with shape \(3, 4\).",
377-
):
378-
f([3, 4])
379-
with pytest.raises(
380-
AssertionError,
381-
match=r"Got 1 dimensions \(shape \(2,\)\), expected 0 dimensions with shape \(\).",
382-
):
383-
f([])
384-
pass
385-
386353
def test_lazy_flavors(self):
387354

388355
_validate_shape_dims_size(shape=5)

0 commit comments

Comments (0)