@@ -46,7 +46,7 @@
 from aesara.tensor.random.opt import local_subtensor_rv_lift
 from aesara.tensor.random.var import RandomStateSharedVariable
 from aesara.tensor.sharedvar import ScalarSharedVariable
-from aesara.tensor.var import TensorVariable, TensorConstant
+from aesara.tensor.var import TensorConstant, TensorVariable

 from pymc.aesaraf import (
     compile_pymc,
@@ -61,7 +61,7 @@
 from pymc.distributions import joint_logpt
 from pymc.distributions.logprob import _get_scaling
 from pymc.distributions.transforms import _default_transform
-from pymc.exceptions import ImputationWarning, ShapeWarning, SamplingError, ShapeError
+from pymc.exceptions import ImputationWarning, SamplingError, ShapeError, ShapeWarning
 from pymc.initial_point import make_initial_point_fn
 from pymc.math import flatten_list
 from pymc.util import (
@@ -1180,7 +1180,7 @@ def set_data(
             # NOTE: If there are multiple pm.MutableData containers sharing this dim, but the user only
             # changes the values for one of them, they will run into shape problems nonetheless.
             if length_changed:
-                if isinstance(length_tensor,TensorConstant):
+                if isinstance(length_tensor, TensorConstant):
                     raise ShapeError(
                         f"Resizing dimension '{dname}' is impossible, because "
                         f"a 'TensorConstant' stores its length. To be able "
@@ -1221,7 +1221,6 @@ def set_data(
                 # Updating the shared variable resizes dependent nodes that use this dimension for their `size`.
                 length_tensor.set_value(new_length)
 
-
             if new_coords is not None:
                 # Update the registered coord values (also if they were None)
                 if len(new_coords) != new_length:
@@ -1232,7 +1231,6 @@ def set_data(
                     )
                 self._coords[dname] = new_coords
 
-
         shared_object.set_value(values)
 
     def register_rv(
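When the resize succeeds, the new_coords handled above replace the registered labels for the dim, and their length must match the new data length. A hedged sketch of the corresponding caller-side usage, relying on the coords keyword of Model.set_data that this hunk operates on:

import numpy as np
import pymc as pm

with pm.Model() as model:
    x = pm.MutableData("x", np.arange(3.0), dims="obs")

# Pass coords whose length matches the new values; a length mismatch
# raises the ShapeError constructed in the hunk above.
model.set_data("x", np.arange(5.0), coords={"obs": ["a", "b", "c", "d", "e"]})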