@@ -61,7 +61,7 @@
 from pymc.distributions import joint_logpt
 from pymc.distributions.logprob import _get_scaling
 from pymc.distributions.transforms import _default_transform
-from pymc.exceptions import ImputationWarning, SamplingError, ShapeError, ShapeWarning
+from pymc.exceptions import ImputationWarning, SamplingError, ShapeError
 from pymc.initial_point import make_initial_point_fn
 from pymc.math import flatten_list
 from pymc.util import (
@@ -1180,25 +1180,20 @@ def set_data(
             # NOTE: If there are multiple pm.MutableData containers sharing this dim, but the user only
             # changes the values for one of them, they will run into shape problems nonetheless.
             if length_changed:
+                if original_coords is not None:
+                    if new_coords is None:
+                        raise ValueError(
+                            f"The '{name}' variable already had {len(original_coords)} coord values defined for "
+                            f"its {dname} dimension. With the new values this dimension changes to length "
+                            f"{new_length}, so new coord values for the {dname} dimension are required."
+                        )
                 if isinstance(length_tensor, TensorConstant):
                     raise ShapeError(
                         f"Resizing dimension '{dname}' is impossible, because "
                         f"a 'TensorConstant' stores its length. To be able "
                         f"to change the dimension length, 'fixed' in "
                         f"'model.add_coord' must be set to `False`."
                     )
-                if length_tensor.owner is None:
-                    # This is the case if the dimension was initialized
-                    # from custom coords, but dimension length was not
-                    # stored in TensorConstant e.g by 'fixed' set to False
-
-                    warnings.warn(
-                        f"You're changing the shape of a variable "
-                        f"in the '{dname}' dimension which was initialized "
-                        f"from coords. Make sure to update the corresponding "
-                        f"coords, otherwise you'll get shape issues.",
-                        ShapeWarning,
-                    )
                 else:
                     length_belongs_to = length_tensor.owner.inputs[0].owner.inputs[0]
                     if not isinstance(length_belongs_to, SharedVariable):
@@ -1210,13 +1205,6 @@ def set_data(
                             actual=new_length,
                             expected=old_length,
                         )
-                if original_coords is not None:
-                    if new_coords is None:
-                        raise ValueError(
-                            f"The '{name}' variable already had {len(original_coords)} coord values defined for "
-                            f"its {dname} dimension. With the new values this dimension changes to length "
-                            f"{new_length}, so new coord values for the {dname} dimension are required."
-                        )
                 if isinstance(length_tensor, ScalarSharedVariable):
                     # Updating the shared variable resizes dependent nodes that use this dimension for their `size`.
                     length_tensor.set_value(new_length)
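For context, a minimal sketch of how the reordered check surfaces when calling Model.set_data. The variable name "x", the dimension name "group", and the data values are made up for illustration, and it assumes the dimension length stays resizable because pm.MutableData introduces the dim:

import numpy as np
import pymc as pm

with pm.Model() as model:
    # Hypothetical data variable; letting MutableData introduce the "group"
    # dim keeps its length in a shared variable, so it can be resized later.
    x = pm.MutableData("x", np.array([1.0, 2.0, 3.0]), dims="group")

# Resizing "group" from 3 to 4 entries. If coord values were already defined
# for "group", omitting `coords` here now raises the ValueError at the top of
# the length-change branch instead of deferring to a ShapeWarning later on.
model.set_data(
    "x",
    np.array([1.0, 2.0, 3.0, 4.0]),
    coords={"group": ["a", "b", "c", "d"]},
)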