|
5 | 5 | import pytensor
|
6 | 6 | import pytensor.tensor as pt
|
7 | 7 | from pytensor.compile.mode import OPT_FAST_RUN, Mode
|
| 8 | +from pytensor.graph import vectorize_graph |
8 | 9 | from pytensor.graph.basic import Constant, equal_computations
|
9 | 10 | from pytensor.raise_op import Assert, CheckAndRaise, assert_op
|
10 | 11 | from pytensor.scalar.basic import ScalarType, float64
|
11 | 12 | from pytensor.sparse import as_sparse_variable
|
| 13 | +from pytensor.tensor.basic import second |
| 14 | +from pytensor.tensor.elemwise import DimShuffle |
12 | 15 | from tests import unittest_tools as utt
|
13 | 16 |
|
14 | 17 |
|
@@ -184,3 +187,68 @@ def test_CheckAndRaise_sparse_variable():
|
184 | 187 | a2 = check_and_raise(aspe1, aspe2.sum() > 2)
|
185 | 188 | with pytest.raises(ValueError, match="sparse_check"):
|
186 | 189 | a2.sum().eval()
|
@pytensor.config.change_flags(cxx="")  # Skip C compilation for speed
def test_vectorize():
    """Vectorizing an ``assert_op`` graph batches the value, the condition, or both.

    When only the asserted value is batched, the result is still a plain
    ``CheckAndRaise``.  When the condition is batched, the checked value must be
    broadcast against it, which shows up as a ``second`` (broadcast) node wrapping
    the ``CheckAndRaise``.
    """
    floatX = pytensor.config.floatX

    x = pt.vector("x")
    y = pt.vector("y")
    checked = assert_op(x, pt.all(y >= 0))

    batch_x = pt.matrix("batch_x", shape=(2, None))
    batch_y = pt.matrix("batch_y", shape=(2, None))

    test_x = np.arange(3).astype(floatX)
    test_y = np.arange(4).astype(floatX)
    test_batch_x = np.arange(6).reshape(2, 3).astype(floatX)
    test_batch_y = np.arange(8).reshape(2, 4).astype(floatX)

    # Case 1: only x is batched — output is still a direct CheckAndRaise
    batched = vectorize_graph(checked, {x: batch_x, y: y})
    assert batched.type.shape == (2, None)
    assert isinstance(batched.owner.op, CheckAndRaise)
    np.testing.assert_array_equal(
        batched.eval({batch_x: test_batch_x, y: test_y}),
        test_batch_x,
    )
    with pytest.raises(AssertionError):
        batched.eval({batch_x: test_batch_x, y: -test_y})

    # Case 2: only y is batched — x is broadcast (via `second`) to the batch shape
    batched = vectorize_graph(checked, {x: x, y: batch_y})
    assert batched.type.shape == (2, None)
    assert batched.owner.op == second  # broadcast
    assert isinstance(batched.owner.inputs[1].owner.op, DimShuffle)
    assert isinstance(batched.owner.inputs[1].owner.inputs[0].owner.op, CheckAndRaise)
    np.testing.assert_array_equal(
        batched.eval({x: test_x, batch_y: test_batch_y}),
        np.broadcast_to(test_x, (2, *test_x.shape)),
    )
    with pytest.raises(AssertionError):
        batched.eval({x: test_x, batch_y: -test_batch_y})

    # Case 3: both x and y are batched along the same leading dim
    batched = vectorize_graph(checked, {x: batch_x, y: batch_y})
    assert batched.type.shape == (2, None)
    assert batched.owner.op == second
    assert isinstance(batched.owner.inputs[1].owner.op, CheckAndRaise)
    np.testing.assert_array_equal(
        batched.eval({batch_x: test_batch_x, batch_y: test_batch_y}),
        test_batch_x,
    )
    with pytest.raises(AssertionError):
        batched.eval({batch_x: test_batch_x, batch_y: -test_batch_y})

    # Case 4: both are batched on orthogonal dims, so they broadcast each other
    batched = vectorize_graph(checked, {x: batch_x[:, None, :], y: batch_y[None, :, :]})
    assert batched.type.shape == (2, 2, None)
    assert batched.owner.op == second
    assert isinstance(batched.owner.inputs[1].owner.op, CheckAndRaise)
    np.testing.assert_array_equal(
        batched.eval({batch_x: test_batch_x, batch_y: test_batch_y}),
        np.broadcast_to(test_batch_x[:, None, :], (2, *test_batch_x.shape)),
    )
    with pytest.raises(AssertionError):
        batched.eval({batch_x: test_batch_x, batch_y: -test_batch_y})
0 commit comments