@@ -458,10 +458,10 @@ def test_make_vector_fail(self):
458
458
res = MakeVector ("int32" )(a , b )
459
459
460
460
res = MakeVector ()(a )
461
- assert res .broadcastable == (True ,)
461
+ assert res .type . shape == (1 ,)
462
462
463
463
res = MakeVector ()()
464
- assert res .broadcastable == (False ,)
464
+ assert res .type . shape == (0 ,)
465
465
466
466
def test_infer_shape (self ):
467
467
adscal = dscalar ()
@@ -1665,18 +1665,18 @@ def test_broadcastable_flag_assignment_mixed_otheraxes(self):
1665
1665
a = self .shared (a_val , shape = (None , None , 1 ))
1666
1666
b = self .shared (b_val , shape = (1 , None , 1 ))
1667
1667
c = self .join_op (1 , a , b )
1668
- assert c .type .broadcastable [0 ] and c .type .broadcastable [2 ]
1669
- assert not c .type .broadcastable [1 ]
1668
+ assert c .type .shape [0 ] == 1 and c .type .shape [2 ] == 1
1669
+ assert c .type .shape [1 ] != 1
1670
1670
1671
1671
# Opt can replace the int by an Aesara constant
1672
1672
c = self .join_op (constant (1 ), a , b )
1673
- assert c .type .broadcastable [0 ] and c .type .broadcastable [2 ]
1674
- assert not c .type .broadcastable [1 ]
1673
+ assert c .type .shape [0 ] == 1 and c .type .shape [2 ] == 1
1674
+ assert c .type .shape [1 ] != 1
1675
1675
1676
1676
# In case future opts insert other useless stuff
1677
1677
c = self .join_op (cast (constant (1 ), dtype = "int32" ), a , b )
1678
- assert c .type .broadcastable [0 ] and c .type .broadcastable [2 ]
1679
- assert not c .type .broadcastable [1 ]
1678
+ assert c .type .shape [0 ] == 1 and c .type .shape [2 ] == 1
1679
+ assert c .type .shape [1 ] != 1
1680
1680
1681
1681
f = function ([], c , mode = self .mode )
1682
1682
topo = f .maker .fgraph .toposort ()
@@ -1703,7 +1703,7 @@ def test_broadcastable_flag_assignment_mixed_thisaxes(self):
1703
1703
a = self .shared (a_val , shape = (None , None , 1 ))
1704
1704
b = self .shared (b_val , shape = (1 , None , 1 ))
1705
1705
c = self .join_op (0 , a , b )
1706
- assert not c .type .broadcastable [0 ]
1706
+ assert c .type .shape [0 ] != 1
1707
1707
1708
1708
f = function ([], c , mode = self .mode )
1709
1709
topo = f .maker .fgraph .toposort ()
@@ -1736,7 +1736,7 @@ def test_broadcastable_flags_all_broadcastable_on_joinaxis(self):
1736
1736
a = self .shared (a_val , shape = (1 , None , 1 ))
1737
1737
b = self .shared (b_val , shape = (1 , None , 1 ))
1738
1738
c = self .join_op (0 , a , b )
1739
- assert not c .type .broadcastable [0 ]
1739
+ assert c .type .shape [0 ] != 1
1740
1740
1741
1741
f = function ([], c , mode = self .mode )
1742
1742
topo = f .maker .fgraph .toposort ()
@@ -1754,9 +1754,9 @@ def test_broadcastable_single_input_broadcastable_dimension(self):
1754
1754
a_val = rng .random ((1 , 4 , 1 )).astype (self .floatX )
1755
1755
a = self .shared (a_val , shape = (1 , None , 1 ))
1756
1756
b = self .join_op (0 , a )
1757
- assert b .type .broadcastable [0 ]
1758
- assert b .type .broadcastable [2 ]
1759
- assert not b .type .broadcastable [1 ]
1757
+ assert b .type .shape [0 ] == 1
1758
+ assert b .type .shape [2 ] == 1
1759
+ assert b .type .shape [1 ] != 1
1760
1760
1761
1761
f = function ([], b , mode = self .mode )
1762
1762
topo = f .maker .fgraph .toposort ()
@@ -1782,13 +1782,13 @@ def test_broadcastable_flags_many_dims_and_inputs(self):
1782
1782
d = TensorType (dtype = self .floatX , shape = (1 , None , 1 , 1 , None , 1 ))()
1783
1783
e = TensorType (dtype = self .floatX , shape = (1 , None , 1 , None , None , 1 ))()
1784
1784
f = self .join_op (0 , a , b , c , d , e )
1785
- fb = f .type .broadcastable
1785
+ fb = tuple ( s == 1 for s in f .type .shape )
1786
1786
assert not fb [0 ] and fb [1 ] and fb [2 ] and fb [3 ] and not fb [4 ] and fb [5 ]
1787
1787
g = self .join_op (1 , a , b , c , d , e )
1788
- gb = g .type .broadcastable
1788
+ gb = tuple ( s == 1 for s in g .type .shape )
1789
1789
assert gb [0 ] and not gb [1 ] and gb [2 ] and gb [3 ] and not gb [4 ] and gb [5 ]
1790
1790
h = self .join_op (4 , a , b , c , d , e )
1791
- hb = h .type .broadcastable
1791
+ hb = tuple ( s == 1 for s in h .type .shape )
1792
1792
assert hb [0 ] and hb [1 ] and hb [2 ] and hb [3 ] and not hb [4 ] and hb [5 ]
1793
1793
1794
1794
f = function ([a , b , c , d , e ], f , mode = self .mode )
@@ -1981,8 +1981,8 @@ def test_TensorFromScalar():
1981
1981
s = aes .constant (56 )
1982
1982
t = tensor_from_scalar (s )
1983
1983
assert t .owner .op is tensor_from_scalar
1984
- assert t .type .broadcastable == (), t . type . broadcastable
1985
- assert t .type .ndim == 0 , t . type . ndim
1984
+ assert t .type .shape == ()
1985
+ assert t .type .ndim == 0
1986
1986
assert t .type .dtype == s .type .dtype
1987
1987
1988
1988
v = eval_outputs ([t ])
@@ -2129,23 +2129,23 @@ def test_flatten_broadcastable():
2129
2129
2130
2130
inp = TensorType ("float64" , shape = (None , None , None , None ))()
2131
2131
out = flatten (inp , ndim = 2 )
2132
- assert out .broadcastable == (False , False )
2132
+ assert out .type . shape == (None , None )
2133
2133
2134
2134
inp = TensorType ("float64" , shape = (None , None , None , 1 ))()
2135
2135
out = flatten (inp , ndim = 2 )
2136
- assert out .broadcastable == (False , False )
2136
+ assert out .type . shape == (None , None )
2137
2137
2138
2138
inp = TensorType ("float64" , shape = (None , 1 , None , 1 ))()
2139
2139
out = flatten (inp , ndim = 2 )
2140
- assert out .broadcastable == (False , False )
2140
+ assert out .type . shape == (None , None )
2141
2141
2142
2142
inp = TensorType ("float64" , shape = (None , 1 , 1 , 1 ))()
2143
2143
out = flatten (inp , ndim = 2 )
2144
- assert out .broadcastable == (False , True )
2144
+ assert out .type . shape == (None , 1 )
2145
2145
2146
2146
inp = TensorType ("float64" , shape = (1 , None , 1 , 1 ))()
2147
2147
out = flatten (inp , ndim = 3 )
2148
- assert out .broadcastable == (True , False , True )
2148
+ assert out .type . shape == (1 , None , 1 )
2149
2149
2150
2150
2151
2151
def test_flatten_ndim_invalid ():
@@ -2938,8 +2938,8 @@ def permute_fixed(s_input):
2938
2938
2939
2939
def test_3b_2 (self ):
2940
2940
# Test permute_row_elements on a more complex broadcasting pattern:
2941
- # input.type.broadcastable = (False, True, False ),
2942
- # p.type.broadcastable = (False, False ).
2941
+ # input.type.shape = (None, 1, None ),
2942
+ # p.type.shape = (None, None ).
2943
2943
2944
2944
input = TensorType ("floatX" , shape = (None , 1 , None ))()
2945
2945
p = imatrix ()
@@ -4046,7 +4046,7 @@ def test_broadcasted(self):
4046
4046
B = np .asarray (np .random .random ((4 , 1 )), dtype = "float32" )
4047
4047
for m in self .modes :
4048
4048
f = function ([a , b ], choose (a , b , mode = m ))
4049
- assert choose (a , b , mode = m ).broadcastable [0 ]
4049
+ assert choose (a , b , mode = m ).type . shape [0 ] == 1
4050
4050
t_c = f (A , B )
4051
4051
n_c = np .choose (A , B , mode = m )
4052
4052
assert np .allclose (t_c , n_c )
0 commit comments