@@ -2241,16 +2241,19 @@ def test_compute_test_value_grad():
     """
     See https://groups.google.com/d/msg/theano-users/fAP3i2CbskQ/3OgBf4yjqiQJ
     """
-    # WEIGHT = np.array([1, 2, 1, 3, 4, 1, 5, 6, 1, 7, 8, 1], dtype="float32")
+    WEIGHT = np.array([1, 2, 1, 3, 4, 1, 5, 6, 1, 7, 8, 1], dtype="float32")

     with config.change_flags(exception_verbosity="high"):
         W_flat = fvector(name="W")
+        W_flat.tag.test_value = WEIGHT
         W = W_flat.reshape((2, 2, 3))

         outputs_mi = pt.as_tensor_variable(np.asarray(0, dtype="float32"))
+        outputs_mi.tag.test_value = np.asarray(0, dtype="float32")

         def loss_mi(mi, sum_mi, W):
             outputs_ti = pt.as_tensor_variable(np.asarray(0, dtype="float32"))
+            outputs_ti.tag.test_value = np.asarray(0, dtype="float32")

             def loss_ti(ti, sum_ti, mi, W):
                 return W.sum().sum().sum() + sum_ti
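
(Not part of the diff.) A minimal standalone sketch of the test-value pattern this hunk restores, assuming `compute_test_value` is enabled somewhere outside the lines shown: with it on, PyTensor evaluates each new variable eagerly from the test values attached via `tag.test_value`, so shape and dtype mistakes surface at graph-construction time rather than when a function is compiled or run. The `np.arange` input below is made up for illustration.

import numpy as np
from pytensor import config
from pytensor.tensor import fvector

with config.change_flags(compute_test_value="raise"):
    W_flat = fvector(name="W")
    W_flat.tag.test_value = np.arange(12, dtype="float32")  # illustrative data
    W = W_flat.reshape((2, 2, 3))
    # The reshape's test value was computed eagerly from W_flat's test value.
    print(W.tag.test_value.shape)  # (2, 2, 3)
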
@@ -2281,23 +2284,25 @@ def test_compute_test_value_grad_cast():
 
     See https://groups.google.com/d/topic/theano-users/o4jK9xDe5WI/discussion
     """
-    h = matrix("h")
     with pytest.warns(FutureWarning):
+        h = matrix("h")
         h.tag.test_value = np.array([[1, 2, 3, 4], [5, 6, 7, 8]], dtype=config.floatX)

-    w = shared(
-        np.random.default_rng(utt.fetch_seed()).random((4, 3)).astype(config.floatX),
-        name="w",
-    )
+        w = shared(
+            np.random.default_rng(utt.fetch_seed())
+            .random((4, 3))
+            .astype(config.floatX),
+            name="w",
+        )

-    outputs, _ = scan(
-        lambda i, h, w: (dot(h[i], w), i),
-        outputs_info=[None, 0],
-        non_sequences=[h, w],
-        n_steps=3,
-    )
+        outputs, _ = scan(
+            lambda i, h, w: (dot(h[i], w), i),
+            outputs_info=[None, 0],
+            non_sequences=[h, w],
+            n_steps=3,
+        )

-    grad(outputs[0].sum(), w)
+        grad(outputs[0].sum(), w)


 def test_constant_folding_n_steps():
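
(Not part of the diff.) A standalone sketch of the `scan` call exercised in the hunk above, with made-up data in place of the shared test weights: `outputs_info=[None, 0]` declares the first output (the `dot(h[i], w)` result) as non-recurrent, while the second output starts at 0 and is fed back as `i` on the next step; because the step returns `i` unchanged, the index stays 0, and the per-step results are stacked along a new leading axis of length `n_steps`. The compile-and-evaluate lines at the end are added here only for illustration.

import numpy as np
from pytensor import config, function, scan, shared
from pytensor.tensor import dot, matrix

h = matrix("h")
w = shared(np.ones((4, 3), dtype=config.floatX), name="w")  # made-up weights
outputs, _ = scan(
    lambda i, h, w: (dot(h[i], w), i),  # per-step product, plus the (unchanged) index
    outputs_info=[None, 0],             # no recurrence for the product; index starts at 0
    non_sequences=[h, w],
    n_steps=3,
)
f = function([h], outputs[0])
print(f(np.arange(8, dtype=config.floatX).reshape(2, 4)).shape)  # (3, 3)
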