pm.MvNormal gives You cannot drop a non-broadcastable dimension error under certain one-dimensional situations

Description of your problem

Please provide a minimal, self-contained, and reproducible example.

import numpy as np
import pymc3 as pm

D = 1

with pm.Model():
    sd = pm.HalfCauchy("sd", beta=0.1)
    pm.MvNormal("ob", mu=np.zeros((D)), chol=sd*np.eye(D), observed=np.zeros((D)))
    idata = pm.sample(2000, tune=1500, return_inferencedata=True)

gives

ValueError: ('You cannot drop a non-broadcastable dimension.', ((False, False), []))

Any one of the following changes avoids the error (the last one is sketched in full below):

- D = 2
- sd = 0.5 (if you add some random variable to avoid the "The model does not contain any free variables." error)
- sd = pm.HalfCauchy("sd", beta=0.1, shape=(1, 1))
- Replacing sd*np.eye(D) with (sd*np.eye(D))[:]

See this discussion on the forum.
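For concreteness, here is the last workaround applied in context. This is a sketch against PyMC3 3.9.3; the trailing [:] is the only change from the failing example above:

import numpy as np
import pymc3 as pm

D = 1

with pm.Model():
    sd = pm.HalfCauchy("sd", beta=0.1)
    # Workaround from the list above: slicing with [:] rebuilds the
    # tensor, which sidesteps the gradient-time DimShuffle error shown
    # in the traceback below.
    pm.MvNormal(
        "ob",
        mu=np.zeros(D),
        chol=(sd * np.eye(D))[:],
        observed=np.zeros(D),
    )
    idata = pm.sample(2000, tune=1500, return_inferencedata=True)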
Please provide the full traceback.

Auto-assigning NUTS sampler...
Initializing NUTS using jitter+adapt_diag...

---------------------------------------------------------------------------
ValueError                                Traceback (most recent call last)
<ipython-input-10-ee5ade6fe7a9> in <module>
      7     sd = pm.HalfCauchy("sd", beta=0.1)
      8     pm.MvNormal("ob", mu=np.zeros((D)), chol=sd*np.eye(D), observed=np.zeros((D)))
----> 9     idata = pm.sample(2000, tune=1500, return_inferencedata=True)
~/.local/lib/python3.8/site-packages/pymc3/sampling.py in sample(draws, step, init, n_init, start, trace, chain_idx, chains, cores, tune, progressbar, model, random_seed, discard_tuned_samples, compute_convergence_checks, callback, return_inferencedata, idata_kwargs, mp_ctx, pickle_backend, **kwargs)
    479     # By default, try to use NUTS
    480     _log.info("Auto-assigning NUTS sampler...")
--> 481     start_, step = init_nuts(
    482         init=init,
    483         chains=chains,
~/.local/lib/python3.8/site-packages/pymc3/sampling.py in init_nuts(init, chains, n_init, model, random_seed, progressbar, **kwargs)
   2168         raise ValueError("Unknown initializer: {}.".format(init))
   2169 
-> 2170     step = pm.NUTS(potential=potential, model=model, **kwargs)
   2171 
   2172     return start, step

~/.local/lib/python3.8/site-packages/pymc3/step_methods/hmc/nuts.py in __init__(self, vars, max_treedepth, early_max_treedepth, **kwargs)
    166         `pm.sample` to the desired number of tuning steps.
167 """
--> 168 super().__init__(vars, **kwargs)
169
170 self.max_treedepth = max_treedepth
~/.local/lib/python3.8/site-packages/pymc3/step_methods/hmc/base_hmc.py in __init__(self, vars, scaling, step_scale, is_cov, model, blocked, potential, dtype, Emax, target_accept, gamma, k, t0, adapt_step_size, step_rand, **theano_kwargs)
91 vars = inputvars(vars)
92
---> 93 super().__init__(vars, blocked=blocked, model=model, dtype=dtype, **theano_kwargs)
94
95 self.adapt_step_size = adapt_step_size
~/.local/lib/python3.8/site-packages/pymc3/step_methods/arraystep.py in __init__(self, vars, model, blocked, dtype, **theano_kwargs)
241 self.blocked = blocked
242
--> 243 func = model.logp_dlogp_function(
244 vars, dtype=dtype, **theano_kwargs)
245
~/.local/lib/python3.8/site-packages/pymc3/model.py in logp_dlogp_function(self, grad_vars, **kwargs)
933 varnames = [var.name for var in grad_vars]
934 extra_vars = [var for var in self.free_RVs if var.name not in varnames]
--> 935 return ValueGradFunction(self.logpt, grad_vars, extra_vars, **kwargs)
936
937 @property
~/.local/lib/python3.8/site-packages/pymc3/model.py in __init__(self, cost, grad_vars, extra_vars, dtype, casting, **kwargs)
647 )
648
--> 649 grad = tt.grad(self._cost_joined, self._vars_joined)
650 grad.name = "__grad"
651
~/.local/lib/python3.8/site-packages/theano/gradient.py in grad(cost, wrt, consider_constant, disconnected_inputs, add_names, known_grads, return_disconnected, null_gradients)
602 assert g.type.dtype in tensor.float_dtypes
603
--> 604 rval = _populate_grad_dict(var_to_app_to_idx,
605 grad_dict, wrt, cost_name)
606
~/.local/lib/python3.8/site-packages/theano/gradient.py in _populate_grad_dict(var_to_app_to_idx, grad_dict, wrt, cost_name)
1369 return grad_dict[var]
1370
-> 1371 rval = [access_grad_cache(elem) for elem in wrt]
1372
1373 return rval
~/.local/lib/python3.8/site-packages/theano/gradient.py in <listcomp>(.0)
1369 return grad_dict[var]
1370
-> 1371 rval = [access_grad_cache(elem) for elem in wrt]
1372
1373 return rval
~/.local/lib/python3.8/site-packages/theano/gradient.py in access_grad_cache(var)
1324 for idx in node_to_idx[node]:
1325
-> 1326 term = access_term_cache(node)[idx]
1327
1328 if not isinstance(term, gof.Variable):
~/.local/lib/python3.8/site-packages/theano/gradient.py in access_term_cache(node)
1019 inputs = node.inputs
1020
-> 1021 output_grads = [access_grad_cache(var) for var in node.outputs]
1022
1023 # list of bools indicating if each output is connected to the cost
~/.local/lib/python3.8/site-packages/theano/gradient.py in <listcomp>(.0)
1019 inputs = node.inputs
1020
-> 1021 output_grads = [access_grad_cache(var) for var in node.outputs]
1022
1023 # list of bools indicating if each output is connected to the cost
~/.local/lib/python3.8/site-packages/theano/gradient.py in access_grad_cache(var)
1324 for idx in node_to_idx[node]:
1325
-> 1326 term = access_term_cache(node)[idx]
1327
1328 if not isinstance(term, gof.Variable):
~/.local/lib/python3.8/site-packages/theano/gradient.py in access_term_cache(node)
1019 inputs = node.inputs
1020
-> 1021 output_grads = [access_grad_cache(var) for var in node.outputs]
1022
1023 # list of bools indicating if each output is connected to the cost
~/.local/lib/python3.8/site-packages/theano/gradient.py in <listcomp>(.0)
1019 inputs = node.inputs
1020
-> 1021 output_grads = [access_grad_cache(var) for var in node.outputs]
1022
1023 # list of bools indicating if each output is connected to the cost
~/.local/lib/python3.8/site-packages/theano/gradient.py in access_grad_cache(var)
1324 for idx in node_to_idx[node]:
1325
-> 1326 term = access_term_cache(node)[idx]
1327
1328 if not isinstance(term, gof.Variable):
~/.local/lib/python3.8/site-packages/theano/gradient.py in access_term_cache(node)
1019 inputs = node.inputs
1020
-> 1021 output_grads = [access_grad_cache(var) for var in node.outputs]
1022
1023 # list of bools indicating if each output is connected to the cost
~/.local/lib/python3.8/site-packages/theano/gradient.py in <listcomp>(.0)
1019 inputs = node.inputs
1020
-> 1021 output_grads = [access_grad_cache(var) for var in node.outputs]
1022
1023 # list of bools indicating if each output is connected to the cost
~/.local/lib/python3.8/site-packages/theano/gradient.py in access_grad_cache(var)
1324 for idx in node_to_idx[node]:
1325
-> 1326 term = access_term_cache(node)[idx]
1327
1328 if not isinstance(term, gof.Variable):
~/.local/lib/python3.8/site-packages/theano/gradient.py in access_term_cache(node)
1019 inputs = node.inputs
1020
-> 1021 output_grads = [access_grad_cache(var) for var in node.outputs]
1022
1023 # list of bools indicating if each output is connected to the cost
~/.local/lib/python3.8/site-packages/theano/gradient.py in <listcomp>(.0)
1019 inputs = node.inputs
1020
-> 1021 output_grads = [access_grad_cache(var) for var in node.outputs]
1022
1023 # list of bools indicating if each output is connected to the cost
~/.local/lib/python3.8/site-packages/theano/gradient.py in access_grad_cache(var)
1324 for idx in node_to_idx[node]:
1325
-> 1326 term = access_term_cache(node)[idx]
1327
1328 if not isinstance(term, gof.Variable):
~/.local/lib/python3.8/site-packages/theano/gradient.py in access_term_cache(node)
1159 str(g_shape))
1160
-> 1161 input_grads = node.op.L_op(inputs, node.outputs,
1162 new_output_grads)
1163
~/.local/lib/python3.8/site-packages/theano/gof/op.py in L_op(self, inputs, outputs, output_grads)
709
710 def L_op(self, inputs, outputs, output_grads):
--> 711 return self.grad(inputs, output_grads)
712
713 def R_op(self, inputs, eval_points):
~/.local/lib/python3.8/site-packages/theano/tensor/elemwise.py in grad(self, inp, grads)
303 return [inp[0].zeros_like(dtype=theano.config.floatX)]
304 else:
--> 305 return [DimShuffle(gz.type.broadcastable, grad_order)(
306 Elemwise(scalar.identity)(gz))]
307
~/.local/lib/python3.8/site-packages/theano/tensor/elemwise.py in __init__(self, input_broadcastable, new_order, inplace)
193 else:
194 # we cannot drop non-broadcastable dimensions
--> 195 raise ValueError(
196 "You cannot drop a non-broadcastable dimension.",
197 (input_broadcastable, new_order))
ValueError: ('You cannot drop a non-broadcastable dimension.', ((False, False), []))
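For reference, the error tuple ((False, False), []) appears to be (input_broadcastable, new_order): per the last frames above, the Elemwise gradient builds a DimShuffle that drops both dimensions of a 2-D tensor, and Theano only allows dropping dimensions whose broadcastable flag is True. A minimal sketch that hits the same check directly (assuming Theano 1.0.x; the variable x is just for illustration):

import theano.tensor as tt
from theano.tensor.elemwise import DimShuffle

# A 2-D tensor with broadcastable flags (False, False): Theano cannot
# prove either dimension has length 1, so neither may be dropped.
x = tt.matrix("x")

# Output order [] means "drop both dimensions", which raises the same
# ValueError as in the traceback above.
DimShuffle(x.broadcastable, [])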
Versions and main components

- PyMC3 Version: 3.9.3
- Theano Version: 1.0.5
- Python Version: 3.8.5
- Operating system: Linux
- How did you install PyMC3: pip
thjread changed the title from "pm.MvNormal gives You cannot drop a non-broadcastable dimension error when one-dimensional and chol is stochastic" to "pm.MvNormal gives You cannot drop a non-broadcastable dimension error under certain one-dimensional situations" on Sep 4, 2020.