Recently, PyMC has been giving me the error “The truth value of an array with more than one element is ambiguous. Use a.any() or a.all().” at the end of sampling. The error only happens sometimes, and I am on the latest version of PyMC (5.10.4).
The full traceback is shown below:
---------------------------------------------------------------------------
ValueError Traceback (most recent call last)
<ipython-input-16-9baedeb83496> in <cell line: 13>()
11 NUTSpars = {"target_accept": 0.8}
12
---> 13 trace = dive.sample(model_tikh, MCMCparameters, seed=seed, NUTSpars=NUTSpars)
5 frames
/content/dive/dive/models.py in sample(model_dic, MCMCparameters, steporder, NUTSpars, seed)
365
366 # Perform MCMC sampling
--> 367 idata = pm.sample(model=model, step=step, random_seed=seed, **MCMCparameters)
368
369 # Remove undesired variables
/usr/local/lib/python3.10/dist-packages/pymc/sampling/mcmc.py in sample(draws, tune, chains, cores, random_seed, progressbar, step, nuts_sampler, initvals, init, jitter_max_retries, n_init, trace, discard_tuned_samples, compute_convergence_checks, keep_warning_stat, return_inferencedata, idata_kwargs, nuts_sampler_kwargs, callback, mp_ctx, model, **kwargs)
789
790 t_start = time.time()
--> 791 if parallel:
792 _log.info(f"Multiprocess sampling ({chains} chains in {cores} jobs)")
793 _print_step_hierarchy(step)
/usr/local/lib/python3.10/dist-packages/pymc/sampling/mcmc.py in _sample_return(run, traces, tune, t_sampling, discard_tuned_samples, compute_convergence_checks, return_inferencedata, keep_warning_stat, idata_kwargs, model)
860 stat = mtrace._straces[0].get_sampler_stats("tune", sampler_idx=0)
861 stat = tuple(stat)
--> 862 n_tune = stat.count(True)
863 n_draws = stat.count(False)
864 else:
/usr/local/lib/python3.10/dist-packages/pymc/stats/convergence.py in run_convergence_checks(idata, model)
122 warnings.append(warn)
123
--> 124 warnings += warn_divergences(idata)
125 warnings += warn_treedepth(idata)
126
/usr/local/lib/python3.10/dist-packages/pymc/stats/convergence.py in warn_treedepth(idata)
161
162 warnings = []
--> 163 for c in rmtd.chain:
164 if sum(rmtd.sel(chain=c)) / rmtd.sizes["draw"] > 0.05:
165 warnings.append(
/usr/local/lib/python3.10/dist-packages/xarray/core/common.py in __bool__(self)
151
152 def __bool__(self: Any) -> bool:
--> 153 return bool(self.values)
154
155 def __float__(self: Any) -> float:
ValueError: The truth value of an array with more than one element is ambiguous. Use a.any() or a.all()
According to the traceback, the error occurs inside PyMC's convergence checks (the call to warn_divergences(), with the innermost frames in warn_treedepth() and xarray), but there were no divergences in the sampled chains.
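For context, the message at the bottom of the traceback is exactly what xarray raises when bool() is taken of a DataArray with more than one element; a minimal example outside of PyMC (with a made-up array) reproduces it:

import xarray as xr

# bool() on a DataArray with more than one element raises the same ValueError
# shown in the innermost frame of the traceback above.
da = xr.DataArray([True, False, True])
bool(da)
# ValueError: The truth value of an array with more than one element is ambiguous. Use a.any() or a.all()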
Below is a selected snippet of my model code:
with pm.Model() as model:
    tau = pm.Gamma('tau', alpha=tau_prior[0], beta=tau_prior[1], initval=1.3)
    sigma = pm.Deterministic('sigma', 1/np.sqrt(tau))  # for reporting
    delta = pm.Gamma('delta', alpha=delta_prior[0], beta=delta_prior[1], initval=1.02)
    lg_alpha = pm.Deterministic('lg_alpha', np.log10(np.sqrt(delta/tau)))  # for reporting

    P = pm.MvNormal('P', shape=len(r), mu=np.ones(len(r))/(dr*len(r)), tau=LtL)
    constraint = (P >= 0).all()
    potential = pm.Potential("P_nonnegative", pm.math.log(pm.math.switch(constraint, 1, 0)))
    Vmodel = pm.math.dot(K0*dr, P)

    b = pm.Beta('b', alpha=7.5, beta=1.65)  # b = V0*(1-lamb)
    c = pm.Beta('c', alpha=7.75, beta=2.6)  # c = V0*lamb
    Vmodel = b + c*Vmodel
    # deterministic lamb and V0 for reporting
    V0 = pm.Deterministic('V0', b + c*(pm.math.sum(P)*(r[1]-r[0])))  # V0 = b+c after normalization
    lamb = pm.Deterministic('lamb', c*(pm.math.sum(P)*(r[1]-r[0]))/V0)  # lamb = c/(b+c) after norm.

    Bend = pm.Beta("Bend", alpha=1.0, beta=1.5)
    k = pm.Deterministic('k', -np.log(Bend)/t[-1])  # for reporting
    B = bg_exp(t, k)
    Vmodel *= B

    pm.Normal('V', mu=Vmodel, tau=tau, observed=Vdata)
Since the error is about multi-element arrays, I suspect it has to do with P (which is an MvNormal) or with its constraint, which keeps it non-negative.
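For what it's worth, in isolation the .all() reduction does collapse the elementwise comparison to a scalar; a minimal sketch with plain pytensor (not my actual model, and a made-up test vector) shows what I mean:

import numpy as np
import pytensor.tensor as pt

# Elementwise comparison gives a boolean vector; .all() reduces it to a single scalar.
P = pt.vector("P", dtype="float64")
constraint = (P >= 0).all()
print(constraint.eval({P: np.array([0.1, -0.2, 0.3])}))  # prints False (a scalar, not an array)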
Help would be greatly appreciated!