I tried also this solution, but still it gives me some error when computing the logp of a vector that has negative entries (while I expected it to return zero probability). I tried specifying the logp as:
def mvlognormal_logp(value, mu, cov):
res = pt.switch(pt.gt(value, 0), pt.log(value), -np.inf)
return pm.MvNormal.logp(res, mu, cov)
or as
def mvlognormal_logp(value, mu, cov, *args, **kwargs):
res = pt.switch(pt.gt(value, 0), pt.log(value), -np.inf)
return pm.logp(pm.MvNormal.dist(mu, cov), res)
but still, this gives some errors. I am attaching here the output
---------------------------------------------------------------------------
ValueError Traceback (most recent call last)
Cell In[33], line 36
32 bip_fem.set_mcmc_model(prior_type, prior_kwargs, noise_type, noise_kwargs)
34 with bip_fem.mcmc:
35 trace_fem = pm.sample(
---> 36 DRAWS, tune=TUNE, step=Adaptive_Metropolis(**metropolis_kwargs), chains=CHAINS
37 )
38 trace_fem.to_netcdf(path+".nc")
File c:\Users\ivanb\Documents\GitHub\BayesianSurrogateModeling_SemesterProject\src\utils\mcmc_sampler.py:24, in Adaptive_Metropolis.__init__(self, vars, S, proposal_dist, scaling, tune, tune_interval, model, mode, elemwise_update, **kwargs)
11 def __init__(
12 self,
13 vars=None,
(...)
22 **kwargs
23 ):
---> 24 super().__init__(
25 vars, S, proposal_dist, scaling, tune, tune_interval, model, mode, **kwargs
26 )
27 self.elemwise_update = elemwise_update
File ~\AppData\Roaming\Python\Python310\site-packages\pymc\step_methods\metropolis.py:225, in Metropolis.__init__(self, vars, S, proposal_dist, scaling, tune, tune_interval, model, mode, **kwargs)
222 self.mode = mode
224 shared = pm.make_shared_replacements(initial_values, vars, model)
--> 225 self.delta_logp = delta_logp(initial_values, model.logp(), vars, shared)
226 super().__init__(vars, shared)
File ~\AppData\Roaming\Python\Python310\site-packages\pymc\model.py:759, in Model.logp(self, vars, jacobian, sum)
757 rv_logps: List[TensorVariable] = []
758 if rvs:
--> 759 rv_logps = joint_logp(
760 rvs=rvs,
761 rvs_to_values=self.rvs_to_values,
762 rvs_to_transforms=self.rvs_to_transforms,
763 jacobian=jacobian,
764 )
765 assert isinstance(rv_logps, list)
767 # Replace random variables by their value variables in potential terms
File ~\AppData\Roaming\Python\Python310\site-packages\pymc\logprob\basic.py:319, in joint_logp(rvs, rvs_to_values, rvs_to_transforms, jacobian, **kwargs)
315 if values_to_transforms:
316 # There seems to be an incorrect type hint in TransformValuesRewrite
317 transform_rewrite = TransformValuesRewrite(values_to_transforms) # type: ignore
--> 319 temp_logp_terms = factorized_joint_logprob(
320 rvs_to_values,
321 extra_rewrites=transform_rewrite,
322 use_jacobian=jacobian,
323 **kwargs,
324 )
326 # The function returns the logp for every single value term we provided to it.
327 # This includes the extra values we plugged in above, so we filter those we
328 # actually wanted in the same order they were given in.
329 logp_terms = {}
File ~\AppData\Roaming\Python\Python310\site-packages\pymc\logprob\basic.py:209, in factorized_joint_logprob(rv_values, warn_missing_rvs, ir_rewriter, extra_rewrites, **kwargs)
206 while q:
207 node = q.popleft()
--> 209 outputs = get_measurable_outputs(node.op, node)
211 if not outputs:
212 continue
File ~\AppData\Roaming\Python\Python310\site-packages\pymc\logprob\abstract.py:164, in get_measurable_outputs(op, node)
162 """Return only the outputs that are measurable."""
163 if isinstance(op, MeasurableVariable):
--> 164 return _get_measurable_outputs(op, node)
165 else:
166 return []
File c:\Users\ivanb\anaconda3\envs\bayesiansurrogatemodeling\lib\functools.py:889, in singledispatch.<locals>.wrapper(*args, **kw)
885 if not args:
886 raise TypeError(f'{funcname} requires at least '
887 '1 positional argument')
--> 889 return dispatch(args[0].__class__)(*args, **kw)
File ~\AppData\Roaming\Python\Python310\site-packages\pymc\distributions\distribution.py:410, in _get_measurable_outputs_symbolic_random_variable(op, node)
402 @_get_measurable_outputs.register(SymbolicRandomVariable)
403 def _get_measurable_outputs_symbolic_random_variable(op, node):
404 # This tells PyMC that any non RandomType outputs are measurable
(...)
407 # In the rare case this is not what one wants, a specialized _get_measuarable_outputs
408 # can dispatch for a subclassed Op
409 if op.default_output is not None:
--> 410 return [node.default_output()]
412 # Otherwise assume that any outputs that are not of RandomType are measurable
413 return [out for out in node.outputs if not isinstance(out.type, RandomType)]
File ~\AppData\Roaming\Python\Python310\site-packages\pytensor\graph\basic.py:200, in Apply.default_output(self)
198 raise ValueError(f"{self.op}.default_output should be an int or long")
199 elif do < 0 or do >= len(self.outputs):
--> 200 raise ValueError(f"{self.op}.default_output is out of range.")
201 return self.outputs[do]
ValueError: CustomSymbolicDistRV_y{inline=True}.default_output is out of range.