I’m using PyMC v5 with a Normal likelihood, but I got an error and I don’t know why. My code is:
import numpy as np
import pymc as pm
import arviz as az
from pytensor.tensor import conv

def Ft(A, sigma2, mu2, a):
    # time grid matching the observed trace t4_s
    t_min = 0
    t_max = t4_s.shape[0] * (t4_s[1] - t4_s[0])
    num = t4_s.shape[0]
    t = np.linspace(t_min, t_max, num)
    # Gaussian of width sigma2, centred at mu2 - a, scaled by A
    tau = 1 / sigma2**2
    ft1 = A * pm.math.sqrt(tau / (2 * np.pi)) * pm.math.exp(-tau / 2 * (t + a - mu2)**2)
    # causal 1-D convolution of the pulse with IRF (defined elsewhere)
    IRF1 = IRF[None, None, :]
    ft2 = ft1[None, None, :]
    ft = conv.causal_conv1d(ft2, IRF1, filter_shape=(1, 1, IRF1.shape[2])).squeeze()
    # debugging prints
    print(type(ft1))
    print(ft1.type)
    print(type(ft1[None, None, :]))
    print(ft1[None, None, :].type)
    print(type(ft))
    print(ft.type)
    print(ft)
    return ft
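
As a quick sanity check on the shapes, Ft can also be evaluated with plain numbers outside of any model (a minimal sketch; the parameter values are made up, and it assumes t4_s, IRF and v4_s are 1-D NumPy arrays defined earlier):

# quick shape check with fixed numbers instead of random variables
ft_test = Ft(-0.5, 10.0, 0.0, 1.0)   # hypothetical parameter values
print(ft_test.shape.eval())          # should match v4_s.shape
print(v4_s.shape)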
#%%
with pm.Model() as final_model:
    # priors
    amp = pm.Uniform('amp', lower=-1.0, upper=0)
    mu1 = pm.Uniform('mu1', lower=-50, upper=50)
    sigma1 = pm.Uniform('sigma1', lower=0, upper=50)
    a1 = pm.Uniform('a1', lower=-10, upper=10)
    # Normal likelihood with the convolved pulse as the mean
    y_observed = pm.Normal(
        "y_observed",
        mu=Ft(amp, sigma1, mu1, a1),
        sigma=noise_sig,
        observed=v4_s,
    )
    output = pm.Deterministic('output', Ft(amp, sigma1, mu1, a1))
    prior = pm.sample_prior_predictive()
    posterior_f = pm.sample(draws=500, target_accept=0.9, chains=4, cores=4)
    posterior_f = pm.sample_posterior_predictive(posterior_f)

az.plot_trace(posterior_f, var_names=['amp', 'mu1', 'sigma1', 'a1'])
only_background = az.summary(posterior_f, var_names=['amp', 'mu1', 'sigma1', 'a1'])
I got this error:
ValueError: Can not extract posterior from Inference data with groups:
> posterior_predictive
> observed_data! See /Users/majunfei/anaconda3/lib/python3.11/site-packages/arviz/data/converters.py for other conversion utilities.
I think this may be caused by how I pass the parameters, so I printed every variable in the function that is used to calculate mu. I got:
print(type(ft1))
> <class 'pytensor.tensor.var.TensorVariable'>
print(ft1.type)
> Vector(float64, shape=(5000,))
print(type(ft1[None, None, :]))
> <class 'pytensor.tensor.var.TensorVariable'>
print(ft1[None, None, :].type)
> Tensor3(float64, shape=(1, 1, 5000))
print(type(ft))
> <class 'pytensor.tensor.var.TensorVariable'>
print(ft.type)
> Vector(float64, shape=(?,))
print(ft)
> DropDims{axes=[0, 1]}.0
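
Could the problem also be that I overwrite posterior_f with the return value of pm.sample_posterior_predictive, so az.plot_trace no longer sees a posterior group? For reference, this is the variant I was going to try next (just a sketch; the extend_inferencedata keyword is my reading of the v5 docs):

with final_model:
    idata = pm.sample(draws=500, target_accept=0.9, chains=4, cores=4)
    # add the posterior predictive as an extra group on the same InferenceData
    # instead of replacing the sampling trace with the return value
    pm.sample_posterior_predictive(idata, extend_inferencedata=True)

az.plot_trace(idata, var_names=['amp', 'mu1', 'sigma1', 'a1'])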