Hello,

I am unable to calibrate the sigma of the likelihood with the black-box method. I tried to include the variable sigma in the tensor vector theta, but it is not working (see the code below).

Does anyone know how to fix this problem?

```
import pymc3 as pm
import numpy as np
import theano.tensor as tt
import time as time
import seaborn as sns
import scipy.stats as st
# --- sampler configuration ---
ndraws = 1000   # posterior draws per chain
nburn = 200     # tuning steps, discarded
chains = 2
njobs = 1
cores = 2

# --- synthetic data: y = 10*x + 2 plus N(0, 3) noise ---
x = np.arange(0, 24, 1)
data = x*10+2+st.norm.rvs(loc=0, scale=3, size=len(x))

# fixed noise level handed to the black-box Op
sigma = 5

# --- prior hyper-parameters: means and std. devs. for var1 (slope) and var2 (intercept) ---
prio_var1 = 8
prio_var2 = 5
sd_var1 = 3
sd_var2 = 3
def my_model(theta, x):
    """Linear model prediction.

    Parameters
    ----------
    theta : sequence of 3 floats
        (var1, var2, sigma0) = (slope, intercept, noise std. dev.).
        sigma0 rides along in theta so it can be sampled; it is not used
        by the deterministic prediction itself.
    x : np.ndarray
        Independent variable.

    Returns
    -------
    np.ndarray
        Predicted values x*var1 + var2.
    """
    var1, var2, _sigma0 = theta
    return x * var1 + var2


def my_loglike(theta, x, data, sigma):
    """Full Gaussian log-likelihood of `data` under the linear model.

    Parameters
    ----------
    theta : sequence of 3 floats
        (var1, var2, sigma0); sigma0 = theta[2] is the noise std. dev.
    x : np.ndarray
        Independent variable.
    data : np.ndarray
        Observed values.
    sigma : float
        Unused; kept for backward compatibility with the original signature
        (the noise level is read from theta[2] so it can be calibrated).

    Returns
    -------
    float
        sum over data points of log N(data | my_model(theta, x), sigma0).
    """
    model = my_model(theta, x)
    sigma0 = theta[2]
    n = len(data)
    # BUG FIX: the original returned only -(0.5/sigma0**2)*sum((data-model)**2),
    # dropping the -n*log(sigma0) normalisation term of the Gaussian.  Without
    # that term the log-likelihood increases monotonically as sigma0 grows, so
    # sigma0 cannot be calibrated -- the sampler just pushes it toward the
    # prior's upper bound.  The full log-density makes sigma0 identifiable.
    return (-0.5 * n * np.log(2.0 * np.pi * sigma0 ** 2)
            - 0.5 * np.sum((data - model) ** 2) / sigma0 ** 2)
class LogLike(tt.Op):
    """Theano Op wrapping a plain-Python log-likelihood function.

    When called, the Op receives a vector of parameter values (dvector)
    and returns a single scalar (dscalar): the log-likelihood of the
    stored data under those parameters.
    """

    itypes = [tt.dvector]  # input: vector of parameter values
    otypes = [tt.dscalar]  # output: scalar log-likelihood

    def __init__(self, loglike, data, x, sigma):
        """Store the likelihood callable and the fixed quantities it needs.

        Parameters
        ----------
        loglike : callable
            Log-likelihood function, invoked as loglike(theta, x, data, sigma).
        data : array-like
            The "observed" data passed through to `loglike`.
        x : array-like
            The independent variable the model requires.
        sigma : float
            Noise standard deviation passed through to `loglike`.
        """
        self.likelihood = loglike
        self.data = data
        self.x = x
        self.sigma = sigma

    def perform(self, node, inputs, outputs):
        # Theano invokes this with the concrete parameter vector.
        (theta,) = inputs
        result = self.likelihood(theta, self.x, self.data, self.sigma)
        # Write the scalar log-likelihood into the output storage slot.
        outputs[0][0] = np.array(result)
# Instantiate the black-box Op once, binding the observed data and noise level.
logl = LogLike(my_loglike, data, x, sigma)


def my_mu(v):
    """Adapter handed to pm.DensityDist: evaluate the log-likelihood Op on v."""
    return logl(v)
# use PyMC3 to sample from the log-likelihood
if __name__ == "__main__":
    with pm.Model() as model1:
        # Priors on the linear-model coefficients (slope and intercept).
        var1 = pm.Normal('var1', mu=prio_var1, sd=sd_var1)
        var2 = pm.Normal('var2', mu=prio_var2, sd=sd_var2)
        # Prior on the noise standard deviation, sampled alongside var1/var2.
        sigma0 = pm.Uniform('sigma0', lower=0.1, upper=10)
        #sigma0 = pm.HalfNormal('sigma0', sd=4)
        # Pack all three free parameters into one tensor vector for the Op.
        theta = tt.as_tensor_variable([var1, var2,sigma0])
        # Black-box likelihood: DensityDist calls my_mu(theta) to get the logp.
        pm.DensityDist('likelihood',my_mu , observed={'v': theta})#
        # Slice sampling: the black-box Op provides no gradients, so
        # gradient-based samplers (NUTS) are not usable here.
        step = pm.Slice()
        trace = pm.sample(ndraws, tune=nburn, discard_tuned_samples=True, chains=chains, step=step,cores=cores)
        tracedf=pm.trace_to_dataframe(trace)
        # NOTE(review): process_time() is read here but no start time was ever
        # recorded, so this value alone does not measure sampling duration.
        tim_end=time.process_time()
        # Diagnostics and posterior plots for the three sampled parameters.
        pm.traceplot(trace,varnames=['var1','var2','sigma0'])#,varnames=[var1,var2]
        sns.pairplot(tracedf[['var1','var2','sigma0']])
        pm.plot_posterior(trace,varnames=['var1','var2','sigma0'])
        pm.autocorrplot(trace,varnames=['var1','var2','sigma0']);
        pm.traceplot(trace,combined=True,priors=[var1.distribution, var2.distribution, sigma0.distribution])
```