The code is similar to the code in the black-box likelihood tutorial, combined with the first simple example from the CustomDist documentation.
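For the snippets below to run on their own, I am assuming imports and toy data roughly in the spirit of the black-box tutorial; the seed, N, sigma, freq, and the body of my_model1 here are placeholders, not the real ones:

import numpy as np
import pymc as pm
import pytensor.tensor as pt
from pytensor.tensor import TensorVariable

# --- placeholder setup, assumed for the snippets below ---
rng = np.random.default_rng(42)  # assumed seed
N = 100                          # assumed number of data points
sigma = 1.0                      # assumed noise standard deviation
freq = np.linspace(0.0, 9.0, N)  # assumed frequency grid

def my_model1(theta, freq):
    # placeholder forward model; the real my_model1 is defined elsewhere
    d_c, a_c = theta
    return d_c + a_c * freq

truemodel = my_model1([0.005, 0.05], freq)  # assumed "true" parameter values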
def my_loglike1(theta, freq, data, sigma):
    # Gaussian log-likelihood of the data given the model, up to an additive constant
    model = my_model1(theta, freq)
    return -(0.5 / sigma**2) * np.sum((data - model) ** 2)
class LogLike(pt.Op):
    itypes = [pt.dvector]  # expects a vector of parameter values when called
    otypes = [pt.dscalar]  # outputs a single scalar value (the log-likelihood)

    def __init__(self, loglike, data, freq, sigma):
        # add inputs as class attributes
        self.likelihood = loglike
        self.data = data
        self.freq = freq
        self.sigma = sigma

    def perform(self, node, inputs, outputs):
        # the method that is used when calling the Op
        (theta,) = inputs  # this will contain my variables
        # call the log-likelihood function
        logl = self.likelihood(theta, self.freq, self.data, self.sigma)
        outputs[0][0] = np.array(logl)  # output the log-likelihood
# generate noisy synthetic data and wrap the log-likelihood function in the Op
data = sigma * rng.normal(size=N) + truemodel
logl1 = LogLike(my_loglike1, data, freq, sigma)
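As an optional sanity check, the wrapped Op can be evaluated eagerly on a test point before building the model (the parameter values here are arbitrary placeholders):

test_theta = pt.as_tensor_variable([0.005, 0.05])
print(logl1(test_theta).eval())  # should print a single finite log-likelihood value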
def logp(value: TensorVariable, mu: TensorVariable) -> TensorVariable:
    # unnormalized Normal-like logp from the CustomDist docs example
    return -((value - mu) ** 2)
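(Optional) The logp is just an unnormalized quadratic penalty, which a quick eager evaluation confirms:

print(logp(pt.constant(1.0), pt.constant(0.0)).eval())  # -(1 - 0)**2 = -1.0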
# use PyMC to sample from the log-likelihood
if __name__ == '__main__':
    with pm.Model() as BI_model1:
        # uniform priors
        d_c = pm.Uniform("d_c_model1", lower=0.001, upper=0.009)
        a_c = pm.Uniform("a_c_model1", lower=0.015, upper=0.09)
        mu = pm.Normal("mu", 0, 1)

        # convert to a tensor vector
        theta = pt.as_tensor_variable([d_c, a_c])

        # use a Potential to "call" the Op and include it in the logp computation
        pm.Potential("likelihood", logl1(theta))
        pm.CustomDist("custom_dist", mu, logp=logp, observed=data)

        # sample with SMC, using a custom number of draws instead of the NUTS-based defaults of pm.sample
        idata_mh = pm.sample_smc(
            draws=100,
            cores=10,
            chains=4,
            return_inferencedata=True,
            idata_kwargs=dict(log_likelihood=True),
        )
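After sampling, the trace can be inspected with ArviZ as usual; a minimal sketch, assuming arviz is installed:

import arviz as az

print(az.summary(idata_mh, var_names=["d_c_model1", "a_c_model1", "mu"]))
az.plot_trace(idata_mh)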