I am trying to calibrate the two rate constants (tauep and tauet) of my simple enzyme model
dE/dt = tauep*B - tauet*E
while using time-series data of B, but it shows an error. The whole code is given below:
"
def enz_f(E, t, p, t_grid=None, b_vals=None):
    """Right-hand side of the enzyme ODE dE/dt = tauep*B(t) - tauet*E.

    Parameters
    ----------
    E : sequence
        Current state; E[0] is the enzyme level.
    t : float
        Current time.
    p : sequence
        Rate constants: p[0] = tauep, p[1] = tauet.
    t_grid, b_vals : array-like, optional
        Time points and matching B values used to interpolate the forcing
        B(t). Default to the module-level ``times`` and ``B_series`` so
        existing ``odeint(enz_f, ..., args=(p,))`` calls keep working.

    Returns
    -------
    float
        dE/dt at time ``t``.

    NOTE(review): np.interp is fine inside scipy's odeint, but PyMC's
    DifferentialEquation traces this function with symbolic variables,
    where np.interp on a symbolic ``t`` will fail — confirm before sampling.
    """
    if t_grid is None:
        t_grid = times
    if b_vals is None:
        b_vals = B_series
    # Linearly interpolate the forcing series B at the current time.
    B_t = np.interp(t, t_grid, b_vals)
    return (p[0] * B_t) - (p[1] * E[0])
# Observation grid and the (known) forcing series B(t) sampled on it.
times = np.arange(0, 5, 1)
B_series = [125, 126, 127, 128, 129]

E0 = 150        # initial enzyme level
sigma_obs = 2   # observation-noise s.d. used to simulate the data

# Ground-truth rate constants used to generate the synthetic data
# (defined once, then collected into a list — no duplicated literals).
tauep_true = 0.00012
tauet_true = 0.024
true_params = [tauep_true, tauet_true]

# Integrate the ODE at the true parameters to get the noise-free trajectory.
y_true = odeint(enz_f, [E0], t=times, args=(true_params,))

# Add Gaussian noise to simulate observations; seed the generator so the
# synthetic data set — and hence the inference — is reproducible.
rng = np.random.default_rng(42)
yobs = rng.normal(y_true, sigma_obs)
# Symbolic ODE wrapper for PyMC. n_theta must equal the number of elements
# in the theta list passed when calling the model — here the two rate
# constants (tauep, tauet), i.e. n_theta=2. The original n_theta=3
# mismatched the two-element theta and is a likely cause of the error.
ode_model = DifferentialEquation(
    func=enz_f,
    times=times,
    n_states=1,   # one state variable: E
    n_theta=2,    # two parameters: tauep, tauet
    t0=0,
)
# Bayesian calibration of the two rate constants against the noisy data.
with pm.Model() as model:
    # Priors for model parameters.
    # HalfCauchy keeps the noise scale positive and weakly informative.
    sigma = pm.HalfCauchy("sigma", beta=1)
    # LogNormal priors centred (in log space) on the true rate constants,
    # which also constrains both rates to be positive.
    tauep = pm.LogNormal("tauep", mu=np.log(tauep_true), sigma=0.5)
    tauet = pm.LogNormal("tauet", mu=np.log(tauet_true), sigma=0.5)
    # Solve the ODE using the priors, with dynamic B.
    # NOTE(review): theta has 2 elements here, so the DifferentialEquation
    # above must be built with n_theta=2 — confirm they agree.
    # NOTE(review): DifferentialEquation evaluates enz_f symbolically; the
    # np.interp call inside enz_f may not accept a symbolic t — verify.
    ode_solution = ode_model(y0=[E0], theta=[tauep, tauet])
    # Define the likelihood function: Gaussian noise around the ODE solution.
    Y = pm.Normal("Y", mu=ode_solution, sigma=sigma, observed=yobs)
    # Sample from the prior
    prior = pm.sample_prior_predictive()
    # Sample from the posterior
    trace = pm.sample(500, tune=1000, cores=3)
    # Sample from the posterior predictive distribution
    posterior_predictive = pm.sample_posterior_predictive(trace)
# Trace plot for a visual convergence / posterior check.
az.plot_trace(trace)
plt.show()
"
Can anyone suggest where I am going wrong? Or is there a better way to do it?