Thanks Junpeng,
I had some trouble casting to theano tensors, but the new model below works OK.
Q: are the two models identical? I'm getting more accurate results with model_oxen2 (tighter HPDI, and the mean is closer to the ideal value).
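In case it helps read the model, the data arrays it refers to (tea_obs, s_obs, s_obs_idx, s_not_obs_idx) could be set up along these lines. This is only a simulated-data sketch; N, the true_* values, and the 50/50 missingness are illustrative choices of mine, not anything from the thread:

import numpy as np

rng = np.random.default_rng(0)
N = 100
true_p_stabled, true_p_drink, true_p_cheat = 0.7, 0.9, 0.3

stabled = rng.binomial(1, true_p_stabled, size=N)           # did the kid stable the ox?
tea_obs = np.where(stabled == 1,
                   rng.binomial(1, true_p_drink, size=N),   # drinks tea if ox stabled
                   rng.binomial(1, true_p_cheat, size=N))   # drinks tea if ox not stabled

observed_mask = rng.binomial(1, 0.5, size=N).astype(bool)   # ox status seen for roughly half the kids
s_obs_idx = np.where(observed_mask)[0]                      # rows where ox status is observed
s_not_obs_idx = np.where(~observed_mask)[0]                 # rows where ox status is missing
s_obs = stabled                                             # stabled status, indexed via s_obs_idx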
import pymc3 as pm
import theano.tensor as T

with pm.Model() as model_oxen2:
    # priors
    p_cheat = pm.Beta('p_cheat', 2, 2)      # prob of drinking tea if ox not stabled
    p_drink = pm.Beta('p_drink', 2, 2)      # prob of drinking tea if ox stabled
    p_stabled = pm.Beta('p_stabled', 2, 2)  # prob the kid stabled the ox

    # ox status not observed: marginalize out ox / no-ox with a mixture
    pr_tea_ox = pm.Bernoulli.dist(p=p_drink)
    pr_tea_noox = pm.Bernoulli.dist(p=p_cheat)
    _w = T.stack([p_stabled, 1 - p_stabled])
    _comp_dists = [pr_tea_ox, pr_tea_noox]
    likelihood_s_not_obs = pm.Mixture('likelihood_s_not_obs', w=_w,
                                      comp_dists=_comp_dists,
                                      observed=tea_obs[s_not_obs_idx])

    # ox status observed
    _pi = pm.Deterministic('pi', s_obs[s_obs_idx] * p_drink + (1 - s_obs[s_obs_idx]) * p_cheat)
    likelihood_tea = pm.Bernoulli('likelihood_tea', p=_pi, observed=tea_obs[s_obs_idx])
    likelihood_s_obs = pm.Bernoulli('likelihood_s_obs', p=p_stabled, observed=s_obs[s_obs_idx])
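For the comparison in the question above, something like the following is what I have in mind (a sketch only; model_oxen is just a placeholder name for the first model, and the sampler settings are arbitrary):

with model_oxen2:
    trace2 = pm.sample(2000, tune=1000)
print(pm.summary(trace2))   # posterior means and HPD intervals for p_cheat, p_drink, p_stabled

# same thing for the first model (placeholder name):
# with model_oxen:
#     trace1 = pm.sample(2000, tune=1000)
# print(pm.summary(trace1))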