Hi!
I am trying to implement a 2-D marginalized GMM, defined as follows:
p(x_n | \pi, \mu, \tau) = \sum_{k=1}^K \pi_k \, \text{Normal}(x_n | \mu_k, \tau_k)
p(\pi) = \text{Dirichlet}(\pi | \alpha \mathbf{1}_K)
p(\mu_k) = \text{Normal}(\mu_k | 0, I)
p(\tau_k) = \text{Gamma}(\tau_k | a, b)
My data has the following shape:

```python
X.shape
# (500, 2)
```
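In case it helps to reproduce, here is a minimal sketch of how data with this shape could be generated (the component parameters below are hypothetical, chosen only to produce an array of the right shape):

```python
import numpy as np

rng = np.random.RandomState(0)

# Hypothetical ground truth: 3 components in 2-D.
true_ws = np.array([0.3, 0.4, 0.3])
true_mus = np.array([[-2.0, -2.0], [0.0, 2.0], [2.0, -1.0]])
true_sds = np.array([[0.5, 0.5], [0.7, 0.3], [0.4, 0.6]])

z = rng.choice(3, size=500, p=true_ws)    # latent component assignments
X = rng.normal(true_mus[z], true_sds[z])  # observations, shape (500, 2)
```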
According to an example in pymc3/tests/test_mixture.py:
```python
def test_normal_mixture_nd(self):
    nd, ncomp = 3, 5

    with Model() as model0:
        mus = Normal('mus', shape=(nd, ncomp))
        taus = Gamma('taus', alpha=1, beta=1, shape=(nd, ncomp))
        ws = Dirichlet('ws', np.ones(ncomp))
        mixture0 = NormalMixture('m', w=ws, mu=mus, tau=taus, shape=nd)

    with Model() as model1:
        mus = Normal('mus', shape=(nd, ncomp))
        taus = Gamma('taus', alpha=1, beta=1, shape=(nd, ncomp))
        ws = Dirichlet('ws', np.ones(ncomp))
        comp_dist = [Normal.dist(mu=mus[:, i], tau=taus[:, i])
                     for i in range(ncomp)]
        mixture1 = Mixture('m', w=ws, comp_dists=comp_dist, shape=nd)

    testpoint = model0.test_point
    testpoint['mus'] = np.random.randn(nd, ncomp)
    assert_allclose(model0.logp(testpoint), model1.logp(testpoint))
    assert_allclose(mixture0.logp(testpoint), mixture1.logp(testpoint))
```
I create the model as follows:
```python
import numpy as np
import pymc3 as pm

# set up model
nd = 2
ncomp = 3

with pm.Model() as model0:
    mus = pm.Normal('mus', shape=(nd, ncomp))
    taus = pm.Gamma('taus', alpha=1, beta=1, shape=(nd, ncomp))
    ws = pm.Dirichlet('ws', np.ones(ncomp))
    mixture0 = pm.NormalMixture('m', w=ws, mu=mus, tau=taus, observed=X)
```
But it throws an error:

```
Input dimension mis-match. (input[0].shape[0] = 500, input[1].shape[0] = 2)
```

I suspect the observed data of shape (500, 2) is being broadcast against `mus`, which has shape (nd, ncomp) = (2, 3), along the wrong axis.
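For reference, here is the direction I have tried as a workaround, following the `model1` pattern from the test but with multivariate components, so that each component yields one log-probability per row of `X`. This is only a sketch based on my reading of the example (the use of `pm.MvNormal.dist` with a diagonal precision matrix built via `tt.diag` is my own guess, not something the test does):

```python
import theano.tensor as tt

with pm.Model() as model1:
    mus = pm.Normal('mus', shape=(ncomp, nd))
    taus = pm.Gamma('taus', alpha=1, beta=1, shape=(ncomp, nd))
    ws = pm.Dirichlet('ws', np.ones(ncomp))

    # One 2-D Gaussian per component; tt.diag turns the per-dimension
    # precisions into a diagonal precision matrix.
    comp_dists = [pm.MvNormal.dist(mu=mus[i], tau=tt.diag(taus[i]))
                  for i in range(ncomp)]
    mixture1 = pm.Mixture('m', w=ws, comp_dists=comp_dists, observed=X)
```

Is this the right way to handle 2-D observed data, or is there a way to make `NormalMixture` broadcast correctly?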
Could anyone provide some help with this problem? Many thanks!