This is really quite puzzling: the computation all seems correct, and if I construct the model by hand it all works fine…
import pymc3 as pm
import theano.tensor as tt
import matplotlib.pyplot as plt


class Ordered(pm.distributions.transforms.ElemwiseTransform):
    name = "ordered"

    def forward(self, y):
        # map an ordered vector to unconstrained space:
        # x[0] = y[0], x[i] = log(y[i] - y[i-1]) for i > 0
        x = tt.zeros(y.shape)
        x = tt.inc_subtensor(x[0], y[0])
        x = tt.inc_subtensor(x[1:], tt.log(y[1:] - y[:-1]))
        return x

    def forward_val(self, x, point=None):
        x, = pm.distributions.distribution.draw_values([x], point=point)
        return self.forward(x)

    def backward(self, x):
        # inverse map: y[i] = x[0] + sum(exp(x[1:i+1])), built with a cumsum
        y = tt.zeros(x.shape)
        y = tt.inc_subtensor(y[0], x[0])
        y = tt.inc_subtensor(y[1:], tt.exp(x[1:]))
        return tt.cumsum(y)

    def jacobian_det(self, y):
        # log|det J| of backward: the Jacobian is lower triangular with
        # diagonal (1, exp(y[1]), ..., exp(y[n-1]))
        return tt.sum(y[1:])
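Purely as a sanity check on the transform itself (this snippet is my own addition, not part of the original model, and the test values are arbitrary), backward should invert forward, so a round trip on a concrete ordered vector ought to return it unchanged:

import numpy as np
import theano

ord_trans = Ordered()
y_test = np.array([0.1, 0.5, 2.0], dtype=theano.config.floatX)  # an already-ordered vector
x_test = ord_trans.forward(y_test).eval()    # map to unconstrained space
y_back = ord_trans.backward(x_test).eval()   # map back; should recover y_test
print(np.allclose(y_back, y_test))           # expect True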
with pm.Model() as mtest:
    # unconstrained free variable
    x = pm.Flat('x_real', shape=2, testval=[2., 1.])

    # ordered transform applied "by hand": add its log-Jacobian as a Potential
    Order = Ordered()
    pm.Potential('y1', Order.jacobian_det(x))
    x1 = Order.backward(x)

    # squeeze into (0, 1) with the Interval transform, adding the Uniform
    # log-density plus the Interval log-Jacobian as another Potential
    Interval = pm.distributions.transforms.Interval(0., 1.)
    x2 = pm.Deterministic('x_interval_ordered', Interval.backward(x1))
    pm.Potential('y2', pm.Uniform.dist(0, 1).logp(x2) + Interval.jacobian_det(x1))

    trtest = pm.sample(5000, init='adapt_diag')
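And if the by-hand construction really is fine, every posterior draw of x_interval_ordered should be non-decreasing and stay inside (0, 1). A quick check along those lines (again just a sketch I use to convince myself, assuming the Deterministic is stored in the trace under that name):

import numpy as np

samples = trtest['x_interval_ordered']
print(np.all(np.diff(samples, axis=1) >= 0))   # ordering holds in every draw?
print(np.all((samples > 0) & (samples < 1)))   # interval constraint holds?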
_, ax = plt.subplots(1, 2, figsize=(10, 5))
for ivar, varname in enumerate(trtest.varnames):
    ax[ivar].scatter(trtest[varname][:, 0], trtest[varname][:, 1], alpha=.01)
    ax[ivar].set_title(varname)
plt.tight_layout();