Behaviour of Gibbs kernel seems off

Hi @bwengals — the solution you presented trains OK, but prediction fails. I think the model is not able to extrapolate the lengthscale function to the X_new points, and we need that because the conditional distribution has to compute cross-covariance terms like K_{*n}, the covariance between the new inputs and the training inputs.

  X = np.linspace(-1, 1, 100)[:,None]
    y = (X**3 - X**2 + X)
    
    X_new = np.linspace(-1, 1, 1000)[:,None]

    with pm.Model() as model:
    
        eta_prior = pm.Gamma('eta_prior', alpha=2, beta=0.75)
        l_prior = pm.Gamma('l_prior', alpha=2, beta=0.75)
        base_cov = eta_prior**2 * pm.gp.cov.ExpQuad(1, ls=l_prior)
        base_gp = pm.gp.Latent(cov_func=base_cov) 
        log_wf = base_gp.prior("log_wf", X=X)
        wf = pm.Deterministic("wf", pm.math.exp(log_wf))
        
        def warp_gp_func(x, wf):
            return wf
        
        cov = Gibbs(1, warp_gp_func, args=(wf,))
        
        K = pm.Deterministic("K", cov(X))
        
        gp = pm.gp.Marginal(cov_func=cov)
        #Xu = pm.gp.util.kmeans_inducing_points(20, X)
        #f = gp.prior("f", X=X)
        
        #sigma = pm.Gamma('sigma', alpha=1, beta=2)
        #trace_prior = pm.sample(draws=500, return_inferencedata=False)
        
        y_ = gp.marginal_likelihood("y", X=X, y=y, noise=0.01)
        
        trace = pm.sample(draws=100, tune=500, chains=1)
        
    with model:
         f_pred = gp.conditional("f_pred", X_new)
         pred_samples = pm.sample_posterior_predictive(trace, vars=[f_pred], samples=1000)
Traceback (most recent call last):

  Input In [95] in <cell line: 1>
    f_pred = gp.conditional("f_pred", X_new)

  File ~/anaconda3/lib/python3.8/site-packages/pymc3/gp/gp.py:527 in conditional
    mu, cov = self._build_conditional(Xnew, pred_noise, False, *givens)

  File ~/anaconda3/lib/python3.8/site-packages/pymc3/gp/gp.py:473 in _build_conditional
    Kxs = self.cov_func(X, Xnew)

  File ~/anaconda3/lib/python3.8/site-packages/pymc3/gp/cov.py:82 in __call__
    return self.full(X, Xs)

  File ~/Desktop/Workspace/Kernel_Learning_Latent_GPs/kernels/gibbs1d_pymc3:67 in full
    return at.sqrt((2.0 * at.outer(rx, rz)) / (rx2 + rz2)) * at.exp(-1.0 * r2 / (rx2 + rz2))

  File ~/anaconda3/lib/python3.8/site-packages/theano/tensor/var.py:170 in __truediv__
    return theano.tensor.basic.true_div(self, other)

  File ~/anaconda3/lib/python3.8/site-packages/theano/graph/op.py:253 in __call__
    compute_test_value(node)

  File ~/anaconda3/lib/python3.8/site-packages/theano/graph/op.py:130 in compute_test_value
    required = thunk()

  File ~/anaconda3/lib/python3.8/site-packages/theano/graph/op.py:606 in rval
    thunk()

  File ~/anaconda3/lib/python3.8/site-packages/theano/link/c/basic.py:1771 in __call__
    raise exc_value.with_traceback(exc_trace)

ValueError: Input dimension mis-match. (input[0].shape[1] = 1000, input[1].shape[1] = 100)
1 Like