I ended up adding my own covariance class to my pymc3/gp/cov.py file for this, in case anybody is interested. You will have to supply your own network matrix M (an adjacency matrix of weights), something like:
M = np.cov(X, rowvar=False, bias=True)          # empirical covariance between the columns of X
M = M - np.eye(len(M)) * M.diagonal()           # zero out the diagonal
M = M - np.nansum(M, axis=0) * np.eye(len(M))   # put minus the column sums on the diagonal, i.e. a negated weighted graph Laplacian
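As a quick sanity check (my addition, assuming X contains no NaNs), every column of M should now sum to zero, as you would expect from a Laplacian-style matrix:

assert np.allclose(M.sum(axis=0), 0.0)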
and then pass it in when you instantiate the covariance, along with the hyperparameters sigf (signal variance) and ls (characteristic length-scale):
Σprior = pm.gp.cov.Diffusion(input_dim=X.shape[1], active_dims=np.arange(X.shape[1]), M=M, sigf=σf, ls=ℓ)
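For context, here is a minimal sketch of how the kernel can sit inside a model. The priors and the data names X, y are placeholders of my choosing, not part of the original setup:

import numpy as np
import pymc3 as pm

with pm.Model() as model:
    σf = pm.HalfNormal("σf", sd=1.0)        # signal variance
    ℓ = pm.Gamma("ℓ", alpha=2.0, beta=1.0)  # length-scale
    Σprior = pm.gp.cov.Diffusion(input_dim=X.shape[1], active_dims=np.arange(X.shape[1]), M=M, sigf=σf, ls=ℓ)
    gp = pm.gp.Marginal(cov_func=Σprior)
    σn = pm.HalfNormal("σn", sd=1.0)        # observation noise
    y_ = gp.marginal_likelihood("y_obs", X=X, y=y, noise=σn)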
For the pymc3/gp/cov.py file:
class Diffusion(Covariance):
    R"""
    Random walk (diffusion kernel) based on the linear covariance.
    W. Gregory
    """

    def __init__(self, input_dim, M, sigf=None, ls=None, ls_inv=None, active_dims=None):
        # ls_inv is accepted for signature parity with the other kernels but is unused here
        super().__init__(input_dim, active_dims)
        self.ls = tt.as_tensor_variable(ls)
        self.sigf = tt.as_tensor_variable(sigf)
        self.M = tt.as_tensor_variable(M)

    def full(self, X, Xs=None):
        X, Xs = self._slice(X, Xs)
        if Xs is None:
            # K(X, X) or K(X*, X*): n x n (or n* x n*)
            K = tt.dot(tt.dot(X, self.sigf * tt.exp(self.M * self.ls)), tt.transpose(X))
        else:
            # K(X, X*): n x n*
            K = tt.dot(tt.dot(X, self.sigf * tt.exp(self.M * self.ls)), tt.transpose(Xs))
        return tt.patternbroadcast(K, (False, False))

    def diag(self, X):
        # Covariance.__call__ dispatches here when called with diag=True (e.g. during prediction)
        return tt.diag(self.full(X))
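One caveat worth flagging: tt.exp above is an elementwise exponential, whereas the diffusion kernel in the Kondor & Lafferty sense is built from the matrix exponential of the (negative) Laplacian. If that is the behavior you are after, a variant of full using theano.tensor.slinalg.expm would look like the sketch below. This is my assumption, not part of the snippet above; as far as I know Theano only implements the gradient of expm for symmetric matrices, which M is by construction:

# at the top of pymc3/gp/cov.py:
from theano.tensor import slinalg

# drop-in replacement for Diffusion.full:
def full(self, X, Xs=None):
    X, Xs = self._slice(X, Xs)
    Kmat = self.sigf * slinalg.expm(self.M * self.ls)  # matrix exponential, not elementwise
    if Xs is None:
        return tt.patternbroadcast(tt.dot(tt.dot(X, Kmat), tt.transpose(X)), (False, False))
    return tt.patternbroadcast(tt.dot(tt.dot(X, Kmat), tt.transpose(Xs)), (False, False))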