Multivariate Linear Regression with Correlated Input Variables

Would this be a possible solution? The changes with respect to the block of code above are in the section immediately before the declaration of the likelihood.

import pymc3 as pm
import theano.tensor as tt  # needed for tt.split / tt.squeeze below

with pm.Model() as model:
    # Priors for unknown model parameters 
    alpha = pm.Normal("alpha", mu=0, sigma=10)
    beta1 = pm.Normal("beta1", mu=0, sigma=10)
    beta2 = pm.Normal("beta2", mu=0, sigma=10)

    # Prior for covariance
    chol, corr, stds = pm.LKJCholeskyCov(
        "chol", n=2, eta=2.0, 
        sd_dist=pm.Exponential.dist(1.0), compute_corr=True
    )
    
    # Prior for the latent means of the two input variables
    μ = pm.Normal("μ", 0., 10., shape=2, testval=test_values)

    # Joint distribution of the observed inputs, capturing their correlation
    obs = pm.MvNormal("obs", mu=μ, chol=chol, observed=observed_df)

    # Split the observed input matrix back into its two columns
    x1_split, x2_split = tt.split(obs, [1, 1], n_splits=2, axis=1)
    x1_split = tt.squeeze(x1_split)
    x2_split = tt.squeeze(x2_split)

    # Latent "true" inputs, centred on the observed values with measurement error
    X1 = pm.Normal("X1", mu=x1_split, sigma=x1_error, shape=len(x1))
    X2 = pm.Normal("X2", mu=x2_split, sigma=x2_error, shape=len(x2))


    # Likelihood
    mu = alpha + beta1 * X1 + beta2 * X2
    Y_obs = pm.StudentT("Y_obs", mu=mu, sigma=y_error,
                        observed=y, nu=5)