LLVM ERROR: Unable to allocate section memory!

Sure! Here is a minimal script that reproduces the error:

from sklearn.preprocessing import PolynomialFeatures
import numpy as np
import pymc as pm
import os
os.environ["XLA_PYTHON_CLIENT_PREALLOCATE"]="false"
os.environ["XLA_PYTHON_CLIENT_ALLOCATOR"]="platform"
# Repeatedly build and sample a small degree-2 polynomial regression with
# PyMC (NumPyro NUTS backend), then print posterior-predictive summaries
# for the three design levels.  Re-compiling the model 1000 times stresses
# JAX's JIT/XLA allocator, which is the likely source of the reported
# "Unable to allocate section memory!" error — TODO confirm by checking
# whether memory grows across iterations.
for iteration in range(1000):
    print(f"iteration: {iteration}")

    # Design matrix: three x levels, 10 replicates each; random targets.
    x = np.repeat([[0.], [0.01], [0.02]], 10, axis=0)
    y = np.random.random((30, 1))
    poly = PolynomialFeatures(degree=2)
    x_poly = poly.fit_transform(x)  # columns: [1, x, x^2]

    # Fixed, known observation noise used directly in the likelihood.
    sigma_obs = np.sqrt((1e-10 ** 4) + 0.01)

    with pm.Model():
        alpha = pm.Normal('alpha', mu=y[0][0], sigma=0.1)
        betas = pm.Normal('betas', mu=1, sigma=10, shape=(2 + 1,))

        # NOTE(fix): the original defined an unused
        # pm.HalfNormal('sigma', sigma=1, testval=1.) here.  It was never
        # referenced (the likelihood uses the fixed sigma_obs), and the
        # `testval` keyword was removed in PyMC >= 4 (now `initval`), so the
        # line both crashed on modern PyMC and wasted sampling effort.

        mu = alpha + pm.math.dot(betas, x_poly.T)  # shape (30,)

        # FIX: observed must be 1-D to match mu's (30,) shape.  Passing the
        # (30, 1) column vector broadcasts the likelihood to (30, 30) and
        # silently changes the model.
        y_obs = pm.Normal('y_obs', mu=mu, sigma=sigma_obs, observed=y.ravel())

        # FIX: return_inferencedata=True — the prediction code below reads
        # trace.posterior, which only exists on an InferenceData object; a
        # legacy MultiTrace (returned when False) has no .posterior and
        # raises AttributeError.
        trace = pm.sample(250, nuts_sampler="numpyro", tune=500, chains=5,
                          target_accept=0.9, return_inferencedata=True,
                          random_seed=42)

        # Posterior-predictive mean and 95% interval at each design level.
        for x_level in [0.0, 0.01, 0.02]:
            # Renamed from x_poly to avoid shadowing the training matrix.
            x_poly_level = poly.transform(np.array(x_level).reshape(-1, 1))
            alpha_samples = trace.posterior['alpha'].values.flatten()
            beta_samples = trace.posterior['betas'].values.reshape(-1, 2 + 1)
            y_preds = np.dot(beta_samples, x_poly_level.T) + alpha_samples[:, None]
            mean_prediction = np.mean(y_preds, axis=0)
            lower_bound, upper_bound = np.percentile(y_preds, [2.5, 97.5], axis=0)
            print(f"mean_prediction: {mean_prediction[0]}, lower_bound: {lower_bound[0]}, upper_bound: {upper_bound[0]}")

I should add that I am running this on a compute cluster whose resources are managed by Slurm, and I run several of these jobs in parallel. I am currently checking whether the error also occurs on my local machine when running just a single job.