Simple PyMC tutorial raises ImportError

I was trying this example, but it fails with the error below.

import numpy as np
import pandas as pd
import pymc as pm

dfhogg = pd.DataFrame(
    np.array(
        [
            [1, 201, 592, 61, 9, -0.84],
            [2, 244, 401, 25, 4, 0.31],
            [3, 47, 583, 38, 11, 0.64],
            [4, 287, 402, 15, 7, -0.27],
            [5, 203, 495, 21, 5, -0.33],
            [6, 58, 173, 15, 9, 0.67],
            [7, 210, 479, 27, 4, -0.02],
            [8, 202, 504, 14, 4, -0.05],
            [9, 198, 510, 30, 11, -0.84],
            [10, 158, 416, 16, 7, -0.69],
            [11, 165, 393, 14, 5, 0.30],
            [12, 201, 442, 25, 5, -0.46],
            [13, 157, 317, 52, 5, -0.03],
            [14, 131, 311, 16, 6, 0.50],
            [15, 166, 400, 34, 6, 0.73],
            [16, 160, 337, 31, 5, -0.52],
            [17, 186, 423, 42, 9, 0.90],
            [18, 125, 334, 26, 8, 0.40],
            [19, 218, 533, 16, 6, -0.78],
            [20, 146, 344, 22, 5, -0.56],
        ]
    ),
    columns=["id", "x", "y", "sigma_y", "sigma_x", "rho_xy"],
)

dfhogg["id"] = dfhogg["id"].apply(lambda x: "p{}".format(int(x)))
dfhogg.set_index("id", inplace=True)

# standardize x and y (mean-center and scale by 2 standard deviations)
dfhoggs = (dfhogg[["x", "y"]] - dfhogg[["x", "y"]].mean(0)) / (2 * dfhogg[["x", "y"]].std(0))
dfhoggs["sigma_x"] = dfhogg["sigma_x"] / (2 * dfhogg["x"].std())
dfhoggs["sigma_y"] = dfhogg["sigma_y"] / (2 * dfhogg["y"].std())


coords = {"coefs": ["intercept", "slope"], "datapoint_id": dfhoggs.index}
with pm.Model(coords=coords) as mdl_studentt:

    # define weakly informative Normal priors to give Ridge regression
    beta = pm.Normal("beta", mu=0, sigma=10, dims="coefs")

    # define linear model
    y_est = beta[0] + beta[1] * dfhoggs["x"]

    # define prior for StudentT degrees of freedom
    # InverseGamma has nice properties:
    # it's continuous and has support x ∈ (0, inf)
    nu = pm.InverseGamma("nu", alpha=1, beta=1)

    # define Student T likelihood
    pm.StudentT(
        "y", mu=y_est, sigma=dfhoggs["sigma_y"], nu=nu, observed=dfhoggs["y"], dims="datapoint_id"
    )
    trc_studentt = pm.sample(
        tune=5000,
        draws=500,
        chains=4,
        cores=4,
        init="advi+adapt_diag",
        n_init=50000,
    )

I installed PyMC 5.2 with pip on a 14-inch MacBook Pro (2021, macOS 13.1, M1 Pro with 6 performance + 2 efficiency CPU cores, 16-core GPU, 16-core Neural Engine, 32 GB RAM).
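In case it is relevant, here is a minimal way to check the installed versions (standard `__version__` attributes; I have not pasted the exact output here):

import pymc as pm
import pytensor

print(pm.__version__)        # 5.2.x in my environment
print(pytensor.__version__)  # the backend whose compiled lazylinker fails to load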

I get the following error:

---------------------------------------------------------------------------
ImportError                               Traceback (most recent call last)
File ~/miniforge3/lib/python3.10/site-packages/pytensor/link/c/lazylinker_c.py:79
     78         if version != actual_version:
---> 79             raise ImportError(
     80                 "Version check of the existing lazylinker compiled file."
     81                 f" Looking for version {version}, but found {actual_version}. "
     82                 f"Extra debug information: force_compile={force_compile}, _need_reload={_need_reload}"
     83             )
     84 except ImportError:

ImportError: Version check of the existing lazylinker compiled file. Looking for version 0.212, but found 0.211. Extra debug information: force_compile=False, _need_reload=True

During handling of the above exception, another exception occurred:

ImportError                               Traceback (most recent call last)
File ~/miniforge3/lib/python3.10/site-packages/pytensor/link/c/lazylinker_c.py:100
     99     if version != actual_version:
--> 100         raise ImportError(
    101             "Version check of the existing lazylinker compiled file."
    102             f" Looking for version {version}, but found {actual_version}. "
    103             f"Extra debug information: force_compile={force_compile}, _need_reload={_need_reload}"
    104         )
    105 except ImportError:
    106     # It is useless to try to compile if there isn't any
    107     # compiler!  But we still want to try to load it, in case
    108     # the cache was copied from another computer.

ImportError: Version check of the existing lazylinker compiled file. Looking for version 0.212, but found 0.211. Extra debug information: force_compile=False, _need_reload=True

During handling of the above exception, another exception occurred:

AssertionError                            Traceback (most recent call last)
Cell In[65], line 19
     15 # define Student T likelihood
     16 pm.StudentT(
     17     "y", mu=y_est, sigma=dfhoggs["sigma_y"], nu=nu, observed=dfhoggs["y"], dims="datapoint_id"
     18 )
---> 19 trc_studentt = pm.sample(
     20     tune=5000,
     21     draws=500,
     22     chains=4,
     23     cores=4,
     24     init="advi+adapt_diag",
     25     n_init=50000,
     26 )

File ~/miniforge3/lib/python3.10/site-packages/pymc/sampling/mcmc.py:564, in sample(draws, tune, chains, cores, random_seed, progressbar, step, nuts_sampler, initvals, init, jitter_max_retries, n_init, trace, discard_tuned_samples, compute_convergence_checks, keep_warning_stat, return_inferencedata, idata_kwargs, nuts_sampler_kwargs, callback, mp_ctx, model, **kwargs)
    561         auto_nuts_init = False
    563 initial_points = None
--> 564 step = assign_step_methods(model, step, methods=pm.STEP_METHODS, step_kwargs=kwargs)
    566 if nuts_sampler != "pymc":
    567     if not isinstance(step, NUTS):

File ~/miniforge3/lib/python3.10/site-packages/pymc/sampling/mcmc.py:203, in assign_step_methods(model, step, methods, step_kwargs)
    195         selected = max(
    196             methods,
    197             key=lambda method, var=rv_var, has_gradient=has_gradient: method._competence(
    198                 var, has_gradient
    199             ),
    200         )
    201         selected_steps[selected].append(var)
--> 203 return instantiate_steppers(model, steps, selected_steps, step_kwargs)

File ~/miniforge3/lib/python3.10/site-packages/pymc/sampling/mcmc.py:116, in instantiate_steppers(model, steps, selected_steps, step_kwargs)
    114         args = step_kwargs.get(step_class.name, {})
    115         used_keys.add(step_class.name)
--> 116         step = step_class(vars=vars, model=model, **args)
    117         steps.append(step)
    119 unused_args = set(step_kwargs).difference(used_keys)

File ~/miniforge3/lib/python3.10/site-packages/pymc/step_methods/hmc/nuts.py:180, in NUTS.__init__(self, vars, max_treedepth, early_max_treedepth, **kwargs)
    122 def __init__(self, vars=None, max_treedepth=10, early_max_treedepth=8, **kwargs):
    123     r"""Set up the No-U-Turn sampler.
    124 
    125     Parameters
   (...)
    178     `pm.sample` to the desired number of tuning steps.
    179     """
--> 180     super().__init__(vars, **kwargs)
    182     self.max_treedepth = max_treedepth
    183     self.early_max_treedepth = early_max_treedepth

File ~/miniforge3/lib/python3.10/site-packages/pymc/step_methods/hmc/base_hmc.py:109, in BaseHMC.__init__(self, vars, scaling, step_scale, is_cov, model, blocked, potential, dtype, Emax, target_accept, gamma, k, t0, adapt_step_size, step_rand, **pytensor_kwargs)
    107 else:
    108     vars = get_value_vars_from_user_vars(vars, self._model)
--> 109 super().__init__(vars, blocked=blocked, model=self._model, dtype=dtype, **pytensor_kwargs)
    111 self.adapt_step_size = adapt_step_size
    112 self.Emax = Emax

File ~/miniforge3/lib/python3.10/site-packages/pymc/step_methods/arraystep.py:164, in GradientSharedStep.__init__(self, vars, model, blocked, dtype, logp_dlogp_func, **pytensor_kwargs)
    161 model = modelcontext(model)
    163 if logp_dlogp_func is None:
--> 164     func = model.logp_dlogp_function(vars, dtype=dtype, **pytensor_kwargs)
    165 else:
    166     func = logp_dlogp_func

File ~/miniforge3/lib/python3.10/site-packages/pymc/model.py:644, in Model.logp_dlogp_function(self, grad_vars, tempered, **kwargs)
    641     costs = [self.logp()]
    643 input_vars = {i for i in graph_inputs(costs) if not isinstance(i, Constant)}
--> 644 ip = self.initial_point(0)
    645 extra_vars_and_values = {
    646     var: ip[var.name]
    647     for var in self.value_vars
    648     if var in input_vars and var not in grad_vars
    649 }
    650 return ValueGradFunction(costs, grad_vars, extra_vars_and_values, **kwargs)

File ~/miniforge3/lib/python3.10/site-packages/pymc/model.py:1127, in Model.initial_point(self, random_seed)
   1114 def initial_point(self, random_seed: SeedSequenceSeed = None) -> Dict[str, np.ndarray]:
   1115     """Computes the initial point of the model.
   1116 
   1117     Parameters
   (...)
   1125         Maps names of transformed variables to numeric initial values in the transformed space.
   1126     """
-> 1127     fn = make_initial_point_fn(model=self, return_transformed=True)
   1128     return Point(fn(random_seed), model=self)

File ~/miniforge3/lib/python3.10/site-packages/pymc/initial_point.py:152, in make_initial_point_fn(model, overrides, jitter_rvs, default_strategy, return_transformed)
    149 # Replace original rng shared variables so that we don't mess with them
    150 # when calling the final seeded function
    151 initial_values = replace_rng_nodes(initial_values)
--> 152 func = compile_pymc(inputs=[], outputs=initial_values, mode=pytensor.compile.mode.FAST_COMPILE)
    154 varnames = []
    155 for var in model.free_RVs:

File ~/miniforge3/lib/python3.10/site-packages/pymc/pytensorf.py:1149, in compile_pymc(inputs, outputs, random_seed, mode, **kwargs)
   1147 opt_qry = mode.provided_optimizer.including("random_make_inplace", check_parameter_opt)
   1148 mode = Mode(linker=mode.linker, optimizer=opt_qry)
-> 1149 pytensor_function = pytensor.function(
   1150     inputs,
   1151     outputs,
   1152     updates={**rng_updates, **kwargs.pop("updates", {})},
   1153     mode=mode,
   1154     **kwargs,
   1155 )
   1156 return pytensor_function

File ~/miniforge3/lib/python3.10/site-packages/pytensor/compile/function/__init__.py:315, in function(inputs, outputs, mode, updates, givens, no_default_updates, accept_inplace, name, rebuild_strict, allow_input_downcast, profile, on_unused_input)
    309     fn = orig_function(
    310         inputs, outputs, mode=mode, accept_inplace=accept_inplace, name=name
    311     )
    312 else:
    313     # note: pfunc will also call orig_function -- orig_function is
    314     #      a choke point that all compilation must pass through
--> 315     fn = pfunc(
    316         params=inputs,
    317         outputs=outputs,
    318         mode=mode,
    319         updates=updates,
    320         givens=givens,
    321         no_default_updates=no_default_updates,
    322         accept_inplace=accept_inplace,
    323         name=name,
    324         rebuild_strict=rebuild_strict,
    325         allow_input_downcast=allow_input_downcast,
    326         on_unused_input=on_unused_input,
    327         profile=profile,
    328         output_keys=output_keys,
    329     )
    330 return fn

File ~/miniforge3/lib/python3.10/site-packages/pytensor/compile/function/pfunc.py:367, in pfunc(params, outputs, mode, updates, givens, no_default_updates, accept_inplace, name, rebuild_strict, allow_input_downcast, profile, on_unused_input, output_keys, fgraph)
    353     profile = ProfileStats(message=profile)
    355 inputs, cloned_outputs = construct_pfunc_ins_and_outs(
    356     params,
    357     outputs,
   (...)
    364     fgraph=fgraph,
    365 )
--> 367 return orig_function(
    368     inputs,
    369     cloned_outputs,
    370     mode,
    371     accept_inplace=accept_inplace,
    372     name=name,
    373     profile=profile,
    374     on_unused_input=on_unused_input,
    375     output_keys=output_keys,
    376     fgraph=fgraph,
    377 )

File ~/miniforge3/lib/python3.10/site-packages/pytensor/compile/function/types.py:1756, in orig_function(inputs, outputs, mode, accept_inplace, name, profile, on_unused_input, output_keys, fgraph)
   1744     m = Maker(
   1745         inputs,
   1746         outputs,
   (...)
   1753         fgraph=fgraph,
   1754     )
   1755     with config.change_flags(compute_test_value="off"):
-> 1756         fn = m.create(defaults)
   1757 finally:
   1758     t2 = time.perf_counter()

File ~/miniforge3/lib/python3.10/site-packages/pytensor/compile/function/types.py:1649, in FunctionMaker.create(self, input_storage, storage_map)
   1646 start_import_time = pytensor.link.c.cmodule.import_time
   1648 with config.change_flags(traceback__limit=config.traceback__compile_limit):
-> 1649     _fn, _i, _o = self.linker.make_thunk(
   1650         input_storage=input_storage_lists, storage_map=storage_map
   1651     )
   1653 end_linker = time.perf_counter()
   1655 linker_time = end_linker - start_linker

File ~/miniforge3/lib/python3.10/site-packages/pytensor/link/basic.py:254, in LocalLinker.make_thunk(self, input_storage, output_storage, storage_map, **kwargs)
    247 def make_thunk(
    248     self,
    249     input_storage: Optional["InputStorageType"] = None,
   (...)
    252     **kwargs,
    253 ) -> Tuple["BasicThunkType", "InputStorageType", "OutputStorageType"]:
--> 254     return self.make_all(
    255         input_storage=input_storage,
    256         output_storage=output_storage,
    257         storage_map=storage_map,
    258     )[:3]

File ~/miniforge3/lib/python3.10/site-packages/pytensor/link/vm.py:1297, in VMLinker.make_all(self, profiler, input_storage, output_storage, storage_map)
   1294 else:
   1295     post_thunk_clear = None
-> 1297 vm = self.make_vm(
   1298     order,
   1299     thunks,
   1300     input_storage,
   1301     output_storage,
   1302     storage_map,
   1303     post_thunk_clear,
   1304     computed,
   1305     compute_map,
   1306     self.updated_vars,
   1307 )
   1309 vm.storage_map = storage_map
   1310 vm.compute_map = compute_map

File ~/miniforge3/lib/python3.10/site-packages/pytensor/link/vm.py:1020, in VMLinker.make_vm(self, nodes, thunks, input_storage, output_storage, storage_map, post_thunk_clear, computed, compute_map, updated_vars)
   1017 pre_call_clear = [storage_map[v] for v in self.no_recycling]
   1019 try:
-> 1020     from pytensor.link.c.cvm import CVM
   1021 except (MissingGXX, ImportError):
   1022     CVM = None

File ~/miniforge3/lib/python3.10/site-packages/pytensor/link/c/cvm.py:13
      9 if not config.cxx:
     10     raise MissingGXX(
     11         "lazylinker will not be imported if pytensor.config.cxx is not set."
     12     )
---> 13 from pytensor.link.c.lazylinker_c import CLazyLinker
     15 class CVM(CLazyLinker, VM):
     16     def __init__(self, fgraph, *args, **kwargs):

File ~/miniforge3/lib/python3.10/site-packages/pytensor/link/c/lazylinker_c.py:143
    140         assert os.path.exists(loc)
    142 args = GCC_compiler.compile_args()
--> 143 GCC_compiler.compile_str(dirname, code, location=loc, preargs=args)
    144 # Save version into the __init__.py file.
    145 init_py = os.path.join(loc, "__init__.py")

File ~/miniforge3/lib/python3.10/site-packages/pytensor/link/c/cmodule.py:2661, in GCC_compiler.compile_str(module_name, src_code, location, include_dirs, lib_dirs, libs, preargs, py_module, hide_symbols)
   2659     pass
   2660 assert os.path.isfile(lib_filename)
-> 2661 return dlimport(lib_filename)

File ~/miniforge3/lib/python3.10/site-packages/pytensor/link/c/cmodule.py:349, in dlimport(fullpath, suffix)
    346 finally:
    347     del sys.path[0]
--> 349 assert fullpath.startswith(rval.__file__)
    350 return rval

AssertionError: 

Recommended installation instructions can be found here.
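From the first ImportError, it looks like the compiled lazylinker in the cache (0.211) predates what this PyTensor expects (0.212), possibly left over from an earlier version installed with pip. Is clearing the PyTensor compile cache the right fix here? This is the kind of reset I was considering; it is only a sketch, assuming the stale cache really is the culprit:

# Hedged workaround sketch: delete the PyTensor compile cache so the
# lazylinker C module is rebuilt on next use. Whether this also resolves
# the final AssertionError on Apple Silicon is an assumption on my part.
import shutil
import pytensor

print(pytensor.config.compiledir)          # location of the compiled-module cache
shutil.rmtree(pytensor.config.compiledir)  # remove stale builds; recompiled lazily

Or is the recommended route to reinstall via conda-forge instead of pip on this machine?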