IndexError: tuple index out of range from pm.sample

Hi, I’m facing the same error… also using Windows.
It was working fine until a couple of weeks ago. It seems to be something to do with cloudpickle; here’s the full traceback I get:
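
For reference, here is a minimal sketch of the model that triggers it. The failing cell in the trace only shows its last three lines, so the import and the `with pm.Model():` context are my assumptions about the surrounding code:

```python
import pymc as pm

# Minimal reproduction sketch; the original cell shows only the three
# lines inside the context manager, so everything else here is assumed.
with pm.Model():
    p = pm.Beta("p", alpha=1, beta=1)
    y = pm.Binomial("y", n=10, p=p, observed=[1, 1, 0, 0])
    idata = pm.sample()  # raises IndexError: tuple index out of range
```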

---------------------------------------------------------------------------
IndexError                                Traceback (most recent call last)
Cell In[38], line 9
      7 p = pm.Beta("p", alpha=1, beta=1)
      8 y = pm.Binomial("y", n=10, p=p, observed=[1,1,0,0])
----> 9 idata = pm.sample()

File ~\anaconda3\envs\H2\Lib\site-packages\pymc\sampling\mcmc.py:546, in sample(draws, step, init, n_init, initvals, trace, chains, cores, tune, progressbar, model, random_seed, discard_tuned_samples, compute_convergence_checks, callback, jitter_max_retries, return_inferencedata, keep_warning_stat, idata_kwargs, mp_ctx, **kwargs)
    544 _print_step_hierarchy(step)
    545 try:
--> 546     _mp_sample(**sample_args, **parallel_args)
    547 except pickle.PickleError:
    548     _log.warning("Could not pickle model, sampling singlethreaded.")

File ~\anaconda3\envs\H2\Lib\site-packages\pymc\sampling\mcmc.py:900, in _mp_sample(draws, tune, step, chains, cores, random_seed, start, progressbar, traces, model, callback, mp_ctx, **kwargs)
    897 # We did draws += tune in pm.sample
    898 draws -= tune
--> 900 sampler = ps.ParallelSampler(
    901     draws=draws,
    902     tune=tune,
    903     chains=chains,
    904     cores=cores,
    905     seeds=random_seed,
    906     start_points=start,
    907     step_method=step,
    908     progressbar=progressbar,
    909     mp_ctx=mp_ctx,
    910 )
    911 try:
    912     try:

File ~\anaconda3\envs\H2\Lib\site-packages\pymc\sampling\parallel.py:401, in ParallelSampler.__init__(self, draws, tune, chains, cores, seeds, start_points, step_method, progressbar, mp_ctx)
    399 step_method_pickled = None
    400 if mp_ctx.get_start_method() != "fork":
--> 401     step_method_pickled = cloudpickle.dumps(step_method, protocol=-1)
    403 self._samplers = [
    404     ProcessAdapter(
    405         draws,
   (...)
    414     for chain, seed, start in zip(range(chains), seeds, start_points)
    415 ]
    417 self._inactive = self._samplers.copy()

File ~\anaconda3\envs\H2\Lib\site-packages\cloudpickle\cloudpickle_fast.py:73, in dumps(obj, protocol, buffer_callback)
     69 with io.BytesIO() as file:
     70     cp = CloudPickler(
     71         file, protocol=protocol, buffer_callback=buffer_callback
     72     )
---> 73     cp.dump(obj)
     74     return file.getvalue()

File ~\anaconda3\envs\H2\Lib\site-packages\cloudpickle\cloudpickle_fast.py:602, in CloudPickler.dump(self, obj)
    600 def dump(self, obj):
    601     try:
--> 602         return Pickler.dump(self, obj)
    603     except RuntimeError as e:
    604         if "recursion" in e.args[0]:

File ~\anaconda3\envs\H2\Lib\site-packages\cloudpickle\cloudpickle_fast.py:692, in CloudPickler.reducer_override(self, obj)
    690     return _class_reduce(obj)
    691 elif isinstance(obj, types.FunctionType):
--> 692     return self._function_reduce(obj)
    693 else:
    694     # fallback to save_global, including the Pickler's
    695     # dispatch_table
    696     return NotImplemented

File ~\anaconda3\envs\H2\Lib\site-packages\cloudpickle\cloudpickle_fast.py:565, in CloudPickler._function_reduce(self, obj)
    563     return NotImplemented
    564 else:
--> 565     return self._dynamic_function_reduce(obj)

File ~\anaconda3\envs\H2\Lib\site-packages\cloudpickle\cloudpickle_fast.py:546, in CloudPickler._dynamic_function_reduce(self, func)
    544 """Reduce a function that is not pickleable via attribute lookup."""
    545 newargs = self._function_getnewargs(func)
--> 546 state = _function_getstate(func)
    547 return (types.FunctionType, newargs, state, None, None,
    548         _function_setstate)

File ~\anaconda3\envs\H2\Lib\site-packages\cloudpickle\cloudpickle_fast.py:157, in _function_getstate(func)
    139 def _function_getstate(func):
    140     # - Put func's dynamic attributes (stored in func.__dict__) in state. These
    141     #   attributes will be restored at unpickling time using
   (...)
    144     #   unpickling time by iterating over slotstate and calling setattr(func,
    145     #   slotname, slotvalue)
    146     slotstate = {
    147         "__name__": func.__name__,
    148         "__qualname__": func.__qualname__,
   (...)
    154         "__closure__": func.__closure__,
    155     }
--> 157     f_globals_ref = _extract_code_globals(func.__code__)
    158     f_globals = {k: func.__globals__[k] for k in f_globals_ref if k in
    159                  func.__globals__}
    161     closure_values = (
    162         list(map(_get_cell_contents, func.__closure__))
    163         if func.__closure__ is not None else ()
    164     )

File ~\anaconda3\envs\H2\Lib\site-packages\cloudpickle\cloudpickle.py:334, in _extract_code_globals(co)
    330 names = co.co_names
    331 # We use a dict with None values instead of a set to get a
    332 # deterministic order (assuming Python 3.6+) and avoid introducing
    333 # non-deterministic pickle bytes as a results.
--> 334 out_names = {names[oparg]: None for _, oparg in _walk_global_ops(co)}
    336 # Declaring a function inside another one using the "def ..."
    337 # syntax generates a constant code object corresponding to the one
    338 # of the nested function's As the nested function may itself need
    339 # global variables, we need to introspect its code, extract its
    340 # globals, (look for code object in it's co_consts attribute..) and
    341 # add the result to code_globals
    342 if co.co_consts:

File ~\anaconda3\envs\H2\Lib\site-packages\cloudpickle\cloudpickle.py:334, in <dictcomp>(.0)
    330 names = co.co_names
    331 # We use a dict with None values instead of a set to get a
    332 # deterministic order (assuming Python 3.6+) and avoid introducing
    333 # non-deterministic pickle bytes as a results.
--> 334 out_names = {names[oparg]: None for _, oparg in _walk_global_ops(co)}
    336 # Declaring a function inside another one using the "def ..."
    337 # syntax generates a constant code object corresponding to the one
    338 # of the nested function's As the nested function may itself need
    339 # global variables, we need to introspect its code, extract its
    340 # globals, (look for code object in it's co_consts attribute..) and
    341 # add the result to code_globals
    342 if co.co_consts:

IndexError: tuple index out of range
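
If I read the trace right, the step method is only pickled because Windows uses the "spawn" start method rather than "fork" (lines 399–401 of parallel.py above), so sampling on a single core should bypass cloudpickle entirely. A minimal sketch of the workaround I mean; `cores=1` is my assumption, not something confirmed in the trace:

```python
# Workaround sketch (assumption): cores=1 keeps sampling in the main
# process, so the step method is never serialized with cloudpickle.
# Chains still run, just one after another.
idata = pm.sample(cores=1)
```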