GARCH11 with nonzero mean?
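For context, here is a minimal sketch of the kind of model the question refers to: a GARCH(1,1) volatility model with a nonzero mean on the returns. PyMC's built-in pm.GARCH11 describes a zero-mean series, so this sketch writes the variance recursion out with pytensor.scan and puts the mean mu directly into the Normal likelihood. All names, priors, and the placeholder data below are illustrative assumptions, not the actual contents of garch_vol_model from the notebook.

import numpy as np
import pymc as pm
import pytensor
import pytensor.tensor as pt

# Illustrative placeholder for the observed return series -- not the original data.
returns = np.random.normal(loc=0.0005, scale=0.01, size=500)

with pm.Model() as garch_vol_model:
    mu = pm.Normal("mu", 0.0, 0.1)                     # nonzero mean of the returns
    omega = pm.HalfNormal("omega", 0.01)               # constant term of the variance equation
    alpha_1 = pm.Uniform("alpha_1", 0.0, 1.0)          # ARCH coefficient
    beta_1 = pm.Uniform("beta_1", 0.0, 1.0 - alpha_1)  # GARCH coefficient; keeps alpha_1 + beta_1 < 1
    initial_vol = pm.HalfNormal("initial_vol", 0.01)   # sigma_0

    eps = pt.as_tensor_variable(returns) - mu          # de-meaned residuals

    # GARCH(1,1) recursion: sigma_t^2 = omega + alpha_1 * eps_{t-1}^2 + beta_1 * sigma_{t-1}^2
    def garch_step(eps_tm1, sigma2_tm1, omega, alpha_1, beta_1):
        return omega + alpha_1 * eps_tm1**2 + beta_1 * sigma2_tm1

    sigma2, _ = pytensor.scan(
        fn=garch_step,
        sequences=[eps[:-1]],
        outputs_info=[initial_vol**2],
        non_sequences=[omega, alpha_1, beta_1],
    )
    sigma2 = pt.concatenate([pt.stack([initial_vol**2]), sigma2])
    volatility = pm.Deterministic("volatility", pt.sqrt(sigma2))

    pm.Normal("obs", mu=mu, sigma=volatility, observed=returns)

    idata = pm.sample()

The scan step is also why the traceback below ends up in Scan.make_thunk: pm.GARCH11 and hand-rolled recursions alike need PyTensor to compile the Scan machinery.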

Error message

CompileError Traceback (most recent call last)
Cell In[25], line 2
1 with garch_vol_model:
----> 2 idata = pm.sample()
4 #posterior = idata.posterior.stack(pooled_chain=("chain", "draw"))
5 #posterior["exp_volatility"] = np.exp(posterior["volatility"])

File ~\anaconda3\envs\pymc5_env\Lib\site-packages\pymc\sampling\mcmc.py:447, in sample(draws, step, init, n_init, initvals, trace, chains, cores, tune, progressbar, model, random_seed, discard_tuned_samples, compute_convergence_checks, callback, jitter_max_retries, return_inferencedata, keep_warning_stat, idata_kwargs, mp_ctx, **kwargs)
444 auto_nuts_init = False
446 initial_points = None
--> 447 step = assign_step_methods(model, step, methods=pm.STEP_METHODS, step_kwargs=kwargs)
449 if isinstance(step, list):
450 step = CompoundStep(step)

File ~\anaconda3\envs\pymc5_env\Lib\site-packages\pymc\sampling\mcmc.py:189, in assign_step_methods(model, step, methods, step_kwargs)
181 selected = max(
182 methods,
183 key=lambda method, var=rv_var, has_gradient=has_gradient: method._competence(
184 var, has_gradient
185 ),
186 )
187 selected_steps[selected].append(var)
--> 189 return instantiate_steppers(model, steps, selected_steps, step_kwargs)

File ~\anaconda3\envs\pymc5_env\Lib\site-packages\pymc\sampling\mcmc.py:107, in instantiate_steppers(model, steps, selected_steps, step_kwargs)
105 args = step_kwargs.get(step_class.name, {})
106 used_keys.add(step_class.name)
--> 107 step = step_class(vars=vars, model=model, **args)
108 steps.append(step)
110 unused_args = set(step_kwargs).difference(used_keys)

File ~\anaconda3\envs\pymc5_env\Lib\site-packages\pymc\step_methods\hmc\nuts.py:182, in NUTS.__init__(self, vars, max_treedepth, early_max_treedepth, **kwargs)
124 def __init__(self, vars=None, max_treedepth=10, early_max_treedepth=8, **kwargs):
125 r"""Set up the No-U-Turn sampler.
126
127 Parameters
(…)
180 pm.sample to the desired number of tuning steps.
181 """
--> 182 super().__init__(vars, **kwargs)
184 self.max_treedepth = max_treedepth
185 self.early_max_treedepth = early_max_treedepth

File ~\anaconda3\envs\pymc5_env\Lib\site-packages\pymc\step_methods\hmc\base_hmc.py:109, in BaseHMC.__init__(self, vars, scaling, step_scale, is_cov, model, blocked, potential, dtype, Emax, target_accept, gamma, k, t0, adapt_step_size, step_rand, **pytensor_kwargs)
107 else:
108 vars = get_value_vars_from_user_vars(vars, self._model)
--> 109 super().__init__(vars, blocked=blocked, model=self._model, dtype=dtype, **pytensor_kwargs)
111 self.adapt_step_size = adapt_step_size
112 self.Emax = Emax

File ~\anaconda3\envs\pymc5_env\Lib\site-packages\pymc\step_methods\arraystep.py:263, in GradientSharedStep.__init__(self, vars, model, blocked, dtype, logp_dlogp_func, **pytensor_kwargs)
260 model = modelcontext(model)
262 if logp_dlogp_func is None:
--> 263 func = model.logp_dlogp_function(vars, dtype=dtype, **pytensor_kwargs)
264 else:
265 func = logp_dlogp_func

File ~\anaconda3\envs\pymc5_env\Lib\site-packages\pymc\model.py:648, in Model.logp_dlogp_function(self, grad_vars, tempered, **kwargs)
642 ip = self.initial_point(0)
643 extra_vars_and_values = {
644 var: ip[var.name]
645 for var in self.value_vars
646 if var in input_vars and var not in grad_vars
647 }
--> 648 return ValueGradFunction(costs, grad_vars, extra_vars_and_values, **kwargs)

File ~\anaconda3\envs\pymc5_env\Lib\site-packages\pymc\model.py:389, in ValueGradFunction.__init__(self, costs, grad_vars, extra_vars_and_values, dtype, casting, compute_grads, **kwargs)
385 outputs = [cost]
387 inputs = grad_vars
--> 389 self._pytensor_function = compile_pymc(inputs, outputs, givens=givens, **kwargs)

File ~\anaconda3\envs\pymc5_env\Lib\site-packages\pymc\pytensorf.py:1121, in compile_pymc(inputs, outputs, random_seed, mode, **kwargs)
1119 opt_qry = mode.provided_optimizer.including("random_make_inplace", check_parameter_opt)
1120 mode = Mode(linker=mode.linker, optimizer=opt_qry)
--> 1121 pytensor_function = pytensor.function(
1122 inputs,
1123 outputs,
1124 updates={**rng_updates, **kwargs.pop("updates", {})},
1125 mode=mode,
1126 **kwargs,
1127 )
1128 return pytensor_function

File ~\anaconda3\envs\pymc5_env\Lib\site-packages\pytensor\compile\function\__init__.py:315, in function(inputs, outputs, mode, updates, givens, no_default_updates, accept_inplace, name, rebuild_strict, allow_input_downcast, profile, on_unused_input)
309 fn = orig_function(
310 inputs, outputs, mode=mode, accept_inplace=accept_inplace, name=name
311 )
312 else:
313 # note: pfunc will also call orig_function -- orig_function is
314 # a choke point that all compilation must pass through
--> 315 fn = pfunc(
316 params=inputs,
317 outputs=outputs,
318 mode=mode,
319 updates=updates,
320 givens=givens,
321 no_default_updates=no_default_updates,
322 accept_inplace=accept_inplace,
323 name=name,
324 rebuild_strict=rebuild_strict,
325 allow_input_downcast=allow_input_downcast,
326 on_unused_input=on_unused_input,
327 profile=profile,
328 output_keys=output_keys,
329 )
330 return fn

File ~\anaconda3\envs\pymc5_env\Lib\site-packages\pytensor\compile\function\pfunc.py:367, in pfunc(params, outputs, mode, updates, givens, no_default_updates, accept_inplace, name, rebuild_strict, allow_input_downcast, profile, on_unused_input, output_keys, fgraph)
353 profile = ProfileStats(message=profile)
355 inputs, cloned_outputs = construct_pfunc_ins_and_outs(
356 params,
357 outputs,
(…)
364 fgraph=fgraph,
365 )
--> 367 return orig_function(
368 inputs,
369 cloned_outputs,
370 mode,
371 accept_inplace=accept_inplace,
372 name=name,
373 profile=profile,
374 on_unused_input=on_unused_input,
375 output_keys=output_keys,
376 fgraph=fgraph,
377 )

File ~\anaconda3\envs\pymc5_env\Lib\site-packages\pytensor\compile\function\types.py:1766, in orig_function(inputs, outputs, mode, accept_inplace, name, profile, on_unused_input, output_keys, fgraph)
1754 m = Maker(
1755 inputs,
1756 outputs,
(…)
1763 fgraph=fgraph,
1764 )
1765 with config.change_flags(compute_test_value="off"):
--> 1766 fn = m.create(defaults)
1767 finally:
1768 t2 = time.perf_counter()

File ~\anaconda3\envs\pymc5_env\Lib\site-packages\pytensor\compile\function\types.py:1659, in FunctionMaker.create(self, input_storage, trustme, storage_map)
1656 start_import_time = pytensor.link.c.cmodule.import_time
1658 with config.change_flags(traceback__limit=config.traceback__compile_limit):
--> 1659 _fn, _i, _o = self.linker.make_thunk(
1660 input_storage=input_storage_lists, storage_map=storage_map
1661 )
1663 end_linker = time.perf_counter()
1665 linker_time = end_linker - start_linker

File ~\anaconda3\envs\pymc5_env\Lib\site-packages\pytensor\link\basic.py:254, in LocalLinker.make_thunk(self, input_storage, output_storage, storage_map, **kwargs)
247 def make_thunk(
248 self,
249 input_storage: Optional["InputStorageType"] = None,
(…)
252 **kwargs,
253 ) -> Tuple["BasicThunkType", "InputStorageType", "OutputStorageType"]:
--> 254 return self.make_all(
255 input_storage=input_storage,
256 output_storage=output_storage,
257 storage_map=storage_map,
258 )[:3]

File ~\anaconda3\envs\pymc5_env\Lib\site-packages\pytensor\link\vm.py:1255, in VMLinker.make_all(self, profiler, input_storage, output_storage, storage_map)
1253 thunks[-1].lazy = False
1254 except Exception:
--> 1255 raise_with_op(fgraph, node)
1257 t1 = time.perf_counter()
1259 if self.profile:

File ~\anaconda3\envs\pymc5_env\Lib\site-packages\pytensor\link\utils.py:536, in raise_with_op(fgraph, node, thunk, exc_info, storage_map)
531 warnings.warn(
532 f"{exc_type} error does not allow us to add an extra error message"
533 )
534 # Some exception need extra parameter in inputs. So forget the
535 # extra long error message in that case.
--> 536 raise exc_value.with_traceback(exc_trace)

File ~\anaconda3\envs\pymc5_env\Lib\site-packages\pytensor\link\vm.py:1246, in VMLinker.make_all(self, profiler, input_storage, output_storage, storage_map)
1241 thunk_start = time.perf_counter()
1242 # no-recycling is done at each VM.__call__ So there is
1243 # no need to cause duplicate c code by passing
1244 # no_recycling here.
1245 thunks.append(
--> 1246 node.op.make_thunk(node, storage_map, compute_map, [], impl=impl)
1247 )
1248 linker_make_thunk_time[node] = time.perf_counter() - thunk_start
1249 if not hasattr(thunks[-1], "lazy"):
1250 # We don't want all ops maker to think about lazy Ops.
1251 # So if they didn't specify that its lazy or not, it isn't.
1252 # If this member isn't present, it will crash later.

File ~\anaconda3\envs\pymc5_env\Lib\site-packages\pytensor\scan\op.py:1526, in Scan.make_thunk(self, node, storage_map, compute_map, no_recycling, impl)
1523 if impl == "py":
1524 raise MissingGXX
--> 1526 from . import scan_perform_ext
1528 cython_mintaps = np.asarray(self.mintaps, dtype="int32")
1530 n_outs = self.info.n_mit_mot + self.info.n_mit_sot + self.info.n_sit_sot

File ~\anaconda3\envs\pymc5_env\Lib\site-packages\pytensor\scan\scan_perform_ext.py:92
89 with open(cfile) as f:
90 code = f.read()
---> 92 cmodule.GCC_compiler.compile_str(
93 dirname, code, location=loc, preargs=preargs, hide_symbols=False
94 )
95 # Save version into the __init__.py file.
96 init_py = os.path.join(loc, "__init__.py")

File ~\anaconda3\envs\pymc5_env\Lib\site-packages\pytensor\link\c\cmodule.py:2641, in GCC_compiler.compile_str(module_name, src_code, location, include_dirs, lib_dirs, libs, preargs, py_module, hide_symbols)
2633 print(
2634 "Check if package python-dev or python-devel is installed."
2635 )
2637 # We replace '\n' by '. ' in the error message because when Python
2638 # prints the exception, having '\n' in the text makes it more
2639 # difficult to read.
2640 # compile_stderr = compile_stderr.replace("\n", ". ")
--> 2641 raise CompileError(
2642 f"Compilation failed (return status={status}):\n{' '.join(cmd)}\n{compile_stderr}"
2643 )
2644 elif config.cmodule__compilation_warning and compile_stderr:
2645 # Print errors just below the command line.
2646 print(compile_stderr)
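The last frames show where the compilation fails: PyTensor is building the C/Cython extension that Scan needs (scan_perform_ext) via GCC_compiler.compile_str, and pm.GARCH11 relies on Scan internally, which suggests a problem with the compiler toolchain in the environment rather than with the model itself. A minimal, model-independent way to check the toolchain (a hypothetical check, not part of the original post):

import pytensor
import pytensor.tensor as pt

# Compiler PyTensor detected; an empty string means no usable C++ compiler was found.
print(pytensor.config.cxx)

# Force a small compilation. If this raises the same CompileError, the problem is
# the g++/toolchain setup in the conda environment, not the GARCH model itself.
# (It can also succeed silently if the compiled module is already cached.)
x = pt.dscalar("x")
f = pytensor.function([x], x**2)
print(f(3.0))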