Beginner Question: Error with transformations

Hello,

I am trying to run a model from Statistical Rethinking, Ch. 11, porting the code to PyMC v4 and Aesara. I am on PyMC 4.1.1 and Aesara 2.7.4.
Here’s my code:

import numpy as np
import aesara.tensor as at
import pymc as pm

with pm.Model() as m11_1:
    # ordered cutpoints on the log-odds scale
    a = pm.Normal('a', 0, 10, shape=6, transform=pm.distributions.transforms.Ordered, initval=np.arange(6) - 2.5)
    pa = pm.math.sigmoid(a)

    # cumulative probabilities -> per-category probabilities
    p_cum = at.concatenate([[0], pa, [1]])
    p = p_cum[1:] - p_cum[:-1]

    resp = pm.Categorical('resp', p, observed=data.response - 1)
    trace = pm.sample()

And the error I am getting is:

ERROR (aesara.graph.opt): Optimization failure due to: transform_values
ERROR (aesara.graph.opt): node: normal_rv{0, (0, 0), floatX, False}(RandomGeneratorSharedVariable(<Generator(PCG64) at 0x192823BC900>), TensorConstant{(1,) of 6}, TensorConstant{11}, TensorConstant{0}, TensorConstant{10.0})
ERROR (aesara.graph.opt): TRACEBACK:
ERROR (aesara.graph.opt): Traceback (most recent call last):
  File "C:\Users\blake\anaconda3\envs\pymc4\lib\site-packages\aesara\graph\opt.py", line 1861, in process_node
    replacements = lopt.transform(fgraph, node)
  File "C:\Users\blake\anaconda3\envs\pymc4\lib\site-packages\aesara\graph\opt.py", line 1066, in transform
    return self.fn(fgraph, node)
  File "C:\Users\blake\anaconda3\envs\pymc4\lib\site-packages\aeppl\transforms.py", line 159, in transform_values
    transform.backward(value_var, *trans_node.inputs), value_var
  File "C:\Users\blake\anaconda3\envs\pymc4\lib\site-packages\pymc\distributions\transforms.py", line 73, in backward
    x = at.zeros(value.shape)
AttributeError: 'RandomGeneratorSharedVariable' object has no attribute 'shape'

---------------------------------------------------------------------------
AttributeError                            Traceback (most recent call last)
Input In [8], in <cell line: 1>()
      1 with m11_1:
----> 2     trace = pm.sample()

File ~\anaconda3\envs\pymc4\lib\site-packages\pymc\sampling.py:524, in sample(draws, step, init, n_init, initvals, trace, chain_idx, chains, cores, tune, progressbar, model, random_seed, discard_tuned_samples, compute_convergence_checks, callback, jitter_max_retries, return_inferencedata, idata_kwargs, mp_ctx, **kwargs)
    521         auto_nuts_init = False
    523 initial_points = None
--> 524 step = assign_step_methods(model, step, methods=pm.STEP_METHODS, step_kwargs=kwargs)
    526 if isinstance(step, list):
    527     step = CompoundStep(step)

File ~\anaconda3\envs\pymc4\lib\site-packages\pymc\sampling.py:229, in assign_step_methods(model, step, methods, step_kwargs)
    221         selected = max(
    222             methods,
    223             key=lambda method, var=rv_var, has_gradient=has_gradient: method._competence(
    224                 var, has_gradient
    225             ),
    226         )
    227         selected_steps[selected].append(var)
--> 229 return instantiate_steppers(model, steps, selected_steps, step_kwargs)

File ~\anaconda3\envs\pymc4\lib\site-packages\pymc\sampling.py:147, in instantiate_steppers(model, steps, selected_steps, step_kwargs)
    145         args = step_kwargs.get(step_class.name, {})
    146         used_keys.add(step_class.name)
--> 147         step = step_class(vars=vars, model=model, **args)
    148         steps.append(step)
    150 unused_args = set(step_kwargs).difference(used_keys)

File ~\anaconda3\envs\pymc4\lib\site-packages\pymc\step_methods\hmc\nuts.py:178, in NUTS.__init__(self, vars, max_treedepth, early_max_treedepth, **kwargs)
    120 def __init__(self, vars=None, max_treedepth=10, early_max_treedepth=8, **kwargs):
    121     r"""Set up the No-U-Turn sampler.
    122 
    123     Parameters
   (...)
    176     `pm.sample` to the desired number of tuning steps.
    177     """
--> 178     super().__init__(vars, **kwargs)
    180     self.max_treedepth = max_treedepth
    181     self.early_max_treedepth = early_max_treedepth

File ~\anaconda3\envs\pymc4\lib\site-packages\pymc\step_methods\hmc\base_hmc.py:95, in BaseHMC.__init__(self, vars, scaling, step_scale, is_cov, model, blocked, potential, dtype, Emax, target_accept, gamma, k, t0, adapt_step_size, step_rand, **aesara_kwargs)
     92 else:
     93     vars = [self._model.rvs_to_values.get(var, var) for var in vars]
---> 95 super().__init__(vars, blocked=blocked, model=self._model, dtype=dtype, **aesara_kwargs)
     97 self.adapt_step_size = adapt_step_size
     98 self.Emax = Emax

File ~\anaconda3\envs\pymc4\lib\site-packages\pymc\step_methods\arraystep.py:276, in GradientSharedStep.__init__(self, vars, model, blocked, dtype, logp_dlogp_func, **aesara_kwargs)
    273 model = modelcontext(model)
    275 if logp_dlogp_func is None:
--> 276     func = model.logp_dlogp_function(vars, dtype=dtype, **aesara_kwargs)
    277 else:
    278     func = logp_dlogp_func

File ~\anaconda3\envs\pymc4\lib\site-packages\pymc\model.py:637, in Model.logp_dlogp_function(self, grad_vars, tempered, **kwargs)
    635 input_vars = {i for i in graph_inputs(costs) if not isinstance(i, Constant)}
    636 extra_vars = [self.rvs_to_values.get(var, var) for var in self.free_RVs]
--> 637 ip = self.initial_point(0)
    638 extra_vars_and_values = {
    639     var: ip[var.name] for var in extra_vars if var in input_vars and var not in grad_vars
    640 }
    641 return ValueGradFunction(costs, grad_vars, extra_vars_and_values, **kwargs)

File ~\anaconda3\envs\pymc4\lib\site-packages\pymc\model.py:1067, in Model.initial_point(self, seed)
   1059 def initial_point(self, seed=None) -> Dict[str, np.ndarray]:
   1060     """Computes the initial point of the model.
   1061 
   1062     Returns
   (...)
   1065         Maps names of transformed variables to numeric initial values in the transformed space.
   1066     """
-> 1067     fn = make_initial_point_fn(model=self, return_transformed=True)
   1068     return Point(fn(seed), model=self)

File ~\anaconda3\envs\pymc4\lib\site-packages\pymc\initial_point.py:159, in make_initial_point_fn(model, overrides, jitter_rvs, default_strategy, return_transformed)
    153 sdict_overrides = convert_str_to_rv_dict(model, overrides or {})
    154 initval_strats = {
    155     **model.initial_values,
    156     **sdict_overrides,
    157 }
--> 159 initial_values = make_initial_point_expression(
    160     free_rvs=model.free_RVs,
    161     rvs_to_values=model.rvs_to_values,
    162     initval_strategies=initval_strats,
    163     jitter_rvs=jitter_rvs,
    164     default_strategy=default_strategy,
    165     return_transformed=return_transformed,
    166 )
    168 # Replace original rng shared variables so that we don't mess with them
    169 # when calling the final seeded function
    170 graph = FunctionGraph(outputs=initial_values, clone=False)

File ~\anaconda3\envs\pymc4\lib\site-packages\pymc\initial_point.py:281, in make_initial_point_expression(free_rvs, rvs_to_values, initval_strategies, jitter_rvs, default_strategy, return_transformed)
    278 transform = getattr(rvs_to_values[variable].tag, "transform", None)
    280 if transform is not None:
--> 281     value = transform.forward(value, *variable.owner.inputs)
    283 if variable in jitter_rvs:
    284     jitter = at.random.uniform(-1, 1, size=value.shape)

File ~\anaconda3\envs\pymc4\lib\site-packages\pymc\distributions\transforms.py:79, in Ordered.forward(self, value, *inputs)
     78 def forward(self, value, *inputs):
---> 79     y = at.zeros(value.shape)
     80     y = at.inc_subtensor(y[..., 0], value[..., 0])
     81     y = at.inc_subtensor(y[..., 1:], at.log(value[..., 1:] - value[..., :-1]))

AttributeError: 'RandomGeneratorSharedVariable' object has no attribute 'shape'

If anyone can point me in the right direction, I would appreciate it.

I think it’s just a matter of using the lowercase ordered instance instead of the Ordered class? Passing the class means forward gets called as an unbound method, so the arguments shift by one: self receives your value, and value receives the RV's first input, the RandomGeneratorSharedVariable, which is exactly what the AttributeError is complaining about.
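Something like this, if I'm reading it right (untested sketch of the same model with only the transform argument changed, using your data frame from above):

import numpy as np
import aesara.tensor as at
import pymc as pm

with pm.Model() as m11_1:
    # lowercase 'ordered': a pre-built instance, not the Ordered class
    a = pm.Normal('a', 0, 10, shape=6, transform=pm.distributions.transforms.ordered, initval=np.arange(6) - 2.5)
    pa = pm.math.sigmoid(a)

    p_cum = at.concatenate([[0], pa, [1]])
    p = p_cum[1:] - p_cum[:-1]

    resp = pm.Categorical('resp', p, observed=data.response - 1)
    trace = pm.sample()

pm.distributions.transforms.ordered is just a ready-made instance of Ordered, so its forward/backward methods are bound and the value argument lines up the way the initial point machinery expects.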

Edit: I'm not entirely convinced; I'll have to try it on my machine.

That was it. I swear I thought I tried it both ways, but I guess not. Thanks!
