Matrix Multiplication With Multiple Dimensions in a PyMC Model

Hey, I was hoping for some help with this problem: I can’t get matrix multiplication to work when using dimensions (or shapes) for a PyMC model with multiple linear regression. Essentially I want y = a*x1 + b*x2 + c.

Here is my code and error. Any help is super appreciated!

import numpy as np
import pymc as pm

# we have 30 products in the data and 2 variables for the linear regression
# (y, N_TUNE, N_CHAINS, TARGET_ACCEPT, and N_CORES are defined earlier in the notebook)
product_map = [i for i in range(30)]
xdims = [i for i in range(2)]

# create a product id for each input (we have 200 datapoints) and create x data
product = np.random.randint(0, high=30, size=200)
x = np.random.normal(loc=0.0, scale=1.0, size=(200,2))
print(x.shape, product.shape)

# create the model
unpooled_model = pm.Model(coords={'product':product_map, 'xdims':xdims})
with unpooled_model:
    m = pm.Normal('m', mu=1, sigma=20, dims=("xdims","product"))
    b = pm.Normal('b', mu=30_000, sigma=50_000, dims="product")
    std = pm.HalfNormal('std', sigma=400_000)
    
    xdata = pm.Data('xdata', x, mutable=True)
    product_data = pm.Data('product_data', product, mutable=True)

    print(xdata.shape.eval())
    print(m.shape.eval())
    print(b.shape.eval())
    mean = xdata.dot(m) + b
    print(mean.shape.eval())
    obs = pm.Normal('obs', mu=mean[:, product_data], sigma=std, observed=y)
    
    unpooled_trace_variety = pm.sample(tune=N_TUNE, return_inferencedata=True, chains=N_CHAINS, target_accept=TARGET_ACCEPT, cores=N_CORES)
(200, 2) (200,)
[200   2]
[ 2 30]
[30]
[200  30]
---------------------------------------------------------------------------
ShapeError                                Traceback (most recent call last)
Input In [60], in <cell line: 10>()
     21 mean = xdata.dot(m) + b
     22 print(mean.shape.eval())
---> 23 obs = pm.Normal('obs', mu=mean[:, product_data], sigma=std, observed=y)
     25 unpooled_trace_variety = pm.sample(tune=N_TUNE, return_inferencedata=True, chains=N_CHAINS, target_accept=TARGET_ACCEPT, cores=N_CORES)

File ~/miniforge3/envs/pymc/lib/python3.10/site-packages/pymc/distributions/distribution.py:271, in Distribution.__new__(cls, name, rng, dims, initval, observed, total_size, transform, *args, **kwargs)
    267 if resize_shape:
    268     # A batch size was specified through `dims`, or implied by `observed`.
    269     rv_out = change_rv_size(rv=rv_out, new_size=resize_shape, expand=True)
--> 271 rv_out = model.register_rv(
    272     rv_out,
    273     name,
    274     observed,
    275     total_size,
    276     dims=dims,
    277     transform=transform,
    278     initval=initval,
    279 )
    281 # add in pretty-printing support
    282 rv_out.str_repr = types.MethodType(str_for_dist, rv_out)

File ~/miniforge3/envs/pymc/lib/python3.10/site-packages/pymc/model.py:1375, in Model.register_rv(self, rv_var, name, data, total_size, dims, transform, initval)
   1368         raise TypeError(
   1369             "Variables that depend on other nodes cannot be used for observed data."
   1370             f"The data variable was: {data}"
   1371         )
   1373     # `rv_var` is potentially changed by `make_obs_var`,
   1374     # for example into a new graph for imputation of missing data.
-> 1375     rv_var = self.make_obs_var(rv_var, data, dims, transform)
   1377 return rv_var

File ~/miniforge3/envs/pymc/lib/python3.10/site-packages/pymc/model.py:1401, in Model.make_obs_var(self, rv_var, data, dims, transform)
   1398 data = convert_observed_data(data).astype(rv_var.dtype)
   1400 if data.ndim != rv_var.ndim:
-> 1401     raise ShapeError(
   1402         "Dimensionality of data and RV don't match.", actual=data.ndim, expected=rv_var.ndim
   1403     )
   1405 if aesara.config.compute_test_value != "off":
   1406     test_value = getattr(rv_var.tag, "test_value", None)

ShapeError: Dimensionality of data and RV don't match. (actual 1 != expected 2)

I assume your y has a shape of 200?

I think your mistake is in the indexing of the mean. Does mean[np.arange(200), product_data] do something different?

Regardless, that’s the shape you should evaluate for debugging, as it probably has one more dimension than you expect.
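In code, the suggested change would look something like the lines below (a sketch only, inside the same model block; it assumes y is the length-200 observed vector):

# pick one column of `mean` per row, so the result is (200,) and matches y
row_idx = np.arange(200)
obs = pm.Normal('obs', mu=mean[row_idx, product_data], sigma=std, observed=y)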


I’m just finishing a run, but it appears that you are correct! Thank you so much for your help. Out of interest, could you help me understand why sampling mean[:, product_data] fails? The reason for my confusion is that this is similar to the syntax used to slice tensors in TensorFlow. Are tensors held a little differently in PyMC? My intuition would be that we have a tensor generated here for each input, so we need to take the respective tensors and then select the product value internally from that tensor; is that anywhere near reality?

Indexing in Aesara works just like NumPy (sans bugs):

import numpy as np

x = np.arange(10).reshape((5, 2))
print(x[:, [0, 1, 0, 1, 0]].shape)  # (5, 5)
print(x[[0, 1, 2, 3, 4], [0, 1, 0, 1, 0]].shape)  # (5,)
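Applied to the shapes in your model, that is the difference between a (200, 200) array and the (200,) vector that observed=y needs. For example, with plain NumPy arrays of the same shapes (illustrative names only):

mean_np = np.zeros((200, 30))                     # same shape as the model's `mean`
product_np = np.random.randint(0, 30, size=200)   # one product id per datapoint

print(mean_np[:, product_np].shape)                # (200, 200): every row, then 200 columns
print(mean_np[np.arange(200), product_np].shape)   # (200,): one entry per datapoint, matching y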

Ah, that makes a lot of sense. Okay, sorry, I was totally overthinking it!

I’m really sorry to ask for your help again. I thought I would have been able to keep going, but I tried taking this information and turning it into a hierarchical model, and I’m not having any success. Is there something I’m doing wrong with the shapes here as well? I can’t make my shapes align.

# create a variety id, product id, and x data for each input (we have 200 datapoints)
# we can think of this like store products; take Pokémon cards:
# variety would be trading cards, product would be a Pokémon card pack, and
# the x data could be a few inputs like popularity and sales
# y would be the demand for the Pokémon cards (which we want to predict)
variety = np.random.randint(0, high=10, size=200)
product = np.random.randint(0, high=30, size=200)
x = np.random.normal(loc=0.0, scale=1.0, size=(200,2))
print(x.shape, variety.shape, product.shape)

# create the target data 
y = np.random.normal(loc=0.0, scale=1.0, size=200)
print(y.shape)

heir_model_variety = pm.Model(
    coords={'variety': np.arange(10), 'product': np.arange(30), 'xdims': np.arange(2)}
)
with heir_model_variety:
    # global parameters
    mu_m = pm.Normal('mu_m', mu=1, sigma=10, dims="xdims")
    sigma_m = pm.HalfNormal('sigma_m', sigma=10, dims="xdims")
    
    mu_b = pm.Normal('mu_b', mu=50_000, sigma=100_000)
    sigma_b = pm.HalfNormal('sigma_b', sigma=100_000)
    
    std = pm.HalfNormal('std', sigma=500_000)
    
    # variety parameters
    mu_m_variety = pm.Normal('mu_m_variety', mu=mu_m, sigma=sigma_m, dims=("variety","xdims"))
    sigma_m_variety = pm.HalfNormal('sigma_m_variety', sigma=10, dims=("variety","xdims"))
    
    mu_b_variety = pm.Normal('mu_b_variety', mu=mu_b, sigma=sigma_b, dims="variety")
    sigma_b_variety = pm.HalfNormal('sigma_b_variety', sigma=100_000, dims="variety")
    
    # product parameters
    m = pm.Normal('m', mu=mu_m_variety, sigma=sigma_m_variety, dims=("product","variety","xdims"))
    b = pm.Normal('b', mu=mu_b_variety, sigma=sigma_b_variety, dims=("product","variety"))
    
    xdata = pm.Data('xdata', x, mutable=True)
    variety_data = pm.Data('variety_data', variety, mutable=True)
    product_data = pm.Data('product_data', product, mutable=True)
    
    print(xdata.shape.eval())
    print(m.T.shape.eval())
    print(xdata.dot(m.T).shape.eval())
    print(b.T.shape.eval())
    mean = xdata.dot(m.T) + b.T
    print(mean.shape.eval())
    obs = pm.Normal('obs', mu=mean[np.arange(200),product_data,variety_data], sigma=std, observed=y)
    
    heir_trace_variety = pm.sample(tune=N_TUNE, return_inferencedata=True, chains=N_CHAINS, target_accept=TARGET_ACCEPT, cores=N_CORES)
(200, 2) (200,) (200,)
(200,)
[200   2]
[ 2 10 30]
[200   2  30]
[10 30]
---------------------------------------------------------------------------
AssertionError                            Traceback (most recent call last)
File ~/miniforge3/envs/pymc/lib/python3.10/site-packages/aesara/compile/function/types.py:975, in Function.__call__(self, *args, **kwargs)
    973 try:
    974     outputs = (
--> 975         self.vm()
    976         if output_subset is None
    977         else self.vm(output_subset=output_subset)
    978     )
    979 except Exception:

File ~/miniforge3/envs/pymc/lib/python3.10/site-packages/aesara/graph/op.py:541, in Op.make_py_thunk.<locals>.rval(p, i, o, n, params)
    533 @is_thunk_type
    534 def rval(
    535     p=p,
   (...)
    539     params=params_val,
    540 ):
--> 541     r = p(n, [x[0] for x in i], o, params)
    542     for o in node.outputs:

File ~/miniforge3/envs/pymc/lib/python3.10/site-packages/aesara/raise_op.py:96, in CheckAndRaise.perform(self, node, inputs, outputs, params)
     95 if not np.all(conds):
---> 96     raise self.exc_type(self.msg)

AssertionError: Could not broadcast dimensions

During handling of the above exception, another exception occurred:

AssertionError                            Traceback (most recent call last)
Input In [9], in <cell line: 16>()
     44 print(b.T.shape.eval())
     45 mean = xdata.dot(m.T) + b.T
---> 46 print(mean.shape.eval())
     47 obs = pm.Normal('obs', mu=mean[np.arange(200),product_data,variety_data], sigma=std, observed=y)
     49 heir_trace_variety = pm.sample(tune=N_TUNE, return_inferencedata=True, chains=N_CHAINS, target_accept=TARGET_ACCEPT, cores=N_CORES)

File ~/miniforge3/envs/pymc/lib/python3.10/site-packages/aesara/graph/basic.py:602, in Variable.eval(self, inputs_to_values)
    599     self._fn_cache[inputs] = function(inputs, self)
    600 args = [inputs_to_values[param] for param in inputs]
--> 602 rval = self._fn_cache[inputs](*args)
    604 return rval

File ~/miniforge3/envs/pymc/lib/python3.10/site-packages/aesara/compile/function/types.py:988, in Function.__call__(self, *args, **kwargs)
    986     if hasattr(self.vm, "thunks"):
    987         thunk = self.vm.thunks[self.vm.position_of_error]
--> 988     raise_with_op(
    989         self.maker.fgraph,
    990         node=self.vm.nodes[self.vm.position_of_error],
    991         thunk=thunk,
    992         storage_map=getattr(self.vm, "storage_map", None),
    993     )
    994 else:
    995     # old-style linkers raise their own exceptions
    996     raise

File ~/miniforge3/envs/pymc/lib/python3.10/site-packages/aesara/link/utils.py:534, in raise_with_op(fgraph, node, thunk, exc_info, storage_map)
    529     warnings.warn(
    530         f"{exc_type} error does not allow us to add an extra error message"
    531     )
    532     # Some exception need extra parameter in inputs. So forget the
    533     # extra long error message in that case.
--> 534 raise exc_value.with_traceback(exc_trace)

File ~/miniforge3/envs/pymc/lib/python3.10/site-packages/aesara/compile/function/types.py:975, in Function.__call__(self, *args, **kwargs)
    972 t0_fn = time.time()
    973 try:
    974     outputs = (
--> 975         self.vm()
    976         if output_subset is None
    977         else self.vm(output_subset=output_subset)
    978     )
    979 except Exception:
    980     restore_defaults()

File ~/miniforge3/envs/pymc/lib/python3.10/site-packages/aesara/graph/op.py:541, in Op.make_py_thunk.<locals>.rval(p, i, o, n, params)
    533 @is_thunk_type
    534 def rval(
    535     p=p,
   (...)
    539     params=params_val,
    540 ):
--> 541     r = p(n, [x[0] for x in i], o, params)
    542     for o in node.outputs:
    543         compute_map[o][0] = True

File ~/miniforge3/envs/pymc/lib/python3.10/site-packages/aesara/raise_op.py:96, in CheckAndRaise.perform(self, node, inputs, outputs, params)
     94 out[0] = val
     95 if not np.all(conds):
---> 96     raise self.exc_type(self.msg)

AssertionError: Could not broadcast dimensions
Apply node that caused the error: Assert{msg=Could not broadcast dimensions}(Abs.0, TensorFromScalar.0)
Toposort index: 32
Inputs types: [ScalarType(int64), TensorType(bool, ())]
Inputs shapes: [(), ()]
Inputs strides: [(), ()]
Inputs values: [10, array(False)]
Outputs clients: [[TensorFromScalar(Assert{msg=Could not broadcast dimensions}.0)]]

HINT: Re-running with most Aesara optimizations disabled could provide a back-trace showing when this node was created. This can be done by setting the Aesara flag 'optimizer=fast_compile'. If that does not work, Aesara optimizations can be disabled with 'optimizer=None'.
HINT: Use the Aesara flag `exception_verbosity=high` for a debug print-out and storage map footprint of this Apply node.
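One rough sketch of how the same per-observation indexing idea could make these shapes line up: select each datapoint’s own slope vector and intercept first, then sum over xdims instead of taking a 3-D dot product. This is only a guess at the intended structure, so treat it as a starting point rather than a drop-in fix:

# inside the model block, after xdata, variety_data and product_data are defined
m_obs = m[product_data, variety_data]         # (200, 2): slope vector for each datapoint
b_obs = b[product_data, variety_data]         # (200,): intercept for each datapoint
mean = (xdata * m_obs).sum(axis=-1) + b_obs   # (200,): per-datapoint regression mean
obs = pm.Normal('obs', mu=mean, sigma=std, observed=y)

Every intermediate here stays at most two-dimensional, so nothing has to broadcast against the (10, 30) intercept matrix, and the mean ends up with the same (200,) shape as y.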