Looping Theano functions

Folks,

I am a rubbish Theano programmer and need a little help implementing Theano’s softmax function iteratively. A reproducible bit of code would be:

import numpy as np
import pymc3 as pm
import theano as T
import theano.tensor as tt

Cjk_obs = np.array([[ 1,  0,  6,  4,  2, 14, 52,  6, 10,  0,  6, 14,  8,  1,  0],
       [ 0,  1, 15,  4,  0,  2,  1, 16,  3,  4,  1,  5,  1,  0,  5],
       [ 0,  3,  6,  4,  0, 43,  3, 19,  1, 11,  0,  2,  3, 10,  2],
       [ 5,  1, 21,  5,  9, 19, 27,  3,  0,  5,  3,  7,  1,  2,  0],
       [ 2,  4, 20, 14,  9,  0,  3,  8,  0,  2,  0, 18, 10, 33,  0],
       [ 3,  1, 10,  0,  1,  0,  0,  2,  0,  0, 11,  1,  2,  6,  0],
       [ 0,  3,  2, 32,  1,  2,  8,  0,  4,  0, 50,  2,  3,  4,  6],
       [ 1,  0,  5,  1,  0,  2,  2,  2,  0,  0,  0,  2,  6,  0,  0],
       [ 3,  0,  2, 23,  2,  0,  2, 38,  8,  3, 35, 30,  1,  1,  1],
       [ 3,  0,  0,  8,  5,  2, 43,  5, 21, 23,  0,  0,  1,  1,  2],
       [ 0,  1,  9, 17,  2,  0,  0,  0,  0,  0, 26,  8,  2,  2,  0],
       [ 0,  2,  0,  0,  2,  2,  2,  2,  0,  0,  5, 11,  1,  7,  4],
       [ 0,  0,  0,  0,  0,  1,  0,  0,  1,  1,  0,  1,  0,  0,  0],
       [ 6, 11,  0,  0,  1,  3, 79,  0,  5,  6,  3,  0,  0, 37,  1],
       [ 0,  1,  0,  2,  3,  2,  0,  3,  4,  1,  0,  2,  0,  4,  0],
       [ 1,  0,  0,  1,  0,  1,  7,  1,  0,  4,  0,  0,  0,  0,  2],
       [ 3,  1, 28,  0,  0,  0,  2, 32,  1,  7,  1, 26,  0, 17,  4],
       [ 0,  0,  1,  0,  0,  1,  6,  0,  3,  0,  0,  5,  0,  1,  2],
       [ 0,  3, 50, 13,  1,  0,  9,  3,  2,  2,  7,  0,  0, 11,  6],
       [ 0,  0,  1,  0,  0,  0,  0,  2,  1,  0,  1,  0,  0,  0,  0],
       [ 0,  0,  7,  0,  0, 17, 24,  1,  1, 13, 20, 17, 10,  1,  1],
       [ 0,  1,  1,  6,  1, 22,  0, 27,  1,  1, 26,  8, 12,  0,  0],
       [ 0,  0, 14, 10, 17,  0, 18,  0,  3,  0,  0, 13,  0,  0,  2],
       [ 0,  4, 25, 46, 30,  0,  0,  0,  6,  2,  4,  8,  0,  0, 24],
       [ 1,  2,  0,  0,  2,  4,  6, 14,  3, 22, 34,  0,  6, 19,  2]])

ncou = 25
nspp = 15

class SoftMax(pm.model.DeterministicWrapper):
    def __init__(self, x):
        self.x = tt.as_tensor_variable(x)
        
    def __call__(self, x):
        sofx,_ = T.scan(fn=tt.nnet.softmax, sequences=[self.x])
        return sofx

with pm.Model() as Modelx:
    
    # Dyads
    sd_dist = pm.Exponential.dist(1.0)
    chol_dyad, _, _ = pm.LKJCholeskyCov('chol_dyad', n=ncou, eta=8, sd_dist=sd_dist, compute_corr=True)
    z = pm.Normal('z', 0, 1, shape=(ncou, ncou))
    d = pm.Deterministic("d", pm.math.dot(chol_dyad, z).T.reshape((ncou, 1, ncou)))
    
    # Intercept
    β0 = pm.Normal('β0', 0, 2)
    
    # Hyperprior
    ki = pm.Normal('ki', 0, 1, shape=nspp)
    # Spp by cou
    sigmaki = pm.Uniform('ski', 0, 1)
    βki_z = pm.Normal('βki_z', 0, 1, shape=(ncou,nspp))
    βki = pm.Deterministic('βki', ki+βki_z*sigmaki)
    
    # Mean model
    μijk = β0+d+βki.T
    
    # Estimated proportions
    Pijk = pm.Deterministic('Pijk', SoftMax(μijk))
    
    # Multinomial
    Tijk = pm.Multinomial("Tijk", Cjk_obs, Pijk, shape=(ncou,nspp,ncou))

with the resulting error

---------------------------------------------------------------------------
AttributeError                            Traceback (most recent call last)
/var/folders/61/kzz9zs3s1gdcy367fbq9v3740000gn/T/ipykernel_46747/4023565185.py in <module>
     21 
     22     # Estimated proportions
---> 23     Pijk = pm.Deterministic('Pijk', SoftMax(μijk))
     24 
     25     # Multinomial

/opt/anaconda3/lib/python3.8/site-packages/pymc3/model.py in Deterministic(name, var, model, dims)
   1944     """
   1945     model = modelcontext(model)
-> 1946     var = var.copy(model.name_for(name))
   1947     model.deterministics.append(var)
   1948     model.add_random_variable(var, dims)

/opt/anaconda3/lib/python3.8/site-packages/theano/tensor/var.py in copy(self, name)
    628         Does not copy the tags.
    629         """
--> 630         copied_variable = theano.tensor.basic.tensor_copy(self)
    631         copied_variable.name = name
    632         return copied_variable

/opt/anaconda3/lib/python3.8/site-packages/theano/graph/op.py in __call__(self, *inputs, **kwargs)
    248         """
    249         return_list = kwargs.pop("return_list", False)
--> 250         node = self.make_node(*inputs, **kwargs)
    251 
    252         if config.compute_test_value != "off":

/opt/anaconda3/lib/python3.8/site-packages/theano/tensor/elemwise.py in make_node(self, *inputs)
    497         using DimShuffle.
    498         """
--> 499         inputs = list(map(as_tensor_variable, inputs))
    500         out_dtypes, out_broadcastables, inputs = self.get_output_info(
    501             DimShuffle, *inputs

/opt/anaconda3/lib/python3.8/site-packages/theano/tensor/basic.py in as_tensor_variable(x, name, ndim)
    141             return as_tensor_variable(x.data, name=name, ndim=ndim)
    142 
--> 143         if isinstance(x.type, scal.Scalar):
    144             x = tensor_from_scalar(x)
    145 

AttributeError: 'SoftMax' object has no attribute 'type'

While the model obviously isn’t complete, I could use some help using theano.scan to do a softmax loop over the deterministics passed in to it.

What do you mean by running the softmax iteratively?

Are you trying to just apply it to each row/col of the matrix?

You could do that without a Scan, perhaps just reshaping your matrix or transposing it so that it applies it in the expected order.

By iterative, I would expect you are using the previous passes as part of your input in the next iteration, but I don’t see anything like that.

Otherwise I don’t see why you would need to use that wrapper class at all. You can just use a scan directly inside the model (or a Deterministic)

Doh! Re-shaping - of course! Yes, you’re right, this isn’t iterative, and now that I can see that this can all be re-shaped in and out of Theano’s softmax there isn’t a need for scan. I’ll post a solution once I have it sorted, but will come up with an interative example too: I think there are several classes of models for which it would be helpful to understand how to do it in Aesara.

Thanks for the help.

1 Like