How can I use plot_trace to draw each subject's results in a separate figure?

############ the model fit code ###############
import pickle

import arviz as az
import theano.tensor as tt
from theano import scan
from pymc3 import Model, HalfCauchy, Deterministic, Categorical, fit


def hmm_fit(inp, nans, n_subs, last):
    # inp - array containing responses, outcomes, and a switch variable which turns off updates in the presence of nans
    # nans - bool array pointing to the locations of nan responses and outcomes
    # n_subs - int value, total number of subjects (each subject is fitted with its own parameter values)
    # last - int value, a negative number denoting how many of the last trials to exclude from parameter estimation,
    # e.g. setting last = -35 excludes the last 35 trials from parameter estimation.

    # define the hierarchical parametric model for the ED-HMM
    d_max = 200  # maximal value for state duration
    with Model() as hmm:
        d = tt.arange(d_max)  # vector of possible duration values from 0 to d_max - 1
        d = tt.tile(d, (n_subs, 1))
        P = tt.ones((2, 2)) - tt.eye(2)  # permutation matrix

        # set prior state probability
        theta0 = tt.ones(n_subs) / 2

        # set hierarchical prior for the delta parameter of the prior beliefs p_0(d)
        dtau = HalfCauchy('dtau', beta=1)
        dloc = HalfCauchy('dloc', beta=dtau, shape=(n_subs,))
        # rate of prior beliefs
        delta = Deterministic('delta', dloc / (1 + dloc))

        # fix the r parameter of the prior beliefs p_0(d) (held constant, no hierarchical prior)
        r = 1.0

        # compute prior beliefs over state durations for the given delta and r
        binomln = tt.gammaln(d + r) - tt.gammaln(d + 1) - tt.gammaln(r)
        pd0 = tt.nnet.softmax(binomln + d * tt.log(1 - delta[:, None]) + r * tt.log(delta[:, None]))

        # set joint probability distribution
        joint0 = tt.stack([theta0[:, None] * pd0, (1 - theta0)[:, None] * pd0]).dimshuffle(1, 0, 2)

        # set hierarchical priors for response noises
        btau = HalfCauchy('btau', beta=1)
        bloc = HalfCauchy('bloc', beta=btau, shape=(n_subs,))
        # response precision
        beta = Deterministic('beta', 1 / bloc)

        # set hierarchical priors for initial beliefs about reward probability
        mtau = HalfCauchy('mtau', beta=4)
        mloc = HalfCauchy('mloc', beta=mtau, shape=(n_subs, 2))
        # initial expectations
        muA = Deterministic('muA', mloc[:, 0] / (1 + mloc[:, 0]))
        muB = Deterministic('muB', 1 / (1 + mloc[:, 1]))
        init = tt.stacklists([[10 * muA, 10 * (1 - muA)], [10 * muB, 10 * (1 - muB)]]).dimshuffle(2, 0, 1)

        # compute the posterior beliefs over states, durations, and reward probabilities
        (post, _) = scan(edhmm_model,
                         sequences=[inp],
                         outputs_info=[init, joint0],
                         non_sequences=[pd0, P, range(n_subs)],
                         name='hmm')

        # get posterior reward probability and state probability
        a0 = init[None, :, :, 0]
        b0 = init[None, :, :, 1]
        a = tt.concatenate([a0, post[0][:-1, :, :, 0]])
        b = tt.concatenate([b0, post[0][:-1, :, :, 1]])
        mu = Deterministic('mu', a / (a + b))
        theta = Deterministic('theta',
                              tt.concatenate([theta0[None, :], post[1][:-1].sum(axis=-1)[:, :, 0]])[:, :, None])
        # compute choice dependent expected reward probability
        mean = (theta * mu + (1 - theta) * mu.dot(P))

        # compute expected utility
        U = Deterministic('U', 2 * mean - 1)

        # set hierarchical priors for response biases
        ctau = HalfCauchy('ctau', beta=1)
        cloc = HalfCauchy('cloc', beta=ctau, shape=(n_subs,))
        c0 = Deterministic('c0', cloc / (1 + cloc))

        # compute response noise and response bias modulated expected free energy
        G = Deterministic('G', beta[None, :, None] * U + tt.log(tt.stack([c0, 1 - c0])).T[None, :, :])

        # compute response probability for the pre-reversal and reversal phases of the experiment
        nzero = tt.nonzero(~nans[:last])
        p = Deterministic('p', tt.nnet.softmax(G[:last][nzero]))

        # set observation likelihood of responses
        responses = inp[:last, :, 0][~nans[:last]]
        Categorical('obs', p=p, observed=responses)

    # fit the model with ADVI
    with hmm:
        approx = fit(method='advi', n=20000, progressbar=True)

    return approx
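
For reference, a minimal sketch of how the approximation loaded below might have been produced and saved; inp_data and nans_data are hypothetical placeholders for the actual response/outcome arrays, and edhmm_model must be defined before hmm_fit can run:

# hypothetical usage: fit the model and pickle the approximation object
# inp_data, nans_data are placeholders for the real data arrays
approx = hmm_fit(inp_data, nans_data, n_subs=20, last=-35)
with open('approx_hmm.data', 'wb') as f:
    pickle.dump(approx, f)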

#################### plot trace #######################
filename = 'approx_hmm.data'
with open(filename, 'rb') as f:
    approx_hmm = pickle.load(f)
nsample = 1000
hmm_trace = approx_hmm.sample(nsample, include_transformed=True)
az.plot_trace(hmm_trace['G'], combined=True)
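
To draw each subject in its own figure instead of one combined plot, index the sampled array along the subject dimension and call plot_trace once per subject. A minimal sketch, assuming you want the per-subject beta draws (any variable with a subject dimension works the same way; the [None, :] reshape presents the draws to ArviZ as a single chain):

import matplotlib.pyplot as plt

beta_samples = hmm_trace['beta']  # shape (nsample, n_subs)
for s in range(beta_samples.shape[-1]):
    # reshape to (chain, draw) so ArviZ treats the samples as one chain
    az.plot_trace({'beta': beta_samples[None, :, s]}, combined=True)
    plt.gcf().suptitle('subject {}'.format(s))
plt.show()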

I think I've got your point. You're suggesting that I shouldn't fit all subjects in a single model, is that right?