Sampling from distribution.dist

How can I draw posterior samples from a `distribution.dist` object?
For example, I would like to sample from `n` in the following code — is that possible?

    import numpy as np
    import matplotlib.pyplot as plt
    import pymc3 as pm
    import theano.tensor as tt
    import scipy.special
    import arviz as az
    true_p = 0.7        # true binomial success probability
    n = 4               # true number of binomial trials
    N_data = 160        # number of observations
    true_mean = 10      # spacing between component means
    true_sigma = 1      # within-component standard deviation

    # Latent count per observation, then a normal draw whose mean is
    # (count + 1) * true_mean. Note: `data` is concatenated group by group,
    # so it is NOT index-aligned with `s`.
    s = np.random.binomial(n, true_p, N_data)
    mean = [true_mean * (count + 1) for count in range(n + 1)]
    groups = []
    for value in range(n + 1):
        members = s[s == value]
        groups.append(np.random.normal(mean[value], true_sigma, len(members)))
    data = np.concatenate(groups)


    def Bin_pmf(n, p):
        """Return the binomial pmf [P(X=0), ..., P(X=n)] for n trials.

        `p` may be a plain float or a symbolic (theano/pymc3) scalar,
        since only scipy.special.binom on ints plus Python arithmetic
        is used.
        """
        pmf = []
        for successes in range(n + 1):
            coeff = scipy.special.binom(n, successes)
            pmf.append(coeff * p ** successes * (1 - p) ** (n - successes))
        return pmf


    k = 8  # padded number of mixture components (max over all candidates)
    l = 3  # number of candidate values of n considered by the model
    # Candidate values of n to marginalize over: n-2, n, n+2.
    n_dist = [n + offset for offset in (-2, 0, 2)]

    with pm.Model() as fixed_n_model:
        # Placeholder symbolic vectors; every entry is overwritten by the
        # tt.stack(...) assignments in the loop below, so these two lines
        # only pre-size the Python lists.
        a = [tt.vector() for _ in range(l)]
        b = [tt.vector() for _ in range(l)]
        p = pm.Beta("p", 2, 2, shape=l)      # per-candidate success probability
        mu = pm.Uniform('mu', 0, 20)         # spacing of the component means
        alpha = pm.Normal("alpha", 20, 20)   # BetaBinomial shape parameter
        beta = pm.Normal("beta", 20, 20)     # BetaBinomial shape parameter
        # NOTE(review): pm.BetaBinomial.dist(...) creates an unnamed
        # distribution object, not a model random variable, so pm.sample()
        # will NOT draw posterior samples for it. It also shadows the Python
        # int n = 4 defined above; only n.logp(i) below reads it.
        n = pm.BetaBinomial.dist(alpha, beta, l-1) # TODO make a posterior sampling for n
        for i in range(l):

            # Mean multipliers 1..n_dist[i]+1, zero-padded out to length k
            # so every candidate mixture has the same number of components.
            t = [(j + 1) for j in range(n_dist[i] + 1)] + \
                [0 for i in range(k - n_dist[i] - 1)]
            a[i] = tt.stack([j * mu for j in t])  # component means j * mu
            # Binomial pmf weights for n_dist[i] trials, zero-padded to k.
            b[i] = tt.stack(Bin_pmf(n_dist[i], p[i]) +
                            [0 for j in range(k - n_dist[i] - 1)])
        sigma = pm.HalfCauchy('sigma', 2)  # shared component spread
        # k normal components per candidate n, with means a[i] and a
        # single shared sigma.
        components = [pm.Normal.dist(mu=a[i],
                                     sigma=sigma, shape=k) for i in range(l)]
        # Inner mixtures: one per candidate value of n (weights b[i]).
        like = [pm.Mixture.dist(w=b[i],
                                comp_dists=components[i]) for i in range(l)]
        # Outer mixture over the l candidates, weighted by the BetaBinomial
        # probability mass exp(n.logp(i)); this marginalizes n out of the
        # likelihood instead of sampling it.
        like = pm.Mixture("like", w=tt.stack([np.exp(n.logp(i)) for i in range(
            l)]), comp_dists=[like[i] for i in range(l)], observed=data)


    with fixed_n_model:
        # MAP estimate as a starting point, with hand-tuned overrides.
        # NOTE(review): `start` is never passed to pm.sample below, so
        # these overrides currently have no effect on the trace.
        start = pm.find_MAP()
        start["p"] = 0.8
        start["mu"] = 10
        start["sigma"] = 0.5
        # NUTS over the continuous parameters only; alpha/beta get the
        # default step method assigned by pm.sample.
        step1 = pm.NUTS([sigma, p, mu], max_treedepth=15, target_accept=0.95)
    # =============================================================================
    #     step2 = pm.Metropolis([beta],cores=3)
    # =============================================================================
        trace = pm.sample(10000, step=[step1])
    pm.traceplot(trace)

    # Visual diagnostics: posterior densities plus a posterior-predictive check.
    az.plot_posterior(trace)
    ppc = pm.sample_posterior_predictive(trace, samples=500, model=fixed_n_model)
    fig1, ax1 = plt.subplots()
    n_bins = round(N_data / 2)
    # Posterior-predictive draws at observation index 1, overlaid with the
    # histogram of the raw data.
    ax1.hist(np.asarray(ppc['like'])[:, 1], n_bins)
    ax1.hist(data, bins=n_bins)