Power law distribution with exponential cutoff

Hi all,

I am conducting research in which I have three predictors and one outcome variable. The outcome variable follows a power law with an exponential cutoff. I want to model the relationship between these predictors and the outcome variable, and — after not finding any existing guidance — I am trying two approaches, which seem to be giving me different results. Can anyone please help me in this regard? I would be really thankful. In the following I provide the details of my model:

powm = predictor 1
achm = predictor 2
affm = predictor 3
datan = outcome variable.

Approach #1

class PowerLawExpCutoff(Continuous):
    """Power law with exponential cutoff: p(x) ∝ x**(-alpha) * exp(-lam * x).

    Parameters
    ----------
    alpha : tensor-like
        Power-law exponent; logp is -inf unless alpha > 0.
    lam : tensor-like
        Rate of the exponential cutoff; logp is -inf unless lam > 0.

    NOTE(review): ``logp`` below returns only the un-normalised log-kernel.
    The normalising constant of this density depends on both alpha and lam
    (via an upper incomplete gamma function), so dropping it biases the
    posterior over alpha and lam — this may explain discrepancies with other
    parameterisations.  TODO: confirm whether the normaliser should be added.
    """

    def __init__(self, alpha, lam, *args, **kwargs):
        # Original code mis-indented this method (1 space vs 4), which makes
        # the class fail to parse; indentation is normalised here.
        super(PowerLawExpCutoff, self).__init__(*args, **kwargs)
        self.alpha = alpha = tt.as_tensor_variable(alpha)
        self.lam = lam = tt.as_tensor_variable(lam)
        # NOTE(review): 1/alpha is not the mean of this distribution; these
        # attributes only seed starting values in PyMC3, but they should be
        # corrected or removed — TODO confirm intended values.
        self.mean = 1 / alpha
        self.median = self.mean
        self.mode = self.mean

    def logp(self, value):
        """Un-normalised log-density of the kernel x**(-alpha) * exp(-lam*x)."""
        alpha = self.alpha
        lam = self.lam
        logp = -alpha * tt.log(value) - lam * value
        # The support is strictly positive: the original bound used
        # ``value >= 0``, which admits value == 0 where tt.log(value) yields
        # -inf/NaN and can break the sampler; ``value > 0`` returns a clean
        # -inf instead.
        return bound(logp, value > 0, alpha > 0, lam > 0)

# Approach #1: use the custom distribution above as the likelihood, with a
# linear model entering through its power-law exponent.
with pm.Model() as model:
    # Intercept and regression coefficients.
    # NOTE(review): testval=10 is a 10-standard-deviation starting point for
    # a Normal(0, 1) prior — the sampler starts deep in the tail; confirm
    # this is intentional.
    alpha = pm.Normal('alpha', mu=0, sigma=1, testval=10)
    beta1 = pm.Normal('beta1', mu=0, sigma=1, testval=10)
    beta2 = pm.Normal('beta2', mu=0, sigma=1, testval=10)
    beta3 = pm.Normal('beta3', mu=0, sigma=1, testval=10)
    lam = pm.Gamma('lam', alpha=5, beta=1, testval=5)
    # NOTE(review): the original model also declared
    #     sigma = pm.HalfNormal('sigma', sigma=1)
    # but sigma appeared nowhere in the likelihood, so it only added an inert
    # dimension to the posterior; it has been removed.

    # Linear predictor for the power-law exponent.
    # NOTE(review): this can be negative, in which case
    # PowerLawExpCutoff.logp returns -inf (it bounds alpha > 0) — consider a
    # log link such as tt.exp(...) to keep the exponent positive.
    mu = alpha + tt.dot(powm, beta1) + tt.dot(achm, beta2) + tt.dot(affm, beta3)

    # Power-law-with-exponential-cutoff likelihood for the observed outcome.
    y_obs = PowerLawExpCutoff('y_obs', alpha=mu, lam=lam, observed=datan)

    # Inference.
    trace1 = pm.sample(draws=5000, tune=5000, chains=4, cores=1)

# trace plot

# plot posterior

# check the summary stats

Approach #2

def power_law_with_exponential_cutoff_logp(a, b, value):
    """Element-wise log-density log(a/b) + (a-1)*log(value/b) - (value/b)**a.

    NOTE(review): despite its name, this is the Weibull (stretched
    exponential) log-pdf with shape ``a`` and scale ``b``, not a power law
    with exponential cutoff (x**(-alpha) * exp(-lam*x)).  This mismatch is a
    plausible reason the two modelling approaches in this file disagree —
    confirm which density is intended.
    """
    scaled = value / b
    return tt.log(a / b) + (a - 1) * tt.log(scaled) - tt.power(scaled, a)

# Define a function to compute the logp
def custom_logp(a, b, value):
    """Thin alias forwarding to ``power_law_with_exponential_cutoff_logp``."""
    logp_fn = power_law_with_exponential_cutoff_logp
    return logp_fn(a, b, value)

# Define the model
# Approach #2: a DensityDist likelihood built from the hand-written logp.
with pm.Model() as model:
    # Priors
    a = pm.Exponential("a", 1.)
    b = pm.Exponential("b", 1.)
    beta = pm.Normal("beta", 0, 1, shape=3)

    # Deterministic linear predictor.
    linear_combination = beta[0]*powm + beta[1]*achm + beta[2]*affm

    # Likelihood.  Two defects in the original are fixed here:
    #  * the pm.DensityDist(...) call was never closed and carried no
    #    observed= argument, so this block did not parse;
    #  * the lambda's parameter was named `linear_combination`, shadowing the
    #    deterministic above — DensityDist passes the *observed* data to the
    #    logp, so the predictors never entered the likelihood at all.  The
    #    lambda argument is renamed to `value` to make that explicit.
    # NOTE(review): as written, the linear predictor still does not appear in
    # the likelihood — choose a link (e.g. make `b` a function of
    # linear_combination) so the predictors actually influence Y_obs.
    Y_obs = pm.DensityDist("Y_obs",
                           lambda value: custom_logp(a, b, value),
                           observed=datan)

    # Sample from the posterior
    trace2 = pm.sample(draws=2000, tune=2000, cores=1)

# trace plot

# plot posterior

# check the summary stats