How to use **kwargs to increase target_accept

My model looks like this,

# Attempt: forward target_accept to the NUTS constructor via **kwargs,
# then hand the pre-built step method to pm.sample.
with basic_model:
    epohs = 5000  # number of posterior draws requested

    # keyword arguments to forward to pm.NUTS
    kwargs = {'target_accept':0.85}
    step = pm.NUTS(**kwargs)

    trace = pm.sample(epohs, cores=1, step=step)
    pm.traceplot(trace)

But, I got errors,

Traceback (most recent call last):
File “trymcmc.py”, line 337, in
run()
File “trymcmc.py”, line 329, in run
step = pm.NUTS(**kwargs)
File “C:\Miniconda3\lib\site-packages\pymc3\step_methods\hmc\nuts.py”, line 15
2, in init
super().init(vars, **kwargs)
File “C:\Miniconda3\lib\site-packages\pymc3\step_methods\hmc\base_hmc.py”, lin
e 72, in init
super().init(vars, blocked=blocked, model=model, dtype=dtype, **theano_k
wargs)
File “C:\Miniconda3\lib\site-packages\pymc3\step_methods\arraystep.py”, line 2
28, in init
vars, dtype=dtype, **theano_kwargs)
File “C:\Miniconda3\lib\site-packages\pymc3\model.py”, line 723, in logp_dlogp
_function
return ValueGradFunction(self.logpt, grad_vars, extra_vars, **kwargs)
File “C:\Miniconda3\lib\site-packages\pymc3\model.py”, line 456, in init
grad = tt.grad(self._cost_joined, self._vars_joined)
File “C:\Miniconda3\lib\site-packages\theano\gradient.py”, line 605, in grad
grad_dict, wrt, cost_name)
File “C:\Miniconda3\lib\site-packages\theano\gradient.py”, line 1371, in _popu
late_grad_dict
rval = [access_grad_cache(elem) for elem in wrt]
File “C:\Miniconda3\lib\site-packages\theano\gradient.py”, line 1371, in
rval = [access_grad_cache(elem) for elem in wrt]
File “C:\Miniconda3\lib\site-packages\theano\gradient.py”, line 1326, in acces
s_grad_cache
term = access_term_cache(node)[idx]
File “C:\Miniconda3\lib\site-packages\theano\gradient.py”, line 1021, in acces
s_term_cache
output_grads = [access_grad_cache(var) for var in node.outputs]
File “C:\Miniconda3\lib\site-packages\theano\gradient.py”, line 1021, in
output_grads = [access_grad_cache(var) for var in node.outputs]
File “C:\Miniconda3\lib\site-packages\theano\gradient.py”, line 1326, in acces
s_grad_cache
term = access_term_cache(node)[idx]
File “C:\Miniconda3\lib\site-packages\theano\gradient.py”, line 1021, in acces
s_term_cache
output_grads = [access_grad_cache(var) for var in node.outputs]
File “C:\Miniconda3\lib\site-packages\theano\gradient.py”, line 1021, in
output_grads = [access_grad_cache(var) for var in node.outputs]
File “C:\Miniconda3\lib\site-packages\theano\gradient.py”, line 1326, in acces
s_grad_cache
term = access_term_cache(node)[idx]
File “C:\Miniconda3\lib\site-packages\theano\gradient.py”, line 1021, in acces
s_term_cache
output_grads = [access_grad_cache(var) for var in node.outputs]
File “C:\Miniconda3\lib\site-packages\theano\gradient.py”, line 1021, in
output_grads = [access_grad_cache(var) for var in node.outputs]
File “C:\Miniconda3\lib\site-packages\theano\gradient.py”, line 1326, in acces
s_grad_cache
term = access_term_cache(node)[idx]
File “C:\Miniconda3\lib\site-packages\theano\gradient.py”, line 1021, in acces
s_term_cache
output_grads = [access_grad_cache(var) for var in node.outputs]
File “C:\Miniconda3\lib\site-packages\theano\gradient.py”, line 1021, in
output_grads = [access_grad_cache(var) for var in node.outputs]
File “C:\Miniconda3\lib\site-packages\theano\gradient.py”, line 1326, in acces
s_grad_cache
term = access_term_cache(node)[idx]
File “C:\Miniconda3\lib\site-packages\theano\gradient.py”, line 1021, in acces
s_term_cache
output_grads = [access_grad_cache(var) for var in node.outputs]
File “C:\Miniconda3\lib\site-packages\theano\gradient.py”, line 1021, in
output_grads = [access_grad_cache(var) for var in node.outputs]
File “C:\Miniconda3\lib\site-packages\theano\gradient.py”, line 1326, in acces
s_grad_cache
term = access_term_cache(node)[idx]
File “C:\Miniconda3\lib\site-packages\theano\gradient.py”, line 1021, in acces
s_term_cache
output_grads = [access_grad_cache(var) for var in node.outputs]
File “C:\Miniconda3\lib\site-packages\theano\gradient.py”, line 1021, in
output_grads = [access_grad_cache(var) for var in node.outputs]
File “C:\Miniconda3\lib\site-packages\theano\gradient.py”, line 1326, in acces
s_grad_cache
term = access_term_cache(node)[idx]
File “C:\Miniconda3\lib\site-packages\theano\gradient.py”, line 1162, in acces
s_term_cache
new_output_grads)
File “C:\Miniconda3\lib\site-packages\theano\gof\op.py”, line 711, in L_op
return self.grad(inputs, output_grads)
AttributeError: ‘FromFunctionOp’ object has no attribute ‘grad’

How can I fix it? Thanks!

Hi, you can try the answer below:

nuts_kwargs = dict(target_accept=.90)

Thanks, it still does not work well.
Neither

# Attempt: pass target_accept through pm.sample's nuts_kwargs instead of
# constructing the NUTS step method explicitly.
with basic_model:

    epohs = 1500 
    trace = pm.sample(epohs, cores=1, nuts_kwargs=dict(target_accept=0.95))

or

    # Attempt: pass target_accept directly as a pm.sample keyword argument.
    kwargs = dict(target_accept=.90)
    trace = pm.sample(epohs, cores=1, **kwargs)

It seems the NUTS sampler failed and it changed to elementwise auto-assignment; please see the output.
Does the sampler method being used not support the target_accept keyword?

Auto-assigning NUTS sampler…
> Initializing NUTS using jitter+adapt_diag…
> Initializing NUTS failed. Falling back to elementwise auto-assignment.
Traceback (most recent call last):
File “C:\Miniconda3\lib\site-packages\pymc3\sampling.py”, line 396, in sample
progressbar=progressbar, **kwargs)
File “C:\Miniconda3\lib\site-packages\pymc3\sampling.py”, line 1515, in init_nuts
step = pm.NUTS(potential=potential, model=model, **kwargs)
File “C:\Miniconda3\lib\site-packages\pymc3\step_methods\hmc\nuts.py”, line 152, in init
super().init(vars, **kwargs)
File “C:\Miniconda3\lib\site-packages\pymc3\step_methods\hmc\base_hmc.py”, line 72, in init
super().init(vars, blocked=blocked, model=model, dtype=dtype, **theano_kwargs)
File “C:\Miniconda3\lib\site-packages\pymc3\step_methods\arraystep.py”, line 228, in init
vars, dtype=dtype, **theano_kwargs)
File “C:\Miniconda3\lib\site-packages\pymc3\model.py”, line 723, in logp_dlogp_function
return ValueGradFunction(self.logpt, grad_vars, extra_vars, **kwargs)
File “C:\Miniconda3\lib\site-packages\pymc3\model.py”, line 456, in init
grad = tt.grad(self._cost_joined, self._vars_joined)
File “C:\Miniconda3\lib\site-packages\theano\gradient.py”, line 605, in grad
grad_dict, wrt, cost_name)
File “C:\Miniconda3\lib\site-packages\theano\gradient.py”, line 1371, in _populate_grad_dict
rval = [access_grad_cache(elem) for elem in wrt]
File “C:\Miniconda3\lib\site-packages\theano\gradient.py”, line 1371, in
rval = [access_grad_cache(elem) for elem in wrt]
File “C:\Miniconda3\lib\site-packages\theano\gradient.py”, line 1326, in access_grad_cache
term = access_term_cache(node)[idx]
File “C:\Miniconda3\lib\site-packages\theano\gradient.py”, line 1021, in access_term_cache
output_grads = [access_grad_cache(var) for var in node.outputs]
File “C:\Miniconda3\lib\site-packages\theano\gradient.py”, line 1021, in
output_grads = [access_grad_cache(var) for var in node.outputs]
File “C:\Miniconda3\lib\site-packages\theano\gradient.py”, line 1326, in access_grad_cache
term = access_term_cache(node)[idx]
File “C:\Miniconda3\lib\site-packages\theano\gradient.py”, line 1021, in access_term_cache
output_grads = [access_grad_cache(var) for var in node.outputs]
File “C:\Miniconda3\lib\site-packages\theano\gradient.py”, line 1021, in
output_grads = [access_grad_cache(var) for var in node.outputs]
File “C:\Miniconda3\lib\site-packages\theano\gradient.py”, line 1326, in access_grad_cache
term = access_term_cache(node)[idx]
File “C:\Miniconda3\lib\site-packages\theano\gradient.py”, line 1021, in access_term_cache
output_grads = [access_grad_cache(var) for var in node.outputs]
File “C:\Miniconda3\lib\site-packages\theano\gradient.py”, line 1021, in
output_grads = [access_grad_cache(var) for var in node.outputs]
File “C:\Miniconda3\lib\site-packages\theano\gradient.py”, line 1326, in access_grad_cache
term = access_term_cache(node)[idx]
File “C:\Miniconda3\lib\site-packages\theano\gradient.py”, line 1021, in access_term_cache
output_grads = [access_grad_cache(var) for var in node.outputs]
File “C:\Miniconda3\lib\site-packages\theano\gradient.py”, line 1021, in
output_grads = [access_grad_cache(var) for var in node.outputs]
File “C:\Miniconda3\lib\site-packages\theano\gradient.py”, line 1326, in access_grad_cache
term = access_term_cache(node)[idx]
File “C:\Miniconda3\lib\site-packages\theano\gradient.py”, line 1021, in access_term_cache
output_grads = [access_grad_cache(var) for var in node.outputs]
File “C:\Miniconda3\lib\site-packages\theano\gradient.py”, line 1021, in
output_grads = [access_grad_cache(var) for var in node.outputs]
File “C:\Miniconda3\lib\site-packages\theano\gradient.py”, line 1326, in access_grad_cache
term = access_term_cache(node)[idx]
File “C:\Miniconda3\lib\site-packages\theano\gradient.py”, line 1021, in access_term_cache
output_grads = [access_grad_cache(var) for var in node.outputs]
File “C:\Miniconda3\lib\site-packages\theano\gradient.py”, line 1021, in
output_grads = [access_grad_cache(var) for var in node.outputs]
File “C:\Miniconda3\lib\site-packages\theano\gradient.py”, line 1326, in access_grad_cache
term = access_term_cache(node)[idx]
File “C:\Miniconda3\lib\site-packages\theano\gradient.py”, line 1162, in access_term_cache
new_output_grads)
File “C:\Miniconda3\lib\site-packages\theano\gof\op.py”, line 711, in L_op
return self.grad(inputs, output_grads)
AttributeError: ‘FromFunctionOp’ object has no attribute ‘grad’

During handling of the above exception, another exception occurred:

Traceback (most recent call last):
File “c:\Users\xialang2012.vscode\extensions\ms-python.python-2019.8.30787\pythonFiles\ptvsd_launcher.py”, line 43, in
main(ptvsdArgs)
File “c:\Users\xialang2012.vscode\extensions\ms-python.python-2019.8.30787\pythonFiles\lib\python\ptvsd_main_.py”, line 432, in main
run()
File “c:\Users\xialang2012.vscode\extensions\ms-python.python-2019.8.30787\pythonFiles\lib\python\ptvsd_main_.py”, line 316, in run_file
runpy.run_path(target, run_name=‘main’)
File “C:\Miniconda3\lib\runpy.py”, line 263, in run_path
pkg_name=pkg_name, script_name=fname)
File “C:\Miniconda3\lib\runpy.py”, line 96, in _run_module_code
mod_name, mod_spec, pkg_name, script_name)
File “C:\Miniconda3\lib\runpy.py”, line 85, in _run_code
exec(code, run_globals)
File “c:\Users\xialang2012\Documents\PythonApplication1\PythonApplication1\bgc-parameters-ana\mcmc-lai.py”, line 333, in
run()
File “c:\Users\xialang2012\Documents\PythonApplication1\PythonApplication1\bgc-parameters-ana\mcmc-lai.py”, line 328, in run
trace = pm.sample(epohs, cores=1, nuts_kwargs=dict(target_accept=0.95))
File “C:\Miniconda3\lib\site-packages\pymc3\sampling.py”, line 404, in sample
step = assign_step_methods(model, step, step_kwargs=kwargs)
File “C:\Miniconda3\lib\site-packages\pymc3\sampling.py”, line 155, in assign_step_methods
return instantiate_steppers(model, steps, selected_steps, step_kwargs)
File “C:\Miniconda3\lib\site-packages\pymc3\sampling.py”, line 81, in instantiate_steppers
raise ValueError(‘Unused step method arguments: %s’ % unused_args)
ValueError: Unused step method arguments: {‘target_accept’}

The main error seems to be related to one of the functions in your model not having a gradient. What do you see if you just do pm.sample()?

Thanks for your help.
What I intend to do is use PyMC3 to retrieve some parameters for a biological model. The current model looks like this

   varNum = 5  # number of free parameters to retrieve

   # Wrap the external biological model as a Theano Op so PyMC3 can call it
   # inside the model graph.  NOTE(review): an Op created with @as_op is a
   # FromFunctionOp and defines no grad() -- this is exactly why NUTS
   # initialization fails here with
   # "AttributeError: 'FromFunctionOp' object has no attribute 'grad'";
   # gradient-based samplers cannot see through this wrapper.
   @as_op(itypes=[tt.dvector], otypes=[tt.dvector, tt.dvector])
   def CallOutFunc(x):
       """Run the external synthesis model on parameter vector x.

       Returns two 1-D double vectors -- presumably the simulated values
       and the matching observations (used as mu and observed below;
       confirm against synS.GetSynthesis).
       """
       a, b = synS.GetSynthesis(x)
       return a, b

# Model: truncated-normal priors over the unknown parameter vector plus a
# Gaussian likelihood around the output of the wrapped external model.
with pm.Model() as basic_model:

    # Priors for unknown model parameters
    beta = pm.TruncatedNormal('beta', sigma=5, lower=uniMin, upper=uniMax, shape=varNum)
    sigma = pm.HalfNormal('sigma', sigma=1)

    # Expected value of outcome.
    # NOTE: mu depends on beta only through the gradient-free as_op wrapper,
    # so NUTS cannot sample beta; per the sampler output, PyMC3 falls back
    # to Slice for beta and keeps NUTS only for sigma.
    mu, observedYGpp = CallOutFunc(beta)

    # Likelihood (sampling distribution) of observations
    Y_obs = pm.Normal('Y_obs', mu=mu, sigma=sigma, observed=observedYGpp)

def run():
    """Sample basic_model, plot and summarize the trace, and save it to disk."""
    with basic_model:
        epochs = 3000  # number of posterior draws per chain

        trace = pm.sample(epochs, cores=1)
        pm.traceplot(trace)
        print(pm.summary(trace))
        # Persist the trace so long runs do not have to be repeated.
        pm.save_trace(trace, resourcePath + os.sep + 'result-' + str(epochs) + '.trace')

In synS.GetSynthesis(x), the biological model is called using the parameter x; I get the simulation value and then give that value to PyMC3 to evaluate. That is the overall logic. I do not know if I made some mistake in the logic.

So far, pm.sample works well, as shown,

PANDAS_TYPES = (pd.Series, pd.DataFrame, pd.Panel)
SynthesisSiteSimulation
Auto-assigning NUTS sampler…
Initializing NUTS using jitter+adapt_diag…
Initializing NUTS failed. Falling back to elementwise auto-assignment.
Sequential sampling (2 chains in 1 job)
CompoundStep

Slice: [beta]
NUTS: [sigma]
0%| | 0/3500 [00:00<?, ?it/s]

and I could get the parameters. When I increased the number of retrieved parameters, e.g., from 5 to 8, PyMC would ask me to increase target_accept. Hence, I tried to add **kwargs to make it work.

When NUTS failed to initialize, the Slice sampler was used instead. It seems the Slice sampler does not support the target_accept keyword?

That’s probably exactly the reason. I am surprised that pm.sample(nuts_kwargs=dict(target_accept=.90)) doesn’t work; I will see if I can reproduce the error and then raise an issue on GitHub.

Thanks. If you cannot reproduce the error, I can send you all the code for testing.