Thank you very much for your answer, @junpenglao. I followed your advice and used pm.Potential('logp', logp(A, B, C, D, E)) together with pm.sample().
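For reference, here is a stripped-down sketch of the setup. The priors and the logp body below are placeholders for my real code (the actual likelihood is a black-box Python function, see the end of the post), chosen only so the snippet runs on its own:

```python
import pymc3 as pm
import theano.tensor as tt

def bayesMCMC():
    with pm.Model():
        # Placeholder priors: A, B, C are bounded (hence the *_interval__
        # transforms in the traceback), D and E are discrete, which is why
        # they end up on Metropolis.
        A = pm.Uniform('A', 0., 1.)
        B = pm.Uniform('B', 0., 1.)
        C = pm.Uniform('C', 0., 1.)
        D = pm.DiscreteUniform('D', 0, 11)
        E = pm.DiscreteUniform('E', 0, 20)

        # Stand-in for my black-box likelihood; in the real code this is a
        # plain Python function, not a Theano expression.
        def logp(A, B, C, D, E):
            return -0.5 * tt.sum(tt.stack([A, B, C, D, E]) ** 2)

        pm.Potential('logp', logp(A, B, C, D, E))
        trace = pm.sample(300)
    return trace
```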
I get a rather nasty error, which I transcribe below:
$ python del.py
Only 300 samples in chain.
Multiprocess sampling (2 chains in 2 jobs)
CompoundStep
>CompoundStep
>>Slice: [C]
>>Slice: [B]
>>Slice: [A]
>CompoundStep
>>Metropolis: [E]
>>Metropolis: [D]
Sampling 2 chains: 0%| | 0/1600 [00:00<?, ?draws/s]
pymc3.parallel_sampling.RemoteTraceback:
"""
Traceback (most recent call last):
File "/home/gabriel/anaconda3/envs/asteca3/lib/python3.7/site-packages/theano/compile/function_module.py", line 903, in __call__
self.fn() if output_subset is None else\
ValueError: expected an ndarray
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "/home/gabriel/anaconda3/envs/asteca3/lib/python3.7/site-packages/pymc3/parallel_sampling.py", line 73, in run
self._start_loop()
File "/home/gabriel/anaconda3/envs/asteca3/lib/python3.7/site-packages/pymc3/parallel_sampling.py", line 113, in _start_loop
point, stats = self._compute_point()
File "/home/gabriel/anaconda3/envs/asteca3/lib/python3.7/site-packages/pymc3/parallel_sampling.py", line 139, in _compute_point
point, stats = self._step_method.step(self._point)
File "/home/gabriel/anaconda3/envs/asteca3/lib/python3.7/site-packages/pymc3/step_methods/compound.py", line 29, in step
point = method.step(point)
File "/home/gabriel/anaconda3/envs/asteca3/lib/python3.7/site-packages/pymc3/step_methods/compound.py", line 33, in step
point = method.step(point)
File "/home/gabriel/anaconda3/envs/asteca3/lib/python3.7/site-packages/pymc3/step_methods/arraystep.py", line 142, in step
apoint = self.astep(bij.map(point), *inputs)
File "/home/gabriel/anaconda3/envs/asteca3/lib/python3.7/site-packages/pymc3/step_methods/slicer.py", line 56, in astep
y = logp(q) - nr.standard_exponential()
File "/home/gabriel/anaconda3/envs/asteca3/lib/python3.7/site-packages/pymc3/blocking.py", line 257, in __call__
return self.fa(self.fb(x))
File "/home/gabriel/anaconda3/envs/asteca3/lib/python3.7/site-packages/pymc3/model.py", line 1107, in __call__
return self.f(**state)
File "/home/gabriel/anaconda3/envs/asteca3/lib/python3.7/site-packages/theano/compile/function_module.py", line 917, in __call__
storage_map=getattr(self.fn, 'storage_map', None))
File "/home/gabriel/anaconda3/envs/asteca3/lib/python3.7/site-packages/theano/gof/link.py", line 325, in raise_with_op
reraise(exc_type, exc_value, exc_trace)
File "/home/gabriel/anaconda3/envs/asteca3/lib/python3.7/site-packages/six.py", line 692, in reraise
raise value.with_traceback(tb)
File "/home/gabriel/anaconda3/envs/asteca3/lib/python3.7/site-packages/theano/compile/function_module.py", line 903, in __call__
self.fn() if output_subset is None else\
ValueError: expected an ndarray
Apply node that caused the error: MakeVector{dtype='float64'}(__logp_A_interval__, __logp_B_interval__, __logp_C_interval__, __logp_D, __logp_E, logp)
Toposort index: 9
Inputs types: [TensorType(float64, scalar), TensorType(float64, scalar), TensorType(float64, scalar), TensorType(float64, scalar), TensorType(float64, scalar), TensorType(float64, scalar)]
Inputs shapes: [(), (), (), (), (), ()]
Inputs strides: [(), (), (), (), (), ()]
Inputs values: [array(-1.38629436), array(-1.38629436), array(-1.38629436), array(-2.48490665), array(-3.04452244), -7993.053447180414]
Outputs clients: [[Sum{acc_dtype=float64}(MakeVector{dtype='float64'}.0)]]
Backtrace when the node is created(use Theano flag traceback.limit=N to make it longer):
File "del.py", line 38, in bayesMCMC
trace = pm.sample(300)
File "/home/gabriel/anaconda3/envs/asteca3/lib/python3.7/site-packages/pymc3/sampling.py", line 415, in sample
step = assign_step_methods(model, step, step_kwargs=step_kwargs)
File "/home/gabriel/anaconda3/envs/asteca3/lib/python3.7/site-packages/pymc3/sampling.py", line 151, in assign_step_methods
return instantiate_steppers(model, steps, selected_steps, step_kwargs)
File "/home/gabriel/anaconda3/envs/asteca3/lib/python3.7/site-packages/pymc3/sampling.py", line 72, in instantiate_steppers
step = step_class(vars=vars, **args)
File "/home/gabriel/anaconda3/envs/asteca3/lib/python3.7/site-packages/pymc3/step_methods/arraystep.py", line 65, in __new__
step.__init__([var], *args, **kwargs)
File "/home/gabriel/anaconda3/envs/asteca3/lib/python3.7/site-packages/pymc3/step_methods/slicer.py", line 47, in __init__
super(Slice, self).__init__(vars, [self.model.fastlogp], **kwargs)
File "/home/gabriel/anaconda3/envs/asteca3/lib/python3.7/site-packages/pymc3/model.py", line 233, in fastlogp
return self.model.fastfn(self.logpt)
File "/home/gabriel/anaconda3/envs/asteca3/lib/python3.7/site-packages/pymc3/model.py", line 716, in logpt
logp = tt.sum([tt.sum(factor) for factor in factors])
HINT: Use the Theano flag 'exception_verbosity=high' for a debugprint and storage map footprint of this apply node.
"""
The above exception was the direct cause of the following exception:
ValueError: expected an ndarray
Apply node that caused the error: MakeVector{dtype='float64'}(__logp_A_interval__, __logp_B_interval__, __logp_C_interval__, __logp_D, __logp_E, logp)
Toposort index: 9
Inputs types: [TensorType(float64, scalar), TensorType(float64, scalar), TensorType(float64, scalar), TensorType(float64, scalar), TensorType(float64, scalar), TensorType(float64, scalar)]
Inputs shapes: [(), (), (), (), (), ()]
Inputs strides: [(), (), (), (), (), ()]
Inputs values: [array(-1.38629436), array(-1.38629436), array(-1.38629436), array(-2.48490665), array(-3.04452244), -7993.053447180414]
Outputs clients: [[Sum{acc_dtype=float64}(MakeVector{dtype='float64'}.0)]]
Backtrace when the node is created(use Theano flag traceback.limit=N to make it longer):
File "del.py", line 38, in bayesMCMC
trace = pm.sample(300)
File "/home/gabriel/anaconda3/envs/asteca3/lib/python3.7/site-packages/pymc3/sampling.py", line 415, in sample
step = assign_step_methods(model, step, step_kwargs=step_kwargs)
File "/home/gabriel/anaconda3/envs/asteca3/lib/python3.7/site-packages/pymc3/sampling.py", line 151, in assign_step_methods
return instantiate_steppers(model, steps, selected_steps, step_kwargs)
File "/home/gabriel/anaconda3/envs/asteca3/lib/python3.7/site-packages/pymc3/sampling.py", line 72, in instantiate_steppers
step = step_class(vars=vars, **args)
File "/home/gabriel/anaconda3/envs/asteca3/lib/python3.7/site-packages/pymc3/step_methods/arraystep.py", line 65, in __new__
step.__init__([var], *args, **kwargs)
File "/home/gabriel/anaconda3/envs/asteca3/lib/python3.7/site-packages/pymc3/step_methods/slicer.py", line 47, in __init__
super(Slice, self).__init__(vars, [self.model.fastlogp], **kwargs)
File "/home/gabriel/anaconda3/envs/asteca3/lib/python3.7/site-packages/pymc3/model.py", line 233, in fastlogp
return self.model.fastfn(self.logpt)
File "/home/gabriel/anaconda3/envs/asteca3/lib/python3.7/site-packages/pymc3/model.py", line 716, in logpt
logp = tt.sum([tt.sum(factor) for factor in factors])
HINT: Use the Theano flag 'exception_verbosity=high' for a debugprint and storage map footprint of this apply node.
The above exception was the direct cause of the following exception:
Traceback (most recent call last):
File "del.py", line 102, in <module>
main()
File "del.py", line 12, in main
trace = bayesMCMC()
File "del.py", line 38, in bayesMCMC
trace = pm.sample(300)
File "/home/gabriel/anaconda3/envs/asteca3/lib/python3.7/site-packages/pymc3/sampling.py", line 449, in sample
trace = _mp_sample(**sample_args)
File "/home/gabriel/anaconda3/envs/asteca3/lib/python3.7/site-packages/pymc3/sampling.py", line 999, in _mp_sample
for draw in sampler:
File "/home/gabriel/anaconda3/envs/asteca3/lib/python3.7/site-packages/pymc3/parallel_sampling.py", line 305, in __iter__
draw = ProcessAdapter.recv_draw(self._active)
File "/home/gabriel/anaconda3/envs/asteca3/lib/python3.7/site-packages/pymc3/parallel_sampling.py", line 223, in recv_draw
six.raise_from(RuntimeError('Chain %s failed.' % proc.chain), old)
File "<string>", line 3, in raise_from
RuntimeError: Chain 1 failed.
I don’t think I could write the likelihood in Theano, because my actual code for generating the synthetic data is quite large and complicated, not just a random generator like the one I use in this example.
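What I gathered from the docs is that a plain-Python likelihood can still be exposed to PyMC3 via theano.compile.ops.as_op, without rewriting it in Theano. Below is a minimal sketch of how I understand that pattern; the itypes/otypes and the function body are placeholders, not my real code, and as far as I can tell the wrapped function has to return an ndarray (hence the np.asarray), not a plain Python float:

```python
import numpy as np
import theano.tensor as tt
from theano.compile.ops import as_op

# Stand-in for my real likelihood: in the actual code this runs the
# synthetic-data generator and compares it against the observations.
def synthetic_logp(A, B, C, D, E):
    return -0.5 * float(np.sum((np.array([A, B, C, D, E]) - 1.0) ** 2))

# as_op wraps the Python function as a Theano Op. The input/output types
# here are assumptions and would have to match the actual dtypes of A..E.
@as_op(itypes=[tt.dscalar] * 5, otypes=[tt.dscalar])
def logp(A, B, C, D, E):
    # Return a float64 ndarray, as the otypes declaration expects.
    return np.asarray(synthetic_logp(A, B, C, D, E), dtype=np.float64)
```

This way the Theano part would stay confined to the thin wrapper, while the heavy synthetic-data code stays in plain Python.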