Initially I am using Spyder, but I tried with Jupyter and got the same problem, and I did not really get how to find the exception. Here is the problem:
import time
import numpy as np
import pymc3 as pm
import theano.tensor as tt
from scipy.stats import multivariate_normal

def my_model(theta, x):
    [...]
    return prediction

def my_loglike(theta, x, data, sigma):
    model = my_model(theta, x)
    return multivariate_normal(data, sigma).logpdf(model)  # log density, as the Op below expects
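To rule out the likelihood itself, it can be evaluated once with plain NumPy inputs; a minimal check (the test values, grid, and sigma below are my assumptions, the real x and data come from elsewhere in my script):

# Assumed test inputs, only to confirm my_loglike runs outside PyMC3:
theta_test = np.array([1.0, 1.0, 1.0, 1.0])
x_test = np.linspace(0.0, 1.0, 10)          # hypothetical grid
data_test = my_model(theta_test, x_test)    # noiseless "observation"
print(my_loglike(theta_test, x_test, data_test, sigma=1.0))

If this raises, the problem is in the model code itself rather than in the sampling.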
class LogLike(tt.Op):
    itypes = [tt.dvector]  # expects a vector of parameter values when called
    otypes = [tt.dscalar]  # outputs a single scalar value (the log-likelihood)

    def __init__(self, loglike, data, x, sigma):
        # add inputs as class attributes
        self.likelihood = loglike
        self.data = data
        self.x = x
        self.sigma = sigma

    def perform(self, node, inputs, outputs):
        # the method that is used when calling the Op
        theta, = inputs  # this will contain my variables
        # call the log-likelihood function
        logl = self.likelihood(theta, self.x, self.data, self.sigma)
        outputs[0][0] = np.array(logl)  # output the log-likelihood
ndraws = 500
nburn = 0
chains = 2
njobs = 2

# create our Op
logl = LogLike(my_loglike, data, x, sigma)

def my_mu(v):
    return logl(v)
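Since I did not really get how to surface the exception, one thing that seems sensible is to compile and call the Op once in the main process before sampling, so any error in the likelihood is raised directly instead of inside a worker (the test vector is an assumption):

# Evaluate the Op once in the main process; an exception in my_loglike
# would show up here rather than as a broken worker pipe.
import theano
v = tt.dvector('v')
check_logl = theano.function([v], logl(v))
print(check_logl(np.array([1.0, 1.0, 1.0, 1.0])))  # assumed test values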
tim_init = time.process_time()

# use PyMC3 to sample from the log-likelihood
if __name__ == "__main__":
    with pm.Model() as model1:
        var1 = pm.Triangular('var1', lower=0.2, upper=1.8, c=1)
        var2 = pm.Triangular('var2', lower=0.2, upper=1.8, c=1)
        var3 = pm.Triangular('var3', lower=0.2, upper=1.8, c=1)
        var4 = pm.Triangular('var4', lower=0.2, upper=1.8, c=1)

        # convert the parameters to a tensor vector
        theta = tt.as_tensor_variable([var1, var2, var3, var4])

        # use a DensityDist (my_mu "calls" the Op)
        pm.DensityDist('likelihood', my_mu, observed={'v': theta})

        step = pm.Slice()
        trace = pm.sample(ndraws, tune=nburn, discard_tuned_samples=True,
                          chains=chains, step=step)
---------------------------------------------------------------------------
BrokenPipeError Traceback (most recent call last)
<ipython-input-2-48365da83eef> in <module>
498 pm.DensityDist('likelihood', my_mu, observed={'v': theta})# lambda v: logl(v)
499 step = pm.Slice()
--> 500 trace = pm.sample(ndraws, tune=nburn, discard_tuned_samples=True, chains=chains, step=step)
501
502
~\Anaconda2\envs\pymc\lib\site-packages\pymc3\sampling.py in sample(draws, step, init, n_init, start, trace, chain_idx, chains, cores, tune, nuts_kwargs, step_kwargs, progressbar, model, random_seed, live_plot, discard_tuned_samples, live_plot_kwargs, compute_convergence_checks, use_mmap, **kwargs)
437 _print_step_hierarchy(step)
438 try:
--> 439 trace = _mp_sample(**sample_args)
440 except pickle.PickleError:
441 _log.warning("Could not pickle model, sampling singlethreaded.")
~\Anaconda2\envs\pymc\lib\site-packages\pymc3\sampling.py in _mp_sample(draws, tune, step, chains, cores, chain, random_seed, start, progressbar, trace, model, use_mmap, **kwargs)
984 sampler = ps.ParallelSampler(
985 draws, tune, chains, cores, random_seed, start, step,
--> 986 chain, progressbar)
987 try:
988 try:
~\Anaconda2\envs\pymc\lib\site-packages\pymc3\parallel_sampling.py in __init__(self, draws, tune, chains, cores, seeds, start_points, step_method, start_chain_num, progressbar)
311 draws, tune, step_method, chain + start_chain_num, seed, start
312 )
--> 313 for chain, seed, start in zip(range(chains), seeds, start_points)
314 ]
315
~\Anaconda2\envs\pymc\lib\site-packages\pymc3\parallel_sampling.py in <listcomp>(.0)
311 draws, tune, step_method, chain + start_chain_num, seed, start
312 )
--> 313 for chain, seed, start in zip(range(chains), seeds, start_points)
314 ]
315
~\Anaconda2\envs\pymc\lib\site-packages\pymc3\parallel_sampling.py in __init__(self, draws, tune, step_method, chain, seed, start)
202 )
203 # We fork right away, so that the main process can start tqdm threads
--> 204 self._process.start()
205
206 @property
~\Anaconda2\envs\pymc\lib\multiprocessing\process.py in start(self)
103 'daemonic processes are not allowed to have children'
104 _cleanup()
--> 105 self._popen = self._Popen(self)
106 self._sentinel = self._popen.sentinel
107 # Avoid a refcycle if the target function holds an indirect
~\Anaconda2\envs\pymc\lib\multiprocessing\context.py in _Popen(process_obj)
221 @staticmethod
222 def _Popen(process_obj):
--> 223 return _default_context.get_context().Process._Popen(process_obj)
224
225 class DefaultContext(BaseContext):
~\Anaconda2\envs\pymc\lib\multiprocessing\context.py in _Popen(process_obj)
320 def _Popen(process_obj):
321 from .popen_spawn_win32 import Popen
--> 322 return Popen(process_obj)
323
324 class SpawnContext(BaseContext):
~\Anaconda2\envs\pymc\lib\multiprocessing\popen_spawn_win32.py in __init__(self, process_obj)
63 try:
64 reduction.dump(prep_data, to_child)
---> 65 reduction.dump(process_obj, to_child)
66 finally:
67 set_spawning_popen(None)
~\Anaconda2\envs\pymc\lib\multiprocessing\reduction.py in dump(obj, file, protocol)
58 def dump(obj, file, protocol=None):
59 '''Replacement for pickle.dump() using ForkingPickler.'''
---> 60 ForkingPickler(file, protocol).dump(obj)
61
62 #
BrokenPipeError: [Errno 32] Broken pipe
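If I read the traceback correctly, the crash happens while the model is pickled for the spawned worker processes (popen_spawn_win32 / reduction.dump), not inside my likelihood. As a test, sampling can be kept in a single process via the cores argument that appears in the sample() signature above; a minimal sketch of that workaround:

# Run both chains in the main process, so nothing needs to be pickled
# for a spawned worker (cores is visible in the pymc3.sample signature above).
trace = pm.sample(ndraws, tune=nburn, discard_tuned_samples=True,
                  chains=chains, step=step, cores=1)

Is that the right way to avoid the multiprocessing pickling on Windows, or is there a cleaner fix?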