# Make sure you are using the pymc3 environment.
import pymc3 as pm
import matplotlib.pyplot as plt
import numpy as np
import seaborn as sns
from scipy import stats
import pymc3 as pm
import Pythonic_files as pf
import arviz as az
# IPython magics (valid only inside a notebook; commented so the file parses as plain Python):
# %matplotlib inline
# %config InlineBackend.figure_format='retina'
# conda install seaborn
# Confirm which PyMC3 version the active environment provides.
pm.__version__
# Notebook output of the cell above:
'3.8'
# Silence the FutureWarnings that PyMC3/Theano emit, to keep output readable.
import warnings
warnings.filterwarnings('ignore',category=FutureWarning)
# Simulate tossing a coin whose true bias is drawn uniformly at random,
# so the "ground truth" is hidden from the model we fit later.
flips = 2000                      # number of Bernoulli trials
Heads_prob = np.random.random()   # true P(heads), unknown to the model
flip_result = stats.bernoulli.rvs(Heads_prob, size=flips)
# Report the true bias alongside the observed number of heads.
n_heads = flip_result.sum()
print(Heads_prob, n_heads)
# Output: 0.2128971395937982 422
# Coin-flip model: Beta(2, 2) prior on the heads probability and a
# Bernoulli likelihood over the simulated flips; sample with 4 chains.
with pm.Model() as Coin_flip:
    theta = pm.Beta('theta', alpha=2, beta=2)
    obs = pm.Bernoulli('Heads', p=theta, observed=flip_result)
    trace = pm.sample(1000, chains=4)
# Sampler log: Auto-assigning NUTS sampler... Initializing NUTS using jitter+adapt_diag... Multiprocess sampling (4 chains in 4 jobs) NUTS: [theta] Sampling 4 chains, 0 divergences: 100%|██████████| 6000/6000 [00:04<00:00, 1363.17draws/s]
# Posterior density summary for theta (PyMC3's built-in plot).
pm.plot_posterior(trace,figsize=(13,6))
plt.show()
# Per-chain trace plot, to eyeball mixing and stationarity.
az.plot_trace(trace, figsize=(26,6))
plt.show()
# Flatten the posterior draws of theta across all chains into one array.
bob = trace.get_values('theta')
# List every variable stored in the trace (includes the transformed
# 'theta_logodds__' that NUTS actually samples on).
trace.varnames
# Output: ['theta_logodds__', 'theta']
# Notebook-style echo of the sample array (no effect when run as a script).
bob
# Output: array([0.20175365, 0.21293218, 0.21293218, ..., 0.2163057 , 0.21489953, 0.20708337])
# Quick histogram of the posterior samples.
plt.hist(bob)
plt.show()
# Smoothed (KDE) view of the same posterior.
sns.displot(bob, kind = 'kde')
plt.show()
# All draws in sequence; ',' uses single-pixel markers.
plt.plot(bob,',')
plt.show()
# Two interleaved thinned subsequences (every 4th draw) from the first
# 100 samples, to eyeball autocorrelation between successive draws.
plt.plot(bob[:100:4],'.-')
plt.plot(bob[1:100:4],'.-')
# Output: [<matplotlib.lines.Line2D at 0x7f8300807eb8>]
# Pull draws from two individual chains to compare their mixing.
# NOTE(review): pm.sample(1000, ...) retains 1000 draws per chain, so
# burn=1000 appears to discard every retained draw and leave these
# arrays empty — confirm whether a smaller burn (e.g. 500) was intended.
bob1 = trace.get_values('theta',burn=1000, chains=[1])
bob2 = trace.get_values('theta',burn=1000, chains=[2])
plt.plot(bob1[:30],'.-')
plt.plot(bob2[:30],'.-')
# Output: [<matplotlib.lines.Line2D at 0x7f834125eef0>]
# Total retained draws: 4 chains x 1000 samples each.
len(trace.theta)
# Notebook output of the cell above:
4000
# Posterior summary table with convergence diagnostics (ESS, r_hat).
az.summary(trace)
# Output (az.summary table):
#         mean    sd   hdi_3%  hdi_97%  mcse_mean  mcse_sd  ess_mean  ess_sd  ess_bulk  ess_tail  r_hat
# theta  0.211  0.009   0.195    0.229        0.0      0.0    1629.0  1629.0    1629.0    2725.0    1.0
# Posterior with a ROPE (region of practical equivalence) band and a
# reference value marked, to judge whether theta is practically equal
# to the reference.
az.plot_posterior(trace, figsize=(13,6), rope=[.205,.216], ref_val=.207)
plt.show()
# Ground-truth parameters for a synthetic straight-line dataset.
intercept = 5
slope = 7
num_points = 200
error = 2  # standard deviation of the Gaussian noise

# Draw x uniformly on [0, 1) and observe the line through Gaussian noise.
x = np.random.random(num_points)
noise = np.random.normal(0, error, num_points)
y = intercept + slope * x + noise
# Scatter plot of the synthetic data.
plt.figure(figsize=(13,6))
plt.scatter(x,y,alpha=0.5)
plt.show()
# Bayesian simple linear regression via the GLM formula interface.
# The response column is named 'y2' to match the Patsy formula below.
regression_data = dict(x=x, y2=y)
with pm.Model() as lin_model:
    # Builds Intercept, x (slope) and sd variables from the formula.
    pm.glm.GLM.from_formula('y2 ~ x', regression_data)
    trace = pm.sample(4000, cores=2, tune=1000)
# Sampler log: Auto-assigning NUTS sampler... Initializing NUTS using jitter+adapt_diag... Multiprocess sampling (2 chains in 2 jobs) NUTS: [sd, x, Intercept] Sampling 2 chains, 0 divergences: 100%|██████████| 10000/10000 [00:18<00:00, 527.51draws/s]
# Trace plots for the regression parameters (Intercept, x slope, sd).
az.plot_trace(trace, figsize=(26,18))
plt.show()
# Overlay: the data, 50 regression lines drawn from the posterior, and
# the true generating line for comparison.
plt.figure(figsize=(13,6))
plt.scatter(x,y, alpha = 0.4, label = 'points')
pm.plot_posterior_predictive_glm(trace, samples=50, label = 'posterior samples', alpha = 0.4)
plt.plot(x, intercept + slope*x, c='r', label='true line')
plt.legend()
plt.show()