Bayesian Statistics: Lecture 9
MCMC:
- Markov chains
- Gibbs sampler
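The MATLAB code below implements a Gibbs sampler for a normal model with explicit outliers: each observation y_i is a regular point (r_i=1) or an outlier (r_i=2, prior probability e), and outliers have their variance inflated by the factor k(2)=10:

  y_i \mid \mu, \sigma^2, r_i \sim N(\mu,\ k_{r_i}\sigma^2), \qquad P(r_i=2)=e.

Assuming the usual noninformative prior p(\mu,\sigma^2) \propto 1/\sigma^2 (not stated in the listing, but the one consistent with the updates below), the full conditionals sampled in each sweep are

  \mu \mid \sigma^2, r \sim N\Big(\frac{\sum_i y_i/k_{r_i}}{\sum_i 1/k_{r_i}},\ \frac{\sigma^2}{\sum_i 1/k_{r_i}}\Big)

  \sigma^2 \mid \mu, r \sim \mathrm{Inv\text{-}Gamma}\Big(\frac{n}{2},\ \frac{1}{2}\sum_i \frac{(y_i-\mu)^2}{k_{r_i}}\Big)

  P(r_i=j \mid \mu, \sigma^2) \propto \pi_j\, k_j^{-1/2} \exp\Big(-\frac{(y_i-\mu)^2}{2 k_j \sigma^2}\Big), \qquad \pi_1=1-e,\ \pi_2=e.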
% Data (note the apparent outliers, e.g. -44 and -2)
y=[28 26 33 24 34 -44 27 16 40 -2 ...
29 22 24 21 25 30 23 29 31 19 ...
24 20 36 32 36 28 25 21 28 29 ...
37 25 28 26 30 32 36 26 30 22 ...
36 23 27 27 28 27 31 27 26 33 ...
26 32 32 24 39 28 24 25 32 25 ...
29 27 28 29 16 23];
e=0.05; % prior probability of outlier
ns=1000; % number of update steps
k=[1 10]; % variance scale factors: k(1) regular points, k(2) outliers
n=length(y);
r=ones(size(y)); mu=mean(y); sigma2=var(y); % initial values
rand('state',0); randn('state',0); % random generator seeds
% allocate memory for the samples
mu=repmat(mu,1,ns);
sigma2=repmat(sigma2,1,ns);
ris2=zeros(1,n); % count occurrences of r(i)=2
% simulation loop
for is=2:ns
% draw mu from its normal full conditional
i1=find(r==1); i2=find(r==2);
n1=length(i1); n2=length(i2);
wn=sigma2(is-1)/(n1/k(1)+n2/k(2)); % conditional variance
mn=(sum(y(i1))/k(1)+sum(y(i2))/k(2))/(n1/k(1)+n2/k(2)); % conditional mean
mu(is)=normrnd(mn,sqrt(wn));
% draw sigma2 from its inverse-gamma full conditional
ns02=sum((y(i1)-mu(is)).^2)/k(1)+sum((y(i2)-mu(is)).^2)/k(2);
tau=gamrnd(n/2,2/ns02); % 1/sigma2 ~ Gamma(n/2, scale 2/ns02)
sigma2(is)=1/tau;
% update the outlier indicators r(i) in random order
nn=randperm(n);
for in=1:n
ii=nn(in);
% unnormalized log conditional probabilities of r(ii)=1 and r(ii)=2
logp(1)=log(1-e)-0.5*log(k(1))-(y(ii)-mu(is))^2/sigma2(is)/2/k(1);
logp(2)=log(e) -0.5*log(k(2))-(y(ii)-mu(is))^2/sigma2(is)/2/k(2);
p=exp(logp-max(logp)); % subtract max for numerical stability
p=p/sum(p); % normalize to probabilities
[~,r(ii)]=histc(rand,[0 cumsum(p)]); % inverse-CDF draw of r(ii)
end
ris2=ris2+(r==2);
end
% plot the estimated Prob(r(i)=2) for each observation
subplot(2,1,1)
stem(ris2/(ns-1),'markerfacecolor','blue','markersize',4) % ns-1 sweeps accumulated
box off, title('Prob(r_i=2)'), xlabel('i')
axis([0 n 0 1])
mu_stats=[mean(mu) std(mu)] % posterior mean and std of mu
sigma2_stats=[mean(sigma2) std(sigma2)] % posterior mean and std of sigma2
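The summaries above average over all ns samples, including the start of the chain. A minimal post-processing sketch, assuming a burn-in of nb=100 samples (a hypothetical choice, not part of the original listing) and using the free second panel of the 2x1 layout:

% discard an assumed burn-in before summarizing, and inspect the trace
nb=100; % hypothetical burn-in length
subplot(2,1,2)
plot(mu), box off
title('trace of \mu'), xlabel('update step')
mu_post=[mean(mu(nb+1:end)) std(mu(nb+1:end))]
sigma2_post=[mean(sigma2(nb+1:end)) std(sigma2(nb+1:end))]

A flat, well-mixing trace of mu supports the chosen burn-in; running a longer chain or comparing runs from different seeds are standard further checks.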