# Semi-conjugate prior with normal likelihood (Section 9.4)
#
# Data quoted in P M Lee, `Bayesian Statistics: An Introduction',
# Arnold 1989, Section 2.13.  The posterior mode for theta is found
# first by the EM algorithm, then the joint posterior of (theta, phi)
# is explored by a Gibbs sampler.
#
iter <- 10               # Number of iterations of the EM algorithm
m <- 500                 # Number of replications of the Gibbs sampler
t <- 10                  # Number of iterations within each replication
# Data: sample size, sample mean, sum of squares about the mean
n <- 12
xbar <- 119
sxx <- 13045
# Prior hyperparameters:
#   theta ~ N(theta0, phi0) with phi0 = s0 / (n0 * (nu0 - 2))
#   phi   ~ s0 * chi_{nu0}^{-2}, independent of theta
s0 <- 2700
nu0 <- 11
n0 <- 15
theta0 <- 110
phi0 <- s0/(n0*(nu0-2))
cat("\n")
cat("Data quoted in P M Lee, `Bayesian Statistics: An Introduction', \n")
# NOTE: message corrected to xbar=119 to agree with the value of xbar
# actually used in the computations above (was misprinted as 139).
cat("Arnold 1989, Section 2.13. Taking n=12, xbar=119, S=13,045 and \n")
cat("prior for theta ~ N(theta0,S0/n0(nu0-2)), that is, N(", theta0,",",phi0,"),\n")
cat("and for phi independent and such that phi ~ S0 chi_{nu0}^{-2}, \n")
cat("that is, phi/",s0," is a chi-squared variate on",nu0,"d.f. \n")
cat("\n")
cat("Iterations of the EM algorithm give the following values for theta \n")
#
# EM algorithm
theta <- theta0          # Initialize at the prior mean
n1 <- nu0 + n            # Posterior degrees of freedom for phi
for (j in seq_len(iter)) # Iterate iter times
{
  # Start a new output line every 5 values
  if ((j - 1) %% 5 == 0) cat("\n")
  # E-step quantity: s1 = s0 + sum (x(i) - theta)^2
  s1 <- s0+sxx+n*(xbar-theta)*(xbar-theta)
  # M-step: precision-weighted combination of prior mean and data mean
  theta1 <- (theta0/phi0+n*xbar/(s1/n1))/(1/phi0+n/(s1/n1))
  theta <- theta1
  cat(theta," ")
}
cat("\n")
#
# Gibbs sampler
# theta deliberately carries over its final EM value as the starting point.
phi <- sxx/(n-1)         # Initialize phi at the sample variance
thetafinal <- rep(0,m)   # Preallocate storage for the m retained draws
phifinal <- rep(0,m)
for (j in seq_len(m))    # Replicate m times
{
  for (s in seq_len(t))  # Iterate t times within each replication
  {
    # theta | phi ~ N(theta1, phi1)
    phi1 <- 1/((1/phi0)+(n/phi))
    theta1 <- phi1*((theta0/phi0)+(n*xbar/phi))
    theta <- theta1+sqrt(phi1)*rnorm(1)
    # s1 = s0 + sum (x(i) - theta)^2
    s1 <- s0+sxx+n*(xbar-theta)*(xbar-theta)
    # phi | theta ~ s1 * chi_{nu1}^{-2} with nu1 = nu0 + n
    phi <- s1/rchisq(1,nu0+n)
  }
  # Keep only the final draw of each independent replication
  thetafinal[j] <- theta
  phifinal[j] <- phi
}
# Posterior summaries from the retained draws
thetabar <- mean(thetafinal)
phibar <- mean(phifinal)
thetavar <- var(thetafinal)
phivar <- var(phifinal)
cat("\n")
cat("The Gibbs sampler gives rise to the following conclusions: \n")
cat("We deduce posterior for theta has mean",thetabar,"and variance", thetavar,"\n")
cat("and that posterior for phi has mean",phibar,"and variance",phivar,"\n")
cat("\n")