#
#
########## Bayesian t-tests (and one way ANOVA) using the "BayesFactor" package ##########
# 'foreign' provides read.spss() for importing the SPSS (.sav) example data.
library(foreign)
# Download the example data set and convert it to a data frame, keeping
# SPSS value labels as factor levels. NOTE(review): this requires network
# access; the unt.edu URL may no longer resolve -- verify before running.
example.1 <- read.spss("http://www.unt.edu/rss/class/Jon/R_SC/Module3/ExampleData1.sav",
use.value.labels=TRUE, max.value.labels=Inf, to.data.frame=TRUE)
summary(example.1)
# 'Rcmdr' supplies numSummary() and leveneTest() used below; loading it also
# starts the R Commander GUI. 'abind' is an Rcmdr dependency.
library(Rcmdr)
library(abind)
# Then load the library 'BayesFactor' into the current session.
library(BayesFactor)
#### T-test example.
# Summary statistics (mean, sd) for Recall1 by type of Candy.
numSummary(example.1$Recall1 , groups=example.1$Candy,statistics=c("mean", "sd"))
boxplot(example.1$Recall1 ~ example.1$Candy, col = "lightgreen")
# Levene's test of Homogeneity of Variances ("var").
tapply(example.1$Recall1, example.1$Candy, var, na.rm=TRUE)
leveneTest(example.1$Recall1, example.1$Candy, center=median)
# First, conduct the traditional t-test (one-sided, pooled variance).
t.t1 <- t.test(Recall1~Candy, alternative="less", conf.level=.95, var.equal=TRUE, data=example.1)
t.t1
# Split the Recall1 scores into one vector per Candy group. This replaces the
# original attach()/detach() pair: attach() can silently mask objects in the
# global environment and is best avoided.
x1 <- split(example.1$Recall1, example.1$Candy)
x1
library(MBESS)
# Standardized mean difference (Cohen's d) between the two Candy groups.
smd(x1$Skittles, x1$None)
# Next, conduct the Bayes Factor t-test; which returns "a scalar giving the Bayes factor IN FAVOR
# of the ALTERNATIVE HYPOTHESIS that the effect size is NOT zero." (Rouder & Morey, ttest.tstat help document).
# Take the t statistic from the t.test result above instead of re-typing it
# (-7.7566) by hand, so the two analyses cannot drift out of sync.
# n1 = n2 = 27 cases per Candy group; rscale sets the Cauchy prior scale.
t.b1 <- ttest.tstat(t = unname(t.t1$statistic), n1 = 27, n2 = 27, rscale = "medium")
t.b1
# NOTE(review): current versions of 'BayesFactor' return the LOG of the Bayes
# factor in the 'bf' element; exponentiate to report the BF itself -- confirm
# against the installed package version.
exp(t.b1[["bf"]])
# Repeat with a wider Cauchy prior to gauge how sensitive the Bayes factor is
# to the prior scale. (The original script ran the identical "medium" call
# twice, which was a copy-paste duplication.)
t.b2 <- ttest.tstat(t = unname(t.t1$statistic), n1 = 27, n2 = 27, rscale = "wide")
t.b2
help(ttest.tstat)
#### One Way ANOVA example.
# Summary statistics (mean, sd) for Recall1 by type of Distraction.
numSummary(example.1$Recall1 , groups=example.1$Distraction,statistics=c("mean", "sd"))
# Levene's test of Homogeneity of Variances ("var").
tapply(example.1$Recall1, example.1$Distraction, var, na.rm=TRUE)
leveneTest(example.1$Recall1, example.1$Distraction, center=median)
boxplot(example.1$Recall1 ~ example.1$Distraction, col = "lightgreen")
# First conduct the traditional ANOVA (Distraction has 3 groups, each with 18 cases).
aov.t1 <- aov(Recall1 ~ Distraction, data=example.1)
summary(aov.t1)
# Extract the observed F statistic from the fitted model instead of
# hard-coding it (2.1164), so the Bayes factor always matches the ANOVA above.
f.obs <- summary(aov.t1)[[1]][["F value"]][1]
# Second, conduct the Bayes Factor analysis; which returns "a scalar giving the Bayes
# factor in favor of the ALTERNATIVE hypothesis" (Morey, help documentation).
# N = cases per group (18), J = number of groups (3).
aov.b1 <- oneWayAOV.Fstat(F = f.obs, N = 18, J = 3, rscale = "medium")
aov.b1
# NOTE(review): current versions of 'BayesFactor' return the LOG Bayes factor
# in the 'bf' element; exponentiate to report the BF itself -- confirm against
# the installed package version.
exp(aov.b1[["bf"]])
help(oneWayAOV.Fstat)
## Note on interpretation of Bayes Factors: Jeffreys (1961) recommends that odds greater than 3 be considered
## some evidence, odds greater than 10 be considered strong evidence, odds greater than 30 be considered very
## strong evidence for one hypothesis over another. In the one way ANOVA example above, the Bayes Factor value
## is 3.234, which indicates that the ALTERNATIVE hypothesis is 3.234 times more probable than the NULL
## hypothesis, given the data (recall that oneWayAOV.Fstat returns the Bayes factor in favor of the
## alternative). Kass and Raftery (1995) offer a slightly different strategy for interpreting
## Bayes Factors: 1 to 3.2 not worth mentioning, 3.2 to 10 substantial, 10 to 100 strong, and greater than
## 100 decisive.
# Jeffreys, H. (1961). Theory of probability (3rd ed.). Oxford: Oxford University Press.
# Kass, R. E., & Raftery, A. E. (1995). Bayes Factors. Journal of the American Statistical
# Association, 90, 773 - 795.
# NOTE: the 'LearnBayes' package, which is a companion to the book Bayesian Computation with
# R, both of which were authored by Jim Albert (2010, 2007), also contains functions for
# computing Bayes Factors.
# Albert, J. (2007). Bayesian computation with R. New York: Springer Science+Business Media, LLC.
# Albert, J. (2010). Package ‘LearnBayes’. Available at CRAN:
# http://cran.r-project.org/web/packages/LearnBayes/index.html
###### Links for some references/resources
# http://www.socsci.uci.edu/~mdlee/WetzelsEtAl2010.pdf
# http://cran.r-project.org/web/packages/mcmc/vignettes/bfst.pdf
# http://www.stat.cmu.edu/~kass/papers/bayesfactors.pdf
# https://r-forge.r-project.org/projects/bayesfactorpcl/
# End: Last updated, Jan. 24, 2012.