Model selection for a linear mixed model using JAGS

These data consist of $$N=20$$ children. For each child, we record their jaw bone height at $$M=4$$ ages: 8, 8.5, 9 and 9.5 years. We fit a mixed model with a random intercept and random slope for each child, and compare to simpler models to evaluate whether these random effects are needed.

# Jaw data: bone height for N children, each measured at M ages
M   <- 4                      # number of measurements per child
N   <- 20                     # number of children
age <- c(8.0, 8.5, 9.0, 9.5)  # measurement ages (years)

# Build Y directly as an N x M matrix: one row per child, one column per age
Y <- matrix(c(47.8, 48.8, 49.0, 49.7,
              46.4, 47.3, 47.7, 48.4,
              46.3, 46.8, 47.8, 48.5,
              45.1, 45.3, 46.1, 47.2,
              47.6, 48.5, 48.9, 49.3,
              52.5, 53.2, 53.3, 53.7,
              51.2, 53.0, 54.3, 54.5,
              49.8, 50.0, 50.3, 52.7,
              48.1, 50.8, 52.3, 54.4,
              45.0, 47.0, 47.3, 48.3,
              51.2, 51.4, 51.6, 51.9,
              48.5, 49.2, 53.0, 55.5,
              52.1, 52.8, 53.7, 55.0,
              48.2, 48.9, 49.3, 49.8,
              49.6, 50.4, 51.2, 51.8,
              50.7, 51.7, 52.7, 53.3,
              47.2, 47.7, 48.4, 49.5,
              53.3, 54.6, 55.1, 55.3,
              46.2, 47.5, 48.1, 48.4,
              46.3, 47.6, 51.3, 51.8),
            nrow = N, ncol = M, byrow = TRUE)


Plot the data for each subject:

# Spaghetti plot: one bone-growth trajectory per child
plot(NA, xlim = range(age), ylim = range(Y),
     xlab = "Age", ylab = "Bone height")
for (child in seq_len(N)) {
  lines(age, Y[child, ])   # connect the child's four measurements
  points(age, Y[child, ])  # mark each individual measurement
}


library(rjags)


MODEL 1: The full random slopes model

# JAGS code for Model 1: random intercept and random slope per child.
# Y[i,j] is child i's bone height at age[j]; alpha[i] and beta[i] are
# child-specific effects drawn from common normal distributions whose
# means and precisions are given vague hyperpriors.
model_string <- "model{

# Likelihood
for(i in 1:N){
for(j in 1:M){
Y[i,j]   ~ dnorm(mu[i,j],inv.var)
mu[i,j] <- alpha[i] + beta[i]*age[j]
}
}

# Random effects distributions
for(i in 1:N){
alpha[i] ~ dnorm(mu.alpha,inv.var.alpha)
beta[i]  ~ dnorm(mu.beta,inv.var.beta)
}

# Prior
inv.var       ~ dgamma(0.1, 0.1)
inv.var.alpha ~ dgamma(0.1, 0.1)
inv.var.beta  ~ dgamma(0.1, 0.1)
mu.alpha      ~ dnorm(0,0.001)
mu.beta       ~ dnorm(0,0.001)

}"

# Compile Model 1 with three parallel chains (dic.samples requires at
# least two chains to estimate the effective number of parameters).
model <- jags.model(textConnection(model_string),
                    data = list(Y = Y, N = N, M = M, age = age),
                    n.chains = 3)

## Compiling model graph
##    Resolving undeclared variables
##    Allocating nodes
##    Graph Size: 294
##
## Initializing model

# Burn-in for 10000 samples
update(model, 10000, progress.bar = "none")

# Compute DIC. dic.samples() monitors the deviance and penalty directly;
# it has no variable.names argument (the one in the original call was
# silently ignored), so it is dropped here.
DIC1 <- dic.samples(model, n.iter = 50000, progress.bar = "none")

DIC1

## Mean deviance:  193.5
## penalty 23.28
## Penalized deviance: 216.8


MODEL 2: In this model the subject-specific slopes are treated as fixed effects

# JAGS code for Model 2: child-specific intercepts and slopes treated as
# unrelated fixed effects -- each alpha[i] and beta[i] gets its own vague
# prior, with no shared random-effects distribution linking the children.
model_string <- "model{

# Likelihood
for(i in 1:N){
for(j in 1:M){
Y[i,j]   ~ dnorm(mu[i,j],inv.var)
mu[i,j] <- alpha[i] + beta[i]*age[j]
}
}

# Prior for the intercepts and slopes
for(i in 1:N){
alpha[i] ~ dnorm(0,0.01)
beta[i]  ~ dnorm(0,0.01)
}
inv.var       ~ dgamma(0.1, 0.1)

}"

# Compile Model 2 with three parallel chains (dic.samples requires at
# least two chains to estimate the effective number of parameters).
model <- jags.model(textConnection(model_string),
                    data = list(Y = Y, N = N, M = M, age = age),
                    n.chains = 3)

## Compiling model graph
##    Resolving undeclared variables
##    Allocating nodes
##    Graph Size: 290
##
## Initializing model

# Burn-in for 10000 samples
update(model, 10000, progress.bar = "none")

# Compute DIC. dic.samples() monitors the deviance and penalty directly;
# it has no variable.names argument (the one in the original call was
# silently ignored), so it is dropped here.
DIC2 <- dic.samples(model, n.iter = 50000, progress.bar = "none")

DIC2

## Mean deviance:  328.2
## penalty 31.22
## Penalized deviance: 359.4


MODEL 3: In this model all subjects have the same mean

# JAGS code for Model 3: a single shared regression line -- one common
# intercept (alpha) and slope (beta) for all children, with no
# subject-specific effects.
model_string <- "model{

# Likelihood
for(i in 1:N){
for(j in 1:M){
Y[i,j]   ~ dnorm(mu[i,j],inv.var)
mu[i,j] <- alpha + beta*age[j]
}
}

# Priors
alpha   ~ dnorm(0,0.01)
beta    ~ dnorm(0,0.01)
inv.var ~ dgamma(0.1, 0.1)

}"

# Compile Model 3 with three parallel chains (dic.samples requires at
# least two chains to estimate the effective number of parameters).
model <- jags.model(textConnection(model_string),
                    data = list(Y = Y, N = N, M = M, age = age),
                    n.chains = 3)

## Compiling model graph
##    Resolving undeclared variables
##    Allocating nodes
##    Graph Size: 100
##
## Initializing model

# Burn-in for 10000 samples
update(model, 10000, progress.bar = "none")

# Compute DIC. dic.samples() monitors the deviance and penalty directly;
# it has no variable.names argument (the one in the original call was
# silently ignored), so it is dropped here.
DIC3 <- dic.samples(model, n.iter = 50000, progress.bar = "none")

DIC3

## Mean deviance:  380.7
## penalty 2.908
## Penalized deviance: 383.7


Compare models

  # Display each model's DIC; the penalized deviance is the criterion
  # (smaller is better).
  print(DIC1)  # Model 1: shared random-effects distributions

## Mean deviance:  193.5
## penalty 23.28
## Penalized deviance: 216.8

  print(DIC2)  # Model 2: separate fixed effects per child

## Mean deviance:  328.2
## penalty 31.22
## Penalized deviance: 359.4

  print(DIC3)  # Model 3: one shared regression line

## Mean deviance:  380.7
## penalty 2.908
## Penalized deviance: 383.7


The original random effects model has the smallest DIC and is therefore preferred.