JAGS finite mixture model with two different parametric distributions

I am having a problem running a finite mixture model with two different parametric distributions (a negative binomial component and a Poisson component) in JAGS. I get this error:

Error in jags.model(textConnection(mod1_string), data = data1_jags, inits = inits1, : Error in node Ones[1] Invalid parent values

# Define the JAGS model
mod1_string <- " model {
for (i in 1:n) {

# first distribution: the negative binomial part
y_nb[i] ~ dnegbin(prob[i], phi)
prob[i] <- phi / (phi + eps[i] * mu[i])
log(mu[i]) <- b1[V[i]] * x6[i] + b2[V[i]] * x28[i]
eps[i] ~ dgamma(f[i], t[V[i]])
f[i] <- 1 + z[i] 
z[i] ~ dbern(k[V[i]])
V[i] ~ dcat(W[])

# second distribution: Poisson
y_pois[i] ~ dpois(mu2[i])
mu2[i] <- b1[P[i]] * x6[i] + b2[P[i]] * x28[i]
P[i] ~ dcat(W[])

# The latent class part using dcat
component_chosen[i] ~ dcat(probs)

# Define ld_comp
ld_comp[i, 1] <- y_nb[i]
ld_comp[i, 2] <- y_pois[i]

# Select one of these two densities and normalize with a Constant
density[i] <- exp(ld_comp[i, component_chosen[i]])

# Ones node
Ones[i] ~ dbern(density[i])

   }

for (j in 1:2) {
  # Priors
  b1[j] ~ dnorm(0, 0.01)
  b2[j] ~ dnorm(0, 0.01)
  k[j] ~ dunif(0, 1)
  t[j] <- (1 - k[j]) / k[j]
  mean.eps[j] <- (t[j] + 2) / (t[j] * (t[j] + 1))
  log.mean.eps[j] <- log(mean.eps[j])
}

# Prior for the 2 mixture probabilities using a Dirichlet distribution
probs ~ ddirch(c(1, 1))

# Priors for W
W[1] ~ dunif(0, 1)
W[2] <- 1 - W[1]

# Prior for phi
phi ~ dgamma(0.1, 0.1)

}"

# Set seed and prepare data
set.seed(72)

data1_jags <- list(y_nb = y, y_pois = y, n = nrow(data), x6 = x6, x28 = x28)
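If I understand the ones trick correctly, the Ones vector also has to be supplied as observed data (a vector of 1s of length n); it is missing from my data list above. This is my assumption of what would be needed:

# my assumption: Ones must be observed as a vector of 1s for the ones trick
data1_jags$Ones <- rep(1, nrow(data))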

params1 <- c("b1", "b2", "k", "phi")

# Define initial values
inits1 <- function() {
  list("b1" = c(0, 0), "b2" = c(0, 0), "k" = c(0.1, 0.5), "phi" = 5)
}
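One thing I am not sure about: set.seed(72) only seeds R, not the JAGS samplers, so with n.chains = 3 the chains themselves are not fully reproducible. My understanding (an assumption, not something from my current code) is that per-chain RNG settings can be passed through the inits in rjags, e.g.:

# my assumption of the usual rjags idiom for reproducible chains:
# draw a per-chain seed in R (itself reproducible via set.seed above)
inits1 <- function() {
  list("b1" = c(0, 0), "b2" = c(0, 0), "k" = c(0.1, 0.5), "phi" = 5,
       .RNG.name = "base::Mersenne-Twister", .RNG.seed = sample.int(1e6, 1))
}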

# Create the JAGS model
mod1 <- jags.model(textConnection(mod1_string), data = data1_jags,
                   inits = inits1, n.chains = 3)

# Update the model (burn-in)
update(mod1, 1000)

# Run the model
mod1_sim <- coda.samples(model = mod1, variable.names = params1,
                         n.iter = 5e3, thin = 50)

# Combine the chains
mod1_csim <- do.call(rbind, mod1_sim)

# Model checking: convergence diagnostics
par(mar = c(3, 3, 1, 1))  # smaller margins (bottom, left, top, right)
plot(mod1_sim)

gelman.diag(mod1_sim)
autocorr.diag(mod1_sim)
effectiveSize(mod1_sim)
summary(mod1_sim)
acf(mod1_csim, lag.max = 100)

