CRAN Package Check Results for Package CARBayesST

Last updated on 2019-11-26 00:51:46 CET.

Flavor Version Tinstall Tcheck Ttotal Status Flags
r-devel-linux-x86_64-debian-clang 3.0.1 62.45 203.68 266.13 ERROR
r-devel-linux-x86_64-debian-gcc 3.0.1 54.19 156.10 210.29 ERROR
r-devel-linux-x86_64-fedora-clang 3.0.1 328.71 NOTE
r-devel-linux-x86_64-fedora-gcc 3.0.1 315.79 OK
r-devel-windows-ix86+x86_64 3.0.1 158.00 271.00 429.00 OK
r-devel-windows-ix86+x86_64-gcc8 3.0.1 119.00 257.00 376.00 OK
r-patched-linux-x86_64 3.0.1 57.96 194.72 252.68 OK
r-patched-solaris-x86 3.0.1 394.90 OK
r-release-linux-x86_64 3.0.1 56.34 195.95 252.29 OK
r-release-windows-ix86+x86_64 3.0.1 133.00 355.00 488.00 OK
r-release-osx-x86_64 3.0.1 NOTE
r-oldrel-windows-ix86+x86_64 3.0.1 89.00 240.00 329.00 OK
r-oldrel-osx-x86_64 3.0.1 NOTE

Check Details

Version: 3.0.1
Check: examples
Result: ERROR
    Running examples in 'CARBayesST-Ex.R' failed
    The error most likely occurred in:
    
    > base::assign(".ptime", proc.time(), pos = "CheckExEnv")
    > ### Name: ST.CARadaptive
    > ### Title: Fit a spatio-temporal generalised linear mixed model to data,
    > ### with a spatio-temporal autoregressive process that has an adaptive
    > ### autocorrelation structure.
    > ### Aliases: ST.CARadaptive
    >
    > ### ** Examples
    >
    > #################################################
    > #### Run the model on simulated data on a lattice
    > #################################################
    > #### set up the regular lattice
    > x.easting <- 1:10
    > x.northing <- 1:10
    > Grid <- expand.grid(x.easting, x.northing)
    > K <- nrow(Grid)
    > N <- 10
    > N.all <- N * K
    >
    >
    > #### set up spatial neighbourhood matrix W
    > distance <- as.matrix(dist(Grid))
    > W <-array(0, c(K,K))
    > W[distance==1] <-1
    >
    >
    > #### Simulate the elements in the linear predictor and the data
    > Q.W <- 0.99 * (diag(apply(W, 2, sum)) - W) + 0.01 * diag(rep(1,K))
    > Q.W.inv <- solve(Q.W)
    > phi.temp <- mvrnorm(n=1, mu=rep(0,K), Sigma=(0.1 * Q.W.inv))
    > phi <- phi.temp
    > for(i in 2:N)
    + {
    + phi.temp2 <- mvrnorm(n=1, mu=(0.8 * phi.temp), Sigma=(0.1 * Q.W.inv))
    + phi.temp <- phi.temp2
    + phi <- c(phi, phi.temp)
    + }
    > jump <- rep(c(rep(2, 70), rep(4, 30)),N)
    > LP <- jump + phi
    > fitted <- exp(LP)
    > Y <- rpois(n=N.all, lambda=fitted)
    >
    >
    > #### Run the model
    > ## Not run:
    > ##D model <- ST.CARadaptive(formula=Y~1, family="poisson", W=W, burnin=10000,
    > ##D n.sample=50000)
    > ## End(Not run)
    >
    >
    > #### Toy example for checking
    > model <- ST.CARadaptive(formula=Y~1, family="poisson", W=W, burnin=10,
    + n.sample=50)
    Setting up the model.
     ----------- FAILURE REPORT --------------
     --- failure: the condition has length > 1 ---
     --- srcref ---
    :
     --- package (from environment) ---
    CARBayesST
     --- call from context ---
    poisson.CARadaptive(formula = formula, data = data, W = W, burnin = burnin,
     n.sample = n.sample, thin = thin, prior.mean.beta = prior.mean.beta,
     prior.var.beta = prior.var.beta, prior.tau2 = prior.tau2,
     rho = rho, epsilon = epsilon, MALA = MALA, verbose = verbose)
     --- call from argument ---
    if (class(W) == "matrix") W <- as.spam(W)
     --- R stacktrace ---
    where 1: poisson.CARadaptive(formula = formula, data = data, W = W, burnin = burnin,
     n.sample = n.sample, thin = thin, prior.mean.beta = prior.mean.beta,
     prior.var.beta = prior.var.beta, prior.tau2 = prior.tau2,
     rho = rho, epsilon = epsilon, MALA = MALA, verbose = verbose)
    where 2: ST.CARadaptive(formula = Y ~ 1, family = "poisson", W = W, burnin = 10,
     n.sample = 50)
    
     --- value of length: 2 type: logical ---
    [1] TRUE FALSE
     --- function from context ---
    function (formula, data = NULL, W, burnin, n.sample, thin = 1,
     prior.mean.beta = NULL, prior.var.beta = NULL, prior.tau2 = NULL,
     rho = NULL, epsilon = 0, MALA = TRUE, verbose = TRUE)
    {
     a <- common.verbose(verbose)
     blocksize.beta <- 5
     blocksize.v <- 10
     z <- which(W > 0, arr.ind = T)
     locs <- z[which(z[, 1] < z[, 2]), ]
     char.locs <- paste(locs[, 1], ".", locs[, 2], sep = "")
     n.edges <- nrow(locs)
     if (!is.symmetric.matrix(W))
     stop("W is not symmetric.", call. = FALSE)
     if (class(W) == "matrix")
     W <- as.spam(W)
     if (!class(W) %in% c("matrix", "spam"))
     stop("W must be an object with class \"matrix\" or \"spam\"",
     call. = FALSE)
     logit <- function(p) log(p/(1 - p))
     inv_logit <- function(v) 1/(1 + exp(-v))
     if (length(MALA) != 1)
     stop("MALA is not length 1.", call. = FALSE)
     if (!is.logical(MALA))
     stop("MALA is not logical.", call. = FALSE)
     frame <- try(suppressWarnings(model.frame(formula, data = data,
     na.action = na.pass)), silent = TRUE)
     if (class(frame) == "try-error")
     stop("the formula inputted contains an error, e.g the variables may be different lengths.",
     call. = FALSE)
     X <- try(suppressWarnings(model.matrix(object = attr(frame,
     "terms"), data = frame)), silent = TRUE)
     if (class(X) == "try-error")
     stop("the covariate matrix contains inappropriate values.",
     call. = FALSE)
     if (sum(is.na(X)) > 0)
     stop("the covariate matrix contains missing 'NA' values.",
     call. = FALSE)
     p <- ncol(X)
     y <- model.response(frame)
     which.miss <- as.numeric(!is.na(y))
     n.sites <- as.integer(nrow(W))
     n.time <- as.integer(length(y)/n.sites)
     k <- as.integer(round(n.sites * n.time, 0))
     if (is.null(prior.mean.beta))
     prior.mean.beta <- rep(0, p)
     if (is.null(prior.var.beta))
     prior.var.beta <- rep(1e+05, p)
     if (is.null(prior.tau2))
     prior.tau2 <- c(1, 0.01)
     prior.beta.check(prior.mean.beta, prior.var.beta, p)
     prior.var.check(prior.tau2)
     offset <- try(model.offset(frame), silent = TRUE)
     if (class(offset) == "try-error")
     stop("the offset is not numeric.", call. = FALSE)
     if (is.null(offset))
     offset <- rep(0, (n.time * n.sites))
     if (sum(is.na(offset)) > 0)
     stop("the offset has missing 'NA' values.", call. = FALSE)
     if (!is.numeric(offset))
     stop("the offset variable has non-numeric values.", call. = FALSE)
     common.burnin.nsample.thin.check(burnin, n.sample, thin)
     X.standardised <- X
     X.sd <- apply(X, 2, sd)
     X.mean <- apply(X, 2, mean)
     X.indicator <- rep(NA, p)
     for (j in 1:p) {
     if (length(table(X[, j])) > 2) {
     X.indicator[j] <- 1
     X.standardised[, j] <- (X[, j] - mean(X[, j]))/sd(X[,
     j])
     }
     else if (length(table(X[, j])) == 1) {
     X.indicator[j] <- 2
     }
     else {
     X.indicator[j] <- 0
     }
     }
     if (is.numeric(blocksize.v)) {
     fromto <- seq(0, n.edges, by = blocksize.v)
     fromto[1] <- 0
     if (!n.edges %in% fromto)
     fromto <- c(fromto, n.edges)
     n.blocks <- length(fromto) - 1
     blockinds <- vector("list", length = n.blocks)
     for (i in 1:n.blocks) blockinds[[i]] <- (fromto[i] +
     1):fromto[i + 1]
     }
     v <- logit(rtruncnorm(n.edges, mean = 0.999, sd = 0.001,
     a = 0, b = 1))
     v_15 <- v - 15
     vqform_current <- sum(v_15^2)
     W_current <- W
     W_current[locs][1:n.edges] <- inv_logit(v)
     W_current[locs[, 2:1]][1:n.edges] <- inv_logit(v)
     rhofix <- rho
     rho <- ifelse(!is.null(rhofix), rhofix, 0.99)
     fixedridge <- epsilon
     if (rho == 1)
     fixedridge <- 1e-04
     if (!is.numeric(rho))
     stop("rho is fixed but is not numeric.", call. = FALSE)
     if (rho < 0)
     stop("rho is outside the range [0, 1].", call. = FALSE)
     if (rho > 1)
     stop("rho is outside the range [0, 1].", call. = FALSE)
     tripList <- vector("list", length = 2)
     tripList[[1]] <- cbind(1:nrow(W_current), 1:nrow(W_current),
     rowSums(W_current) + fixedridge)
     tripList[[2]] <- cbind(rbind(locs, locs[, 2:1]), -rep(inv_logit(v),
     2))
     Q.space.trip <- rbind(tripList[[1]], tripList[[2]])
     Q.space.trip <- updatetriplets_rho(trips = Q.space.trip,
     nsites = n.sites, rho_old = 1, rho_new = rho, fixedridge = fixedridge)
     Q.space <- Q.space.prop <- spam(list(i = Q.space.trip[, 1],
     j = Q.space.trip[, 2], Q.space.trip[, 3]))
     chol.Q.space <- chol.spam(Q.space)
     Q.space.det.old <- n.time * 2 * determinant(chol.Q.space,
     logarithm = T)$modulus
     alpha <- 1
     if (n.time > 1) {
     Q.block <- as.spam(crossprod(diff(diag(n.time))))
     Q.block[1, 1] <- Q.block[1, 1] + 1
     Dg <- diag.spam(diag.spam(Q.block))
     R <- Q.block - Dg
     Dtime <- diag.spam(c(rep(1, nrow(Q.block) - 1), 0))
     Dg <- Dg - Dtime
     Q.time <- Dg + Dtime * alpha^2 + R * alpha
     Q.time[n.time, n.time] <- 1
     Q.det <- determinant(Q.time, logarithm = T)
     detTime <- as.numeric(0.5 * n.sites * (Q.det$m) * (Q.det$s))
     Q.time.trip <- Reduce("cbind", triplet(Q.time))
     }
     else {
     Q.time <- 1
     detTime <- 1
     Q.time.trip <- matrix(rep(1, 3), ncol = 3)
     }
     phi_tune <- 0.5
     W.tune <- 1
     rho.tune <- 0.1
     tau_v <- 200
     prior.max.tau <- 1000
     increment <- 0
     glm_mod <- glm(y ~ -1 + X.standardised, family = "quasipoisson",
     offset = offset)
     beta.mean <- glm_mod$coefficients
     beta.sd <- sqrt(diag(summary(glm_mod)$cov.scaled))
     beta_par <- rnorm(n = length(beta.mean), mean = beta.mean,
     sd = beta.sd)
     log.y <- log(y)
     log.y[y == 0] <- -0.1
     res.temp <- log.y - X.standardised %*% beta_par - offset
     res.sd <- sd(res.temp, na.rm = TRUE)/5
     phi <- rnorm(k, mean = 0, sd = res.sd)
     tau <- var(phi)/10
     phiQphi <- qform_ST(Qspace = Q.space.trip, Qtime = Q.time.trip,
     phi = phi, nsites = n.sites)
     XB <- X.standardised %*% beta_par
     tau_v.shape <- (n.edges/2) + prior.tau2[1]
     tau_phi_shape <- (n.sites * n.time/2) + prior.tau2[1]
     n.save <- ifelse(thin == 1, (n.sample - burnin), (n.sample -
     burnin)/thin)
     accept.all <- rep(0, 8)
     accept <- accept.all
     samples.beta <- array(NA, c(n.save, p))
     samples.phi <- array(NA, c(n.save, n.sites * n.time))
     samples.tau2 <- samples.vtau2 <- samples.alpha <- samples.rho <- matrix(0,
     n.save, 1)
     samples.v <- matrix(0, ncol = n.edges, nrow = c(n.save, n.sites *
     n.time))
     samples.fit <- array(NA, c(n.save, n.sites * n.time))
     samples.loglike <- array(NA, c(n.save, n.sites * n.time))
     options(spam.cholsymmetrycheck = FALSE)
     options(spam.cholpivotcheck = FALSE)
     options(spam.safemode = c(F, F, F))
     block.temp <- common.betablock(p)
     beta.beg <- block.temp[[1]]
     beta.fin <- block.temp[[2]]
     n.beta.block <- block.temp[[3]]
     list.block <- as.list(rep(NA, n.beta.block * 2))
     for (r in 1:n.beta.block) {
     list.block[[r]] <- beta.beg[r]:beta.fin[r] - 1
     list.block[[r + n.beta.block]] <- length(list.block[[r]])
     }
     proposal.sd.beta <- 0.01
     proposal.corr.beta <- solve(t(X.standardised) %*% X.standardised)
     chol.proposal.corr.beta <- chol(proposal.corr.beta)
     perm <- order(Q.space.trip[, 1], Q.space.trip[, 2])
     diags.space <- which(Q.space.trip[perm, 1] == Q.space.trip[perm,
     2])
     if (n.time > 1)
     diag.time <- Reduce("cbind", triplet(diag.spam(n.time -
     1)))
     time.last.diag <- which((Q.time.trip[, 1] == Q.time.trip[,
     2]) & (Q.time.trip[, 1] == n.time))
     lastblock <- (k - n.sites + 1):k
     firstblock <- 1:n.sites
     n.keep <- floor((n.sample - burnin)/thin)
     if (verbose) {
     cat("Generating", n.keep, "post burnin and thinned (if requested) samples.\n",
     sep = " ")
     progressBar <- txtProgressBar(style = 3)
     percentage.points <- round((1:100/100) * n.sample)
     }
     else percentage.points <- round((1:100/100) * n.sample)
     for (j in 1:n.sample) {
     save.iter <- j > burnin && ((j%%thin == 0) | thin ==
     0)
     if (save.iter)
     increment <- increment + 1
     if (n.time > 1) {
     phifirst <- phi[-firstblock]
     philast <- phi[-lastblock]
     philastQphilast <- qform_ST(Qspace = Q.space.trip,
     Qtime = diag.time, phi = philast, nsites = n.sites)
     phifirstQphilast <- qform_ST_asym(Qspace = Q.space.trip,
     Qtime = diag.time, phi1 = phifirst, phi2 = philast,
     nsites = n.sites)
     mu_alpha <- phifirstQphilast/philastQphilast
     mu_sigmasq <- tau/philastQphilast
     alpha <- rtruncnorm(n = 1, a = 10^-5, b = 1 - 10^-5,
     mean = mu_alpha, sd = sqrt(mu_sigmasq))
     Q.time.trip <- update_Qtime(Q.time.trip, alpha, time.last.diag -
     1)
     phiQphi <- qform_ST(Qspace = Q.space.trip, Qtime = Q.time.trip,
     phi = phi, nsites = n.sites)
     detTime <- determinant(Q.time, logarithm = TRUE)
     detTime <- (detTime$m) * (detTime$s)
     }
     tau_scale <- vqform_current/2 + prior.tau2[2]
     tau_v <- 1/rtrunc(n = 1, spec = "gamma", a = 1e-06, b = Inf,
     shape = tau_v.shape, scale = (1/tau_scale))
     v.proposal <- rtruncnorm(n = n.edges, a = -15, b = 15,
     mean = v, sd = W.tune)
     for (i in 1:n.blocks) {
     vnew <- v
     block_inds <- blockinds[[i]]
     vnew[block_inds] <- v.proposal[block_inds]
     tripUpdate <- updatetripList2(Q.space.trip, vold = v,
     vnew = vnew, nedges = n.edges, nsites = n.sites,
     block = block_inds, block_length = length(block_inds),
     fixedridge = fixedridge, rho = rho)
     Q.space.trip.prop <- tripUpdate[[1]]
     Q.space.trip.diff <- tripUpdate[[2]]
     Q.space.prop@entries <- Q.space.trip.prop[perm, 3]
     Q.space.trip.diff[, 3] <- Q.space.trip[, 3] - Q.space.trip.prop[,
     3]
     phiQphi_phiQphiNew <- qform_difference_ST(Qtrip = Q.space.trip.diff,
     Qtime = Q.time.trip, phi = phi, nsites = n.sites)
     chol.Q.space.prop <- update(chol.Q.space, x = Q.space.prop)
     detSpace <- 2 * determinant(chol.Q.space.prop, logarithm = T)$modulus
     Q.space.det.prop <- n.sites * detTime + n.time *
     detSpace
     v_15_prop <- vnew - 15
     vqform_prop <- sum(v_15_prop^2)
     acceptance <- exp(0.5 * (Q.space.det.prop - Q.space.det.old) +
     (1/(2 * tau)) * (phiQphi_phiQphiNew) + 0.5 *
     (1/tau_v) * (vqform_current - vqform_prop))
     accept[8] <- accept[8] + (1/n.blocks)
     if (runif(1) <= acceptance) {
     vqform_current <- vqform_prop
     v <- vnew
     accept[7] <- accept[7] + (1/n.blocks)
     Q.space.det.old <- Q.space.det.prop
     Q.space.trip <- Q.space.trip.prop
     chol.Q.space <- chol.Q.space.prop
     Q.space <- Q.space.prop
     }
     }
     offset.temp <- offset + as.numeric(phi)
     if (p > 2) {
     temp <- poissonbetaupdateMALA(X.standardised, k,
     p, beta_par, offset.temp, y, prior.mean.beta,
     prior.var.beta, n.beta.block, proposal.sd.beta,
     list.block)
     }
     else {
     temp <- poissonbetaupdateRW(X.standardised, k, p,
     beta_par, offset.temp, y, prior.mean.beta, prior.var.beta,
     proposal.sd.beta)
     }
     beta_par <- temp[[1]]
     accept[1] <- accept[1] + temp[[2]]
     accept[2] <- accept[2] + n.beta.block
     XB <- X.standardised %*% beta_par
     nneighbours <- diag.spam(Q.space)
     W_current <- diag(nneighbours) - as.matrix(Q.space)
     if (MALA) {
     phi_update <- SPTICARphiVarbMALA(W = W_current, nsites = n.sites,
     ntimes = n.time, phiVarb = phi, nneighbours = nneighbours,
     tau = tau, y = y, E = offset, phiVarb_tune = phi_tune,
     alpha = alpha, XB = XB)
     }
     else {
     phi_update <- SPTICARphiVarb(W = W_current, nsites = n.sites,
     ntimes = n.time, phiVarb = phi, nneighbours = nneighbours,
     tau = tau, y = y, E = offset, phiVarb_tune = phi_tune,
     alpha = alpha, XB = XB)
     }
     phi <- phi_update[[2]]
     phi <- phi - mean(phi)
     accept[3] <- accept[3] + phi_update[[1]][2]
     accept[4] <- accept[4] + k
     if (!is.null(rhofix)) {
     proposal.rho <- rhofix
     }
     else {
     proposal.rho <- rtruncnorm(n = 1, a = 0, b = 1, mean = rho,
     sd = rho.tune)
     }
     Q.space.trip.prop <- updatetriplets_rho(trips = Q.space.trip,
     nsites = n.sites, rho_old = rho, rho_new = proposal.rho,
     fixedridge = fixedridge)
     Q.space.prop@entries <- Q.space.trip.prop[perm, 3]
     Q.space.trip.diff[, 3] <- Q.space.trip[, 3] - Q.space.trip.prop[,
     3]
     phiQphi_phiQphiNew <- qform_difference_ST(Qtrip = Q.space.trip.diff,
     Qtime = Q.time.trip, phi = phi, nsites = n.sites)
     chol.Q.space.prop <- update(chol.Q.space, x = Q.space.prop)
     detSpace <- 2 * determinant(chol.Q.space.prop, logarithm = T)$modulus
     Q.space.det.prop <- n.sites * detTime + n.time * detSpace
     acceptance <- exp(0.5 * (Q.space.det.prop - Q.space.det.old) +
     (1/(2 * tau)) * (phiQphi_phiQphiNew))
     accept[6] <- accept[6] + 1
     if (runif(1) <= acceptance) {
     accept[5] <- accept[5] + 1
     Q.space.det.old <- Q.space.det.prop
     Q.space.trip <- Q.space.trip.prop
     chol.Q.space <- chol.Q.space.prop
     Q.space <- Q.space.prop
     rho <- proposal.rho
     }
     phiQphi <- qform_ST(Qspace = Q.space.trip, Qtime = Q.time.trip,
     phi = phi, nsites = n.sites)
     tau_scale <- phiQphi/2 + prior.tau2[2]
     tau <- 1/rtrunc(n = 1, spec = "gamma", a = 1e-06, b = Inf,
     shape = tau_phi_shape, scale = (1/tau_scale))
     fitted <- exp(as.vector(XB) + phi + offset)
     loglike <- dpois(x = as.numeric(y), lambda = fitted,
     log = TRUE)
     if (save.iter) {
     samples.beta[increment, ] <- beta_par
     samples.phi[increment, ] <- phi
     samples.fit[increment, ] <- fitted
     samples.tau2[increment, ] <- tau
     samples.vtau2[increment, ] <- tau_v
     samples.v[increment, ] <- v
     samples.alpha[increment, ] <- alpha
     samples.rho[increment, ] <- rho
     samples.loglike[increment, ] <- loglike
     }
     if (j%%100 == 0) {
     accept.beta <- 100 * accept[1]/accept[2]
     accept.phi <- 100 * accept[3]/accept[4]
     accept.w <- 100 * accept[7]/accept[8]
     if (is.null(rhofix)) {
     accept.rho <- 100 * accept[5]/accept[6]
     }
     else {
     accept.rho <- 45
     }
     if (accept.beta > 50) {
     proposal.sd.beta <- proposal.sd.beta + 0.1 *
     proposal.sd.beta
     }
     else if (accept.beta < 40) {
     proposal.sd.beta <- proposal.sd.beta - 0.1 *
     proposal.sd.beta
     }
     else {
     }
     if (accept.phi > 50) {
     phi_tune <- phi_tune + 0.1 * phi_tune
     }
     else if (accept.phi < 40) {
     phi_tune <- phi_tune - 0.1 * phi_tune
     }
     else {
     }
     if (accept.w > 40) {
     W.tune <- W.tune + 0.1 * W.tune
     }
     else if (accept.w < 20) {
     W.tune <- W.tune - 0.1 * W.tune
     }
     else {
     }
     if (accept.rho > 50) {
     rho.tune <- min(rho.tune + 0.1 * rho.tune, 0.5)
     }
     else if (accept.rho < 40) {
     rho.tune <- rho.tune - 0.1 * rho.tune
     }
     else {
     }
     accept.all <- accept.all + accept
     accept <- accept * 0
     }
     else {
     }
     if (j %in% percentage.points & verbose)
     setTxtProgressBar(progressBar, j/n.sample)
     }
     if (verbose)
     cat("\nSummarising results.")
     close(progressBar)
     accept.beta <- 100 * accept.all[1]/accept.all[2]
     accept.phi <- 100 * accept.all[3]/accept.all[4]
     accept.rho <- 100 * accept.all[5]/accept.all[6]
     accept.w <- 100 * accept.all[7]/accept.all[8]
     accept.alpha <- 100
     if (!is.null(rhofix)) {
     accept.final <- c(accept.beta, accept.phi, accept.w)
     names(accept.final) <- c("beta", "phi", "w")
     }
     else {
     accept.final <- c(accept.beta, accept.phi, accept.rho,
     accept.w)
     names(accept.final) <- c("beta", "phi", "rho", "w")
     }
     mean.beta <- apply(samples.beta, 2, mean)
     regression.mat <- matrix(X.standardised %*% mean.beta, nrow = n.sites,
     ncol = n.time, byrow = FALSE)
     mean.phi <- matrix(apply(samples.phi, 2, mean), nrow = n.sites,
     ncol = n.time)
     offset.mat <- matrix(offset, nrow = n.sites, ncol = n.time,
     byrow = FALSE)
     fitted.mean <- as.numeric(exp(offset.mat + mean.phi + regression.mat))
     deviance.fitted <- -2 * sum(dpois(x = as.numeric(y), lambda = fitted.mean,
     log = TRUE))
     modelfit <- common.modelfit(samples.loglike, deviance.fitted)
     fitted.values <- apply(samples.fit, 2, mean)
     response.residuals <- as.numeric(y) - fitted.values
     pearson.residuals <- response.residuals/sqrt(fitted.values)
     residuals <- data.frame(response = response.residuals, pearson = pearson.residuals)
     samples.beta.orig <- common.betatransform(samples.beta, X.indicator,
     X.mean, X.sd, p, FALSE)
     samples.beta.orig <- mcmc(samples.beta.orig)
     summary.beta <- t(apply(samples.beta.orig, 2, quantile, c(0.5,
     0.025, 0.975)))
     summary.beta <- cbind(summary.beta, rep(n.save, p), rep(accept.beta,
     p), effectiveSize(samples.beta.orig), geweke.diag(samples.beta.orig)$z)
     rownames(summary.beta) <- colnames(X)
     colnames(summary.beta) <- c("Median", "2.5%", "97.5%", "n.sample",
     "% accept", "n.effective", "Geweke.diag")
     summary.hyper <- array(NA, c(4, 7))
     summary.hyper[1, 1:3] <- quantile(samples.tau2, c(0.5, 0.025,
     0.975))
     summary.hyper[2, 1:3] <- quantile(samples.rho, c(0.5, 0.025,
     0.975))
     summary.hyper[3, 1:3] <- quantile(samples.alpha, c(0.5, 0.025,
     0.975))
     summary.hyper[4, 1:3] <- quantile(samples.vtau2, c(0.5, 0.025,
     0.975))
     rownames(summary.hyper) <- c("tau2", "rho.S", "rho.T", "tau2.w")
     summary.hyper[1, 4:7] <- c(n.save, 100, effectiveSize(mcmc(samples.tau2)),
     geweke.diag(mcmc(samples.tau2))$z)
     summary.hyper[2, 4:7] <- c(n.save, accept.rho, effectiveSize(mcmc(samples.rho)),
     geweke.diag(mcmc(samples.rho))$z)
     summary.hyper[3, 4:7] <- c(n.save, accept.alpha, effectiveSize(mcmc(samples.alpha)),
     geweke.diag(mcmc(samples.alpha))$z)
     summary.hyper[4, 4:7] <- c(n.save, 100, effectiveSize(mcmc(samples.vtau2)),
     geweke.diag(mcmc(samples.vtau2))$z)
     if (!is.null(rhofix)) {
     summary.hyper[2, ] <- c(rep(rhofix, 3), rep(NA, 4))
     }
     summary.results <- rbind(summary.beta, summary.hyper)
     summary.results[, 1:3] <- round(summary.results[, 1:3], 4)
     summary.results[, 4:7] <- round(summary.results[, 4:7], 1)
     samples.w <- inv_logit(samples.v)
     colnames(samples.w) <- char.locs
     get_prop_thresh <- function(v, thresh) as.numeric(!((sum(v <
     thresh)/length(v)) < 0.99))
     bdry99 <- apply(samples.w, 2, get_prop_thresh, thresh = 0.5)
     bdryMN <- apply(samples.w, 2, mean)
     Wmn <- W99 <- matrix(NA, nrow = n.sites, ncol = n.sites)
     W99[locs] <- bdry99
     W99[locs[, c(2, 1)]] <- bdry99
     Wmn[locs] <- bdryMN
     Wmn[locs[, c(2, 1)]] <- bdryMN
     model.string <- c("Likelihood model - Poisson (log link function)",
     "\nLatent structure model - Adaptive autoregressive CAR model\n")
     samples.tau2all <- cbind(samples.tau2, samples.vtau2)
     colnames(samples.tau2all) <- c("tau2", "tau2.w")
     if (is.null(rhofix)) {
     samples.rhoext <- cbind(samples.rho, samples.alpha)
     colnames(samples.rhoext) <- c("rho.S", "rho.T")
     }
     else {
     samples.rhoext <- cbind(samples.alpha)
     names(samples.rhoext) <- c("rho.T")
     }
     samples <- list(beta = mcmc(samples.beta.orig), phi = mcmc(samples.phi),
     rho = mcmc(samples.rhoext), tau2 = mcmc(samples.tau2all),
     w = mcmc(samples.w), fitted = mcmc(samples.fit))
     localised.structure <- list(Wmedian = Wmn, W99 = W99)
     results <- list(summary.results = summary.results, samples = samples,
     fitted.values = fitted.values, residuals = residuals,
     modelfit = modelfit, accept = accept.final, localised.structure = localised.structure,
     formula = formula, model = model.string, X = X)
     class(results) <- "CARBayesST"
     if (verbose) {
     b <- proc.time()
     cat("Finished in ", round(b[3] - a[3], 1), "seconds.\n")
     }
     else {
     }
     return(results)
    }
    <bytecode: 0x118cc708>
    <environment: namespace:CARBayesST>
     --- function search by body ---
    Function poisson.CARadaptive in namespace CARBayesST has this body.
     ----------- END OF FAILURE REPORT --------------
    Fatal error: the condition has length > 1
Flavor: r-devel-linux-x86_64-debian-clang

Version: 3.0.1
Check: examples
Result: ERROR
    Running examples in ‘CARBayesST-Ex.R’ failed
    The error most likely occurred in:
    
    > base::assign(".ptime", proc.time(), pos = "CheckExEnv")
    > ### Name: ST.CARadaptive
    > ### Title: Fit a spatio-temporal generalised linear mixed model to data,
    > ### with a spatio-temporal autoregressive process that has an adaptive
    > ### autocorrelation structure.
    > ### Aliases: ST.CARadaptive
    >
    > ### ** Examples
    >
    > #################################################
    > #### Run the model on simulated data on a lattice
    > #################################################
    > #### set up the regular lattice
    > x.easting <- 1:10
    > x.northing <- 1:10
    > Grid <- expand.grid(x.easting, x.northing)
    > K <- nrow(Grid)
    > N <- 10
    > N.all <- N * K
    >
    >
    > #### set up spatial neighbourhood matrix W
    > distance <- as.matrix(dist(Grid))
    > W <-array(0, c(K,K))
    > W[distance==1] <-1
    >
    >
    > #### Simulate the elements in the linear predictor and the data
    > Q.W <- 0.99 * (diag(apply(W, 2, sum)) - W) + 0.01 * diag(rep(1,K))
    > Q.W.inv <- solve(Q.W)
    > phi.temp <- mvrnorm(n=1, mu=rep(0,K), Sigma=(0.1 * Q.W.inv))
    > phi <- phi.temp
    > for(i in 2:N)
    + {
    + phi.temp2 <- mvrnorm(n=1, mu=(0.8 * phi.temp), Sigma=(0.1 * Q.W.inv))
    + phi.temp <- phi.temp2
    + phi <- c(phi, phi.temp)
    + }
    > jump <- rep(c(rep(2, 70), rep(4, 30)),N)
    > LP <- jump + phi
    > fitted <- exp(LP)
    > Y <- rpois(n=N.all, lambda=fitted)
    >
    >
    > #### Run the model
    > ## Not run:
    > ##D model <- ST.CARadaptive(formula=Y~1, family="poisson", W=W, burnin=10000,
    > ##D n.sample=50000)
    > ## End(Not run)
    >
    >
    > #### Toy example for checking
    > model <- ST.CARadaptive(formula=Y~1, family="poisson", W=W, burnin=10,
    + n.sample=50)
    Setting up the model.
     ----------- FAILURE REPORT --------------
     --- failure: the condition has length > 1 ---
     --- srcref ---
    :
     --- package (from environment) ---
    CARBayesST
     --- call from context ---
    poisson.CARadaptive(formula = formula, data = data, W = W, burnin = burnin,
     n.sample = n.sample, thin = thin, prior.mean.beta = prior.mean.beta,
     prior.var.beta = prior.var.beta, prior.tau2 = prior.tau2,
     rho = rho, epsilon = epsilon, MALA = MALA, verbose = verbose)
     --- call from argument ---
    if (class(W) == "matrix") W <- as.spam(W)
     --- R stacktrace ---
    where 1: poisson.CARadaptive(formula = formula, data = data, W = W, burnin = burnin,
     n.sample = n.sample, thin = thin, prior.mean.beta = prior.mean.beta,
     prior.var.beta = prior.var.beta, prior.tau2 = prior.tau2,
     rho = rho, epsilon = epsilon, MALA = MALA, verbose = verbose)
    where 2: ST.CARadaptive(formula = Y ~ 1, family = "poisson", W = W, burnin = 10,
     n.sample = 50)
    
     --- value of length: 2 type: logical ---
    [1] TRUE FALSE
     --- function from context ---
    function (formula, data = NULL, W, burnin, n.sample, thin = 1,
     prior.mean.beta = NULL, prior.var.beta = NULL, prior.tau2 = NULL,
     rho = NULL, epsilon = 0, MALA = TRUE, verbose = TRUE)
    {
     a <- common.verbose(verbose)
     blocksize.beta <- 5
     blocksize.v <- 10
     z <- which(W > 0, arr.ind = T)
     locs <- z[which(z[, 1] < z[, 2]), ]
     char.locs <- paste(locs[, 1], ".", locs[, 2], sep = "")
     n.edges <- nrow(locs)
     if (!is.symmetric.matrix(W))
     stop("W is not symmetric.", call. = FALSE)
     if (class(W) == "matrix")
     W <- as.spam(W)
     if (!class(W) %in% c("matrix", "spam"))
     stop("W must be an object with class \"matrix\" or \"spam\"",
     call. = FALSE)
     logit <- function(p) log(p/(1 - p))
     inv_logit <- function(v) 1/(1 + exp(-v))
     if (length(MALA) != 1)
     stop("MALA is not length 1.", call. = FALSE)
     if (!is.logical(MALA))
     stop("MALA is not logical.", call. = FALSE)
     frame <- try(suppressWarnings(model.frame(formula, data = data,
     na.action = na.pass)), silent = TRUE)
     if (class(frame) == "try-error")
     stop("the formula inputted contains an error, e.g the variables may be different lengths.",
     call. = FALSE)
     X <- try(suppressWarnings(model.matrix(object = attr(frame,
     "terms"), data = frame)), silent = TRUE)
     if (class(X) == "try-error")
     stop("the covariate matrix contains inappropriate values.",
     call. = FALSE)
     if (sum(is.na(X)) > 0)
     stop("the covariate matrix contains missing 'NA' values.",
     call. = FALSE)
     p <- ncol(X)
     y <- model.response(frame)
     which.miss <- as.numeric(!is.na(y))
     n.sites <- as.integer(nrow(W))
     n.time <- as.integer(length(y)/n.sites)
     k <- as.integer(round(n.sites * n.time, 0))
     if (is.null(prior.mean.beta))
     prior.mean.beta <- rep(0, p)
     if (is.null(prior.var.beta))
     prior.var.beta <- rep(1e+05, p)
     if (is.null(prior.tau2))
     prior.tau2 <- c(1, 0.01)
     prior.beta.check(prior.mean.beta, prior.var.beta, p)
     prior.var.check(prior.tau2)
     offset <- try(model.offset(frame), silent = TRUE)
     if (class(offset) == "try-error")
     stop("the offset is not numeric.", call. = FALSE)
     if (is.null(offset))
     offset <- rep(0, (n.time * n.sites))
     if (sum(is.na(offset)) > 0)
     stop("the offset has missing 'NA' values.", call. = FALSE)
     if (!is.numeric(offset))
     stop("the offset variable has non-numeric values.", call. = FALSE)
     common.burnin.nsample.thin.check(burnin, n.sample, thin)
     X.standardised <- X
     X.sd <- apply(X, 2, sd)
     X.mean <- apply(X, 2, mean)
     X.indicator <- rep(NA, p)
     for (j in 1:p) {
     if (length(table(X[, j])) > 2) {
     X.indicator[j] <- 1
     X.standardised[, j] <- (X[, j] - mean(X[, j]))/sd(X[,
     j])
     }
     else if (length(table(X[, j])) == 1) {
     X.indicator[j] <- 2
     }
     else {
     X.indicator[j] <- 0
     }
     }
     if (is.numeric(blocksize.v)) {
     fromto <- seq(0, n.edges, by = blocksize.v)
     fromto[1] <- 0
     if (!n.edges %in% fromto)
     fromto <- c(fromto, n.edges)
     n.blocks <- length(fromto) - 1
     blockinds <- vector("list", length = n.blocks)
     for (i in 1:n.blocks) blockinds[[i]] <- (fromto[i] +
     1):fromto[i + 1]
     }
     v <- logit(rtruncnorm(n.edges, mean = 0.999, sd = 0.001,
     a = 0, b = 1))
     v_15 <- v - 15
     vqform_current <- sum(v_15^2)
     W_current <- W
     W_current[locs][1:n.edges] <- inv_logit(v)
     W_current[locs[, 2:1]][1:n.edges] <- inv_logit(v)
     rhofix <- rho
     rho <- ifelse(!is.null(rhofix), rhofix, 0.99)
     fixedridge <- epsilon
     if (rho == 1)
     fixedridge <- 1e-04
     if (!is.numeric(rho))
     stop("rho is fixed but is not numeric.", call. = FALSE)
     if (rho < 0)
     stop("rho is outside the range [0, 1].", call. = FALSE)
     if (rho > 1)
     stop("rho is outside the range [0, 1].", call. = FALSE)
     ## Build the spatial precision matrix Q.space in triplet (i, j, x)
     ## form: diagonal entries are the row sums of W plus the ridge,
     ## off-diagonal entries are -w_ij, stored in both orientations so the
     ## matrix is symmetric.
     tripList <- vector("list", length = 2)
     tripList[[1]] <- cbind(1:nrow(W_current), 1:nrow(W_current),
     rowSums(W_current) + fixedridge)
     tripList[[2]] <- cbind(rbind(locs, locs[, 2:1]), -rep(inv_logit(v),
     2))
     Q.space.trip <- rbind(tripList[[1]], tripList[[2]])
     ## Rescale the triplets from the rho = 1 parameterisation to the
     ## current value of rho.
     Q.space.trip <- updatetriplets_rho(trips = Q.space.trip,
     nsites = n.sites, rho_old = 1, rho_new = rho, fixedridge = fixedridge)
     Q.space <- Q.space.prop <- spam(list(i = Q.space.trip[, 1],
     j = Q.space.trip[, 2], Q.space.trip[, 3]))
     ## Sparse Cholesky factor; the log-determinant of the space-time
     ## precision is n.time copies of the spatial log-determinant
     ## (2 * log det of the Cholesky factor).
     chol.Q.space <- chol.spam(Q.space)
     Q.space.det.old <- n.time * 2 * determinant(chol.Q.space,
     logarithm = T)$modulus
     ## Temporal precision for the autoregressive structure with parameter
     ## alpha, initialised at 1 (random-walk case).
     alpha <- 1
     if (n.time > 1) {
     Q.block <- as.spam(crossprod(diff(diag(n.time))))
     Q.block[1, 1] <- Q.block[1, 1] + 1
     Dg <- diag.spam(diag.spam(Q.block))
     R <- Q.block - Dg
     Dtime <- diag.spam(c(rep(1, nrow(Q.block) - 1), 0))
     Dg <- Dg - Dtime
     ## Q.time = D + alpha^2 * Dtime + alpha * R, the AR(1)-type
     ## tridiagonal precision in time.
     Q.time <- Dg + Dtime * alpha^2 + R * alpha
     Q.time[n.time, n.time] <- 1
     Q.det <- determinant(Q.time, logarithm = T)
     detTime <- as.numeric(0.5 * n.sites * (Q.det$m) * (Q.det$s))
     Q.time.trip <- Reduce("cbind", triplet(Q.time))
     }
     else {
     ## Single time point: the temporal structure degenerates to a
     ## scalar.  NOTE(review): detTime is set to 1 here although the
     ## log-determinant of a 1x1 identity would be 0 -- confirm against
     ## the upstream package before changing.
     Q.time <- 1
     detTime <- 1
     Q.time.trip <- matrix(rep(1, 3), ncol = 3)
     }
     phi_tune <- 0.5
     W.tune <- 1
     rho.tune <- 0.1
     tau_v <- 200
     prior.max.tau <- 1000
     increment <- 0
     glm_mod <- glm(y ~ -1 + X.standardised, family = "quasipoisson",
     offset = offset)
     beta.mean <- glm_mod$coefficients
     beta.sd <- sqrt(diag(summary(glm_mod)$cov.scaled))
     beta_par <- rnorm(n = length(beta.mean), mean = beta.mean,
     sd = beta.sd)
     log.y <- log(y)
     log.y[y == 0] <- -0.1
     res.temp <- log.y - X.standardised %*% beta_par - offset
     res.sd <- sd(res.temp, na.rm = TRUE)/5
     phi <- rnorm(k, mean = 0, sd = res.sd)
     tau <- var(phi)/10
     phiQphi <- qform_ST(Qspace = Q.space.trip, Qtime = Q.time.trip,
     phi = phi, nsites = n.sites)
     XB <- X.standardised %*% beta_par
     tau_v.shape <- (n.edges/2) + prior.tau2[1]
     tau_phi_shape <- (n.sites * n.time/2) + prior.tau2[1]
     n.save <- ifelse(thin == 1, (n.sample - burnin), (n.sample -
     burnin)/thin)
     accept.all <- rep(0, 8)
     accept <- accept.all
     samples.beta <- array(NA, c(n.save, p))
     samples.phi <- array(NA, c(n.save, n.sites * n.time))
     samples.tau2 <- samples.vtau2 <- samples.alpha <- samples.rho <- matrix(0,
     n.save, 1)
     samples.v <- matrix(0, ncol = n.edges, nrow = c(n.save, n.sites *
     n.time))
     samples.fit <- array(NA, c(n.save, n.sites * n.time))
     samples.loglike <- array(NA, c(n.save, n.sites * n.time))
     ## spam options: disable symmetry/pivot checks for speed inside the
     ## MCMC loop (the sparsity structure of Q.space never changes).
     options(spam.cholsymmetrycheck = FALSE)
     options(spam.cholpivotcheck = FALSE)
     options(spam.safemode = c(F, F, F))
     ## Split the regression parameters into blocks for the beta updates;
     ## common.betablock returns start indices, end indices and the block
     ## count.  list.block holds 0-based index vectors then block lengths.
     block.temp <- common.betablock(p)
     beta.beg <- block.temp[[1]]
     beta.fin <- block.temp[[2]]
     n.beta.block <- block.temp[[3]]
     list.block <- as.list(rep(NA, n.beta.block * 2))
     for (r in 1:n.beta.block) {
     list.block[[r]] <- beta.beg[r]:beta.fin[r] - 1
     list.block[[r + n.beta.block]] <- length(list.block[[r]])
     }
     proposal.sd.beta <- 0.01
     proposal.corr.beta <- solve(t(X.standardised) %*% X.standardised)
     chol.proposal.corr.beta <- chol(proposal.corr.beta)
     ## perm maps triplet rows to spam's internal (row, column) ordering so
     ## Q.space@entries can be overwritten in place each iteration.
     perm <- order(Q.space.trip[, 1], Q.space.trip[, 2])
     diags.space <- which(Q.space.trip[perm, 1] == Q.space.trip[perm,
     2])
     if (n.time > 1)
     diag.time <- Reduce("cbind", triplet(diag.spam(n.time -
     1)))
     ## Position of the final temporal diagonal entry (updated with alpha).
     time.last.diag <- which((Q.time.trip[, 1] == Q.time.trip[,
     2]) & (Q.time.trip[, 1] == n.time))
     ## Index sets for phi at the last and first time periods.
     lastblock <- (k - n.sites + 1):k
     firstblock <- 1:n.sites
     n.keep <- floor((n.sample - burnin)/thin)
     ## percentage.points drives the progress bar; it is computed in both
     ## branches so the %in% test below works even when not verbose.
     if (verbose) {
     cat("Generating", n.keep, "post burnin and thinned (if requested) samples.\n",
     sep = " ")
     progressBar <- txtProgressBar(style = 3)
     percentage.points <- round((1:100/100) * n.sample)
     }
     else percentage.points <- round((1:100/100) * n.sample)
     ## Main MCMC loop.  Each iteration updates, in order: the temporal
     ## autocorrelation alpha (if n.time > 1), the edge variance tau_v, the
     ## edge parameters v (block Metropolis), the regression parameters
     ## beta, the random effects phi, the spatial parameter rho, and the
     ## random-effect variance tau; samples are stored post burn-in.
     for (j in 1:n.sample) {
     save.iter <- j > burnin && ((j%%thin == 0) | thin ==
     0)
     if (save.iter)
     increment <- increment + 1
     if (n.time > 1) {
     ## Truncated-normal draw for alpha, based on quadratic forms of
     ## phi at adjacent time points.
     phifirst <- phi[-firstblock]
     philast <- phi[-lastblock]
     philastQphilast <- qform_ST(Qspace = Q.space.trip,
     Qtime = diag.time, phi = philast, nsites = n.sites)
     phifirstQphilast <- qform_ST_asym(Qspace = Q.space.trip,
     Qtime = diag.time, phi1 = phifirst, phi2 = philast,
     nsites = n.sites)
     mu_alpha <- phifirstQphilast/philastQphilast
     mu_sigmasq <- tau/philastQphilast
     alpha <- rtruncnorm(n = 1, a = 10^-5, b = 1 - 10^-5,
     mean = mu_alpha, sd = sqrt(mu_sigmasq))
     Q.time.trip <- update_Qtime(Q.time.trip, alpha, time.last.diag -
     1)
     phiQphi <- qform_ST(Qspace = Q.space.trip, Qtime = Q.time.trip,
     phi = phi, nsites = n.sites)
     detTime <- determinant(Q.time, logarithm = TRUE)
     detTime <- (detTime$m) * (detTime$s)
     }
     ## Gibbs update of the edge variance tau_v (truncated gamma on the
     ## precision scale).
     tau_scale <- vqform_current/2 + prior.tau2[2]
     tau_v <- 1/rtrunc(n = 1, spec = "gamma", a = 1e-06, b = Inf,
     shape = tau_v.shape, scale = (1/tau_scale))
     ## Block Metropolis updates for the edge parameters v.
     v.proposal <- rtruncnorm(n = n.edges, a = -15, b = 15,
     mean = v, sd = W.tune)
     for (i in 1:n.blocks) {
     vnew <- v
     block_inds <- blockinds[[i]]
     vnew[block_inds] <- v.proposal[block_inds]
     tripUpdate <- updatetripList2(Q.space.trip, vold = v,
     vnew = vnew, nedges = n.edges, nsites = n.sites,
     block = block_inds, block_length = length(block_inds),
     fixedridge = fixedridge, rho = rho)
     Q.space.trip.prop <- tripUpdate[[1]]
     Q.space.trip.diff <- tripUpdate[[2]]
     Q.space.prop@entries <- Q.space.trip.prop[perm, 3]
     Q.space.trip.diff[, 3] <- Q.space.trip[, 3] - Q.space.trip.prop[,
     3]
     phiQphi_phiQphiNew <- qform_difference_ST(Qtrip = Q.space.trip.diff,
     Qtime = Q.time.trip, phi = phi, nsites = n.sites)
     ## update() reuses the symbolic Cholesky factorisation.
     chol.Q.space.prop <- update(chol.Q.space, x = Q.space.prop)
     detSpace <- 2 * determinant(chol.Q.space.prop, logarithm = T)$modulus
     Q.space.det.prop <- n.sites * detTime + n.time *
     detSpace
     v_15_prop <- vnew - 15
     vqform_prop <- sum(v_15_prop^2)
     ## Acceptance ratio: determinant change + quadratic form change
     ## + prior contribution for v.
     acceptance <- exp(0.5 * (Q.space.det.prop - Q.space.det.old) +
     (1/(2 * tau)) * (phiQphi_phiQphiNew) + 0.5 *
     (1/tau_v) * (vqform_current - vqform_prop))
     accept[8] <- accept[8] + (1/n.blocks)
     if (runif(1) <= acceptance) {
     vqform_current <- vqform_prop
     v <- vnew
     accept[7] <- accept[7] + (1/n.blocks)
     Q.space.det.old <- Q.space.det.prop
     Q.space.trip <- Q.space.trip.prop
     chol.Q.space <- chol.Q.space.prop
     Q.space <- Q.space.prop
     }
     }
     ## Update beta: MALA when p > 2, otherwise random walk Metropolis.
     offset.temp <- offset + as.numeric(phi)
     if (p > 2) {
     temp <- poissonbetaupdateMALA(X.standardised, k,
     p, beta_par, offset.temp, y, prior.mean.beta,
     prior.var.beta, n.beta.block, proposal.sd.beta,
     list.block)
     }
     else {
     temp <- poissonbetaupdateRW(X.standardised, k, p,
     beta_par, offset.temp, y, prior.mean.beta, prior.var.beta,
     proposal.sd.beta)
     }
     beta_par <- temp[[1]]
     accept[1] <- accept[1] + temp[[2]]
     accept[2] <- accept[2] + n.beta.block
     XB <- X.standardised %*% beta_par
     ## Recover the dense W from Q.space, update the random effects phi,
     ## then centre phi for identifiability with the intercept.
     nneighbours <- diag.spam(Q.space)
     W_current <- diag(nneighbours) - as.matrix(Q.space)
     if (MALA) {
     phi_update <- SPTICARphiVarbMALA(W = W_current, nsites = n.sites,
     ntimes = n.time, phiVarb = phi, nneighbours = nneighbours,
     tau = tau, y = y, E = offset, phiVarb_tune = phi_tune,
     alpha = alpha, XB = XB)
     }
     else {
     phi_update <- SPTICARphiVarb(W = W_current, nsites = n.sites,
     ntimes = n.time, phiVarb = phi, nneighbours = nneighbours,
     tau = tau, y = y, E = offset, phiVarb_tune = phi_tune,
     alpha = alpha, XB = XB)
     }
     phi <- phi_update[[2]]
     phi <- phi - mean(phi)
     accept[3] <- accept[3] + phi_update[[1]][2]
     accept[4] <- accept[4] + k
     ## Metropolis update of rho; when rho is fixed the "proposal" is the
     ## fixed value itself, so the move is always a no-op.
     if (!is.null(rhofix)) {
     proposal.rho <- rhofix
     }
     else {
     proposal.rho <- rtruncnorm(n = 1, a = 0, b = 1, mean = rho,
     sd = rho.tune)
     }
     Q.space.trip.prop <- updatetriplets_rho(trips = Q.space.trip,
     nsites = n.sites, rho_old = rho, rho_new = proposal.rho,
     fixedridge = fixedridge)
     Q.space.prop@entries <- Q.space.trip.prop[perm, 3]
     Q.space.trip.diff[, 3] <- Q.space.trip[, 3] - Q.space.trip.prop[,
     3]
     phiQphi_phiQphiNew <- qform_difference_ST(Qtrip = Q.space.trip.diff,
     Qtime = Q.time.trip, phi = phi, nsites = n.sites)
     chol.Q.space.prop <- update(chol.Q.space, x = Q.space.prop)
     detSpace <- 2 * determinant(chol.Q.space.prop, logarithm = T)$modulus
     Q.space.det.prop <- n.sites * detTime + n.time * detSpace
     acceptance <- exp(0.5 * (Q.space.det.prop - Q.space.det.old) +
     (1/(2 * tau)) * (phiQphi_phiQphiNew))
     accept[6] <- accept[6] + 1
     if (runif(1) <= acceptance) {
     accept[5] <- accept[5] + 1
     Q.space.det.old <- Q.space.det.prop
     Q.space.trip <- Q.space.trip.prop
     chol.Q.space <- chol.Q.space.prop
     Q.space <- Q.space.prop
     rho <- proposal.rho
     }
     ## Gibbs update of the random-effect variance tau.
     phiQphi <- qform_ST(Qspace = Q.space.trip, Qtime = Q.time.trip,
     phi = phi, nsites = n.sites)
     tau_scale <- phiQphi/2 + prior.tau2[2]
     tau <- 1/rtrunc(n = 1, spec = "gamma", a = 1e-06, b = Inf,
     shape = tau_phi_shape, scale = (1/tau_scale))
     ## Fitted values and pointwise log-likelihood for this iteration.
     fitted <- exp(as.vector(XB) + phi + offset)
     loglike <- dpois(x = as.numeric(y), lambda = fitted,
     log = TRUE)
     if (save.iter) {
     samples.beta[increment, ] <- beta_par
     samples.phi[increment, ] <- phi
     samples.fit[increment, ] <- fitted
     samples.tau2[increment, ] <- tau
     samples.vtau2[increment, ] <- tau_v
     samples.v[increment, ] <- v
     samples.alpha[increment, ] <- alpha
     samples.rho[increment, ] <- rho
     samples.loglike[increment, ] <- loglike
     }
     ## Every 100 iterations, adapt the proposal scales towards their
     ## target acceptance rates, then fold accept into accept.all.
     if (j%%100 == 0) {
     accept.beta <- 100 * accept[1]/accept[2]
     accept.phi <- 100 * accept[3]/accept[4]
     accept.w <- 100 * accept[7]/accept[8]
     if (is.null(rhofix)) {
     accept.rho <- 100 * accept[5]/accept[6]
     }
     else {
     ## Dummy in-range value so rho.tune is never adapted when rho
     ## is fixed.
     accept.rho <- 45
     }
     if (accept.beta > 50) {
     proposal.sd.beta <- proposal.sd.beta + 0.1 *
     proposal.sd.beta
     }
     else if (accept.beta < 40) {
     proposal.sd.beta <- proposal.sd.beta - 0.1 *
     proposal.sd.beta
     }
     else {
     }
     if (accept.phi > 50) {
     phi_tune <- phi_tune + 0.1 * phi_tune
     }
     else if (accept.phi < 40) {
     phi_tune <- phi_tune - 0.1 * phi_tune
     }
     else {
     }
     if (accept.w > 40) {
     W.tune <- W.tune + 0.1 * W.tune
     }
     else if (accept.w < 20) {
     W.tune <- W.tune - 0.1 * W.tune
     }
     else {
     }
     if (accept.rho > 50) {
     rho.tune <- min(rho.tune + 0.1 * rho.tune, 0.5)
     }
     else if (accept.rho < 40) {
     rho.tune <- rho.tune - 0.1 * rho.tune
     }
     else {
     }
     accept.all <- accept.all + accept
     accept <- accept * 0
     }
     else {
     }
     if (j %in% percentage.points & verbose)
     setTxtProgressBar(progressBar, j/n.sample)
     }
     if (verbose)
     cat("\nSummarising results.")
     close(progressBar)
     ## Overall acceptance rates (percentages) accumulated over the run.
     accept.beta <- 100 * accept.all[1]/accept.all[2]
     accept.phi <- 100 * accept.all[3]/accept.all[4]
     accept.rho <- 100 * accept.all[5]/accept.all[6]
     accept.w <- 100 * accept.all[7]/accept.all[8]
     accept.alpha <- 100
     ## The rho rate is only reported when rho was actually sampled.
     if (!is.null(rhofix)) {
     accept.final <- c(accept.beta, accept.phi, accept.w)
     names(accept.final) <- c("beta", "phi", "w")
     }
     else {
     accept.final <- c(accept.beta, accept.phi, accept.rho,
     accept.w)
     names(accept.final) <- c("beta", "phi", "rho", "w")
     }
     ## Posterior-mean fitted values, deviance and model fit criteria.
     mean.beta <- apply(samples.beta, 2, mean)
     regression.mat <- matrix(X.standardised %*% mean.beta, nrow = n.sites,
     ncol = n.time, byrow = FALSE)
     mean.phi <- matrix(apply(samples.phi, 2, mean), nrow = n.sites,
     ncol = n.time)
     offset.mat <- matrix(offset, nrow = n.sites, ncol = n.time,
     byrow = FALSE)
     fitted.mean <- as.numeric(exp(offset.mat + mean.phi + regression.mat))
     deviance.fitted <- -2 * sum(dpois(x = as.numeric(y), lambda = fitted.mean,
     log = TRUE))
     modelfit <- common.modelfit(samples.loglike, deviance.fitted)
     ## Response and Pearson residuals from the posterior fitted values.
     fitted.values <- apply(samples.fit, 2, mean)
     response.residuals <- as.numeric(y) - fitted.values
     pearson.residuals <- response.residuals/sqrt(fitted.values)
     residuals <- data.frame(response = response.residuals, pearson = pearson.residuals)
     ## Back-transform beta to the original covariate scale, then build
     ## the summary table (median, 95% CI, n, acceptance, ESS, Geweke).
     samples.beta.orig <- common.betatransform(samples.beta, X.indicator,
     X.mean, X.sd, p, FALSE)
     samples.beta.orig <- mcmc(samples.beta.orig)
     summary.beta <- t(apply(samples.beta.orig, 2, quantile, c(0.5,
     0.025, 0.975)))
     summary.beta <- cbind(summary.beta, rep(n.save, p), rep(accept.beta,
     p), effectiveSize(samples.beta.orig), geweke.diag(samples.beta.orig)$z)
     rownames(summary.beta) <- colnames(X)
     colnames(summary.beta) <- c("Median", "2.5%", "97.5%", "n.sample",
     "% accept", "n.effective", "Geweke.diag")
     ## Hyperparameter summaries: tau2, rho.S (rho), rho.T (alpha), tau2.w.
     summary.hyper <- array(NA, c(4, 7))
     summary.hyper[1, 1:3] <- quantile(samples.tau2, c(0.5, 0.025,
     0.975))
     summary.hyper[2, 1:3] <- quantile(samples.rho, c(0.5, 0.025,
     0.975))
     summary.hyper[3, 1:3] <- quantile(samples.alpha, c(0.5, 0.025,
     0.975))
     summary.hyper[4, 1:3] <- quantile(samples.vtau2, c(0.5, 0.025,
     0.975))
     rownames(summary.hyper) <- c("tau2", "rho.S", "rho.T", "tau2.w")
     summary.hyper[1, 4:7] <- c(n.save, 100, effectiveSize(mcmc(samples.tau2)),
     geweke.diag(mcmc(samples.tau2))$z)
     summary.hyper[2, 4:7] <- c(n.save, accept.rho, effectiveSize(mcmc(samples.rho)),
     geweke.diag(mcmc(samples.rho))$z)
     summary.hyper[3, 4:7] <- c(n.save, accept.alpha, effectiveSize(mcmc(samples.alpha)),
     geweke.diag(mcmc(samples.alpha))$z)
     summary.hyper[4, 4:7] <- c(n.save, 100, effectiveSize(mcmc(samples.vtau2)),
     geweke.diag(mcmc(samples.vtau2))$z)
     ## A fixed rho is reported as-is with no MCMC diagnostics.
     if (!is.null(rhofix)) {
     summary.hyper[2, ] <- c(rep(rhofix, 3), rep(NA, 4))
     }
     summary.results <- rbind(summary.beta, summary.hyper)
     summary.results[, 1:3] <- round(summary.results[, 1:3], 4)
     summary.results[, 4:7] <- round(summary.results[, 4:7], 1)
     ## Posterior summaries of the edge weights w = inv_logit(v).  W99
     ## flags edges whose weight is below the 0.5 threshold with posterior
     ## probability >= 0.99 (identified boundaries); Wmn holds the
     ## posterior mean weight, both stored symmetrically.
     samples.w <- inv_logit(samples.v)
     colnames(samples.w) <- char.locs
     get_prop_thresh <- function(v, thresh) as.numeric(!((sum(v <
     thresh)/length(v)) < 0.99))
     bdry99 <- apply(samples.w, 2, get_prop_thresh, thresh = 0.5)
     bdryMN <- apply(samples.w, 2, mean)
     Wmn <- W99 <- matrix(NA, nrow = n.sites, ncol = n.sites)
     W99[locs] <- bdry99
     W99[locs[, c(2, 1)]] <- bdry99
     Wmn[locs] <- bdryMN
     Wmn[locs[, c(2, 1)]] <- bdryMN
     model.string <- c("Likelihood model - Poisson (log link function)",
     "\nLatent structure model - Adaptive autoregressive CAR model\n")
     samples.tau2all <- cbind(samples.tau2, samples.vtau2)
     colnames(samples.tau2all) <- c("tau2", "tau2.w")
     if (is.null(rhofix)) {
     samples.rhoext <- cbind(samples.rho, samples.alpha)
     colnames(samples.rhoext) <- c("rho.S", "rho.T")
     }
     else {
     samples.rhoext <- cbind(samples.alpha)
     names(samples.rhoext) <- c("rho.T")
     }
     samples <- list(beta = mcmc(samples.beta.orig), phi = mcmc(samples.phi),
     rho = mcmc(samples.rhoext), tau2 = mcmc(samples.tau2all),
     w = mcmc(samples.w), fitted = mcmc(samples.fit))
     localised.structure <- list(Wmedian = Wmn, W99 = W99)
     results <- list(summary.results = summary.results, samples = samples,
     fitted.values = fitted.values, residuals = residuals,
     modelfit = modelfit, accept = accept.final, localised.structure = localised.structure,
     formula = formula, model = model.string, X = X)
     class(results) <- "CARBayesST"
     if (verbose) {
     b <- proc.time()
     cat("Finished in ", round(b[3] - a[3], 1), "seconds.\n")
     }
     else {
     }
     return(results)
    }
    <bytecode: 0x5562ab5a3678>
    <environment: namespace:CARBayesST>
     --- function search by body ---
    Function poisson.CARadaptive in namespace CARBayesST has this body.
     ----------- END OF FAILURE REPORT --------------
    Fatal error: the condition has length > 1
Flavor: r-devel-linux-x86_64-debian-gcc

Version: 3.0.1
Check: installed package size
Result: NOTE
     installed size is 5.9Mb
     sub-directories of 1Mb or more:
     doc 1.1Mb
     libs 4.1Mb
Flavors: r-devel-linux-x86_64-fedora-clang, r-release-osx-x86_64, r-oldrel-osx-x86_64