From c1602c4debdfc8dea32eb7378072d29a0dd99ca5 Mon Sep 17 00:00:00 2001 From: Didi Hou <didi.hou@rwth-aachen.de> Date: Tue, 19 Sep 2023 23:44:51 +0200 Subject: [PATCH 01/11] Create CHANGE_LOG.md to record all changes made after the publication of relevant papers. --- CHANGE_LOG.md | 1 + 1 file changed, 1 insertion(+) create mode 100644 CHANGE_LOG.md diff --git a/CHANGE_LOG.md b/CHANGE_LOG.md new file mode 100644 index 0000000..9e3c499 --- /dev/null +++ b/CHANGE_LOG.md @@ -0,0 +1 @@ +This is the change log file of the repository recording all changes since the publication of connected papers. It is a document mainly recording the down-scaling effort made to the multi-area model. -- GitLab From 1abd16031a072976ca9564cec56323602c52ce17 Mon Sep 17 00:00:00 2001 From: Didi Hou <didi.hou@rwth-aachen.de> Date: Wed, 20 Sep 2023 15:08:37 +0200 Subject: [PATCH 02/11] Delete CHANGE_LOG.md --- CHANGE_LOG.md | 1 - 1 file changed, 1 deletion(-) delete mode 100644 CHANGE_LOG.md diff --git a/CHANGE_LOG.md b/CHANGE_LOG.md deleted file mode 100644 index 9e3c499..0000000 --- a/CHANGE_LOG.md +++ /dev/null @@ -1 +0,0 @@ -This is the change log file of the repository recording all changes since the publication of connected papers. It is a document mainly recording the down-scaling effort made to the multi-area model. -- GitLab From 2243cfa9648a6f41c377fc67fd07ed9f68a73690 Mon Sep 17 00:00:00 2001 From: didihou <didi.hou@rwth-aachen.de> Date: Wed, 20 Sep 2023 15:14:06 +0200 Subject: [PATCH 03/11] / --- ...ckpoint-checkpoint-checkpoint-checkpoint.R | 14 + ...ckpoint-checkpoint-checkpoint-checkpoint.R | 289 ++++++++++++++++++ ...nsities-checkpoint-checkpoint-checkpoint.R | 14 + .../bbAlt-checkpoint-checkpoint-checkpoint.R | 289 ++++++++++++++++++ .../SLN_logdensities-checkpoint-checkpoint.R | 14 + .../bbAlt-checkpoint-checkpoint.R | 289 ++++++++++++++++++ .../SLN_logdensities-checkpoint.R | 14 + .../.ipynb_checkpoints/bbAlt-checkpoint.R | 289 ++++++++++++++++++ 8 files changed, 1212 insertions(+) create mode 100644 multiarea_model/data_multiarea/.ipynb_checkpoints/.ipynb_checkpoints/.ipynb_checkpoints/.ipynb_checkpoints/SLN_logdensities-checkpoint-checkpoint-checkpoint-checkpoint.R create mode 100644 multiarea_model/data_multiarea/.ipynb_checkpoints/.ipynb_checkpoints/.ipynb_checkpoints/.ipynb_checkpoints/bbAlt-checkpoint-checkpoint-checkpoint-checkpoint.R create mode 100644 multiarea_model/data_multiarea/.ipynb_checkpoints/.ipynb_checkpoints/.ipynb_checkpoints/SLN_logdensities-checkpoint-checkpoint-checkpoint.R create mode 100644 multiarea_model/data_multiarea/.ipynb_checkpoints/.ipynb_checkpoints/.ipynb_checkpoints/bbAlt-checkpoint-checkpoint-checkpoint.R create mode 100644 multiarea_model/data_multiarea/.ipynb_checkpoints/.ipynb_checkpoints/SLN_logdensities-checkpoint-checkpoint.R create mode 100644 multiarea_model/data_multiarea/.ipynb_checkpoints/.ipynb_checkpoints/bbAlt-checkpoint-checkpoint.R create mode 100644 multiarea_model/data_multiarea/.ipynb_checkpoints/SLN_logdensities-checkpoint.R create mode 100644 multiarea_model/data_multiarea/.ipynb_checkpoints/bbAlt-checkpoint.R diff --git a/multiarea_model/data_multiarea/.ipynb_checkpoints/.ipynb_checkpoints/.ipynb_checkpoints/.ipynb_checkpoints/SLN_logdensities-checkpoint-checkpoint-checkpoint-checkpoint.R b/multiarea_model/data_multiarea/.ipynb_checkpoints/.ipynb_checkpoints/.ipynb_checkpoints/.ipynb_checkpoints/SLN_logdensities-checkpoint-checkpoint-checkpoint-checkpoint.R new file mode 100644 index 0000000..410fdd1 --- /dev/null +++ 
b/multiarea_model/data_multiarea/.ipynb_checkpoints/.ipynb_checkpoints/.ipynb_checkpoints/.ipynb_checkpoints/SLN_logdensities-checkpoint-checkpoint-checkpoint-checkpoint.R @@ -0,0 +1,14 @@ +library('aod') +args <- commandArgs(trailingOnly=TRUE) +source(paste(args,'multiarea_model/data_multiarea/bbAlt.R', sep="")) +f <- file(paste(args,'multiarea_model/data_multiarea/raw_data/RData_prepared_logdensities.txt', sep=""),'r') +x <- read.table(f) +close(f) + + +dens <- data.matrix(x)[,7] + +m2.bb <- betabin(cbind(S, I) ~ dens , ~ 1, data = x, "probit", control = list(maxit = 100000)) +h2.bb <- c(coef(m2.bb)) + +print(h2.bb) diff --git a/multiarea_model/data_multiarea/.ipynb_checkpoints/.ipynb_checkpoints/.ipynb_checkpoints/.ipynb_checkpoints/bbAlt-checkpoint-checkpoint-checkpoint-checkpoint.R b/multiarea_model/data_multiarea/.ipynb_checkpoints/.ipynb_checkpoints/.ipynb_checkpoints/.ipynb_checkpoints/bbAlt-checkpoint-checkpoint-checkpoint-checkpoint.R new file mode 100644 index 0000000..6aa3d5e --- /dev/null +++ b/multiarea_model/data_multiarea/.ipynb_checkpoints/.ipynb_checkpoints/.ipynb_checkpoints/.ipynb_checkpoints/bbAlt-checkpoint-checkpoint-checkpoint-checkpoint.R @@ -0,0 +1,289 @@ +# Script provided by Kenneth Knoblauch +# This code is based on functions from the aod package of R written by +# Matthieu Lesnoff and Renaud Lancelot +# (https://cran.r-project.org/web/packages/aod/index.html), published +# under the GPL-3 license +# (https://cran.r-project.org/web/licenses/GPL-3). + +betabin <- function (formula, random, data = NULL, link = c("logit", "probit", "cloglog"), + phi.ini = NULL, warnings = FALSE, na.action = na.omit, fixpar = list(), + hessian = TRUE, control = list(maxit = 2000), ...) +{ + CALL <- mf <- match.call(expand.dots = FALSE) + tr <- function(string) gsub("^[[:space:]]+|[[:space:]]+$", + "", string) + link <- match.arg(link) + if (length(formula) != 3) + stop(paste(tr(deparse(formula)), collapse = " "), "is not a valid formula.") + else if (substring(deparse(formula)[1], 1, 5) != "cbind") + stop(paste(tr(deparse(formula)), collapse = ""), " is not a valid formula.\n", + "The response must be a matrix of the form cbind(success, failure)") + if (length(random) == 3) { + form <- deparse(random) + warning("The formula for phi (", form, ") contains a response which is ignored.") + random <- random[-2] + } + explain <- as.character(attr(terms(random), "variables"))[-1] + if (length(explain) > 1) { + warning("The formula for phi contains several explanatory variables (", + paste(explain, collapse = ", "), ").\n", "Only the first one (", + explain[1], ") was considered.") + explain <- explain[1] + } + gf3 <- if (length(explain) == 1) + paste(as.character(formula[3]), explain, sep = " + ") + else as.character(formula[3]) + gf <- formula(paste(formula[2], "~", gf3)) + if (missing(data)) + data <- environment(gf) + mb <- match(c("formula", "data", "na.action"), names(mf), + 0) + mfb <- mf[c(1, mb)] + mfb$drop.unused.levels <- TRUE + mfb[[1]] <- as.name("model.frame") + names(mfb)[2] <- "formula" + mfb <- eval(mfb, parent.frame()) + mt <- attr(mfb, "terms") + modmatrix.b <- if (!is.empty.model(mt)) + model.matrix(mt, mfb) + else matrix(, NROW(Y), 0) + Y <- model.response(mfb, "numeric") + weights <- model.weights(mfb) + if (!is.null(weights) && any(weights < 0)) + stop("Negative wts not allowed") + n <- rowSums(Y) + y <- Y[, 1] + if (any(n == 0)) + warning("The data set contains at least one line with weight = 0.\n") + mr <- match(c("random", "data", "na.action"), names(mf), + 0) + 
mr <- mf[c(1, mr)] + mr$drop.unused.levels <- TRUE + mr[[1]] <- as.name("model.frame") + names(mr)[2] <- "formula" + mr <- eval(mr, parent.frame()) + if (length(explain) == 0) + modmatrix.phi <- model.matrix(object = ~1, data = mr) + else { + express <- paste("model.matrix(object = ~ -1 + ", explain, + ", data = mr", ", contrasts = list(", explain, " = 'contr.treatment'))", + sep = "") + if (is.ordered(data[, match(explain, table = names(mr))])) + warning(explain, " is an ordered factor.\n", "Treatment contrast was used to build model matrix for phi.") + modmatrix.phi <- eval(parse(text = express)) + } + fam <- eval(parse(text = paste("binomial(link =", link, ")"))) + fm <- glm(formula = formula, family = fam, data = data, na.action = na.action) + b <- coef(fm) + if (any(is.na(b))) { + print(nab <- b[is.na(b)]) + stop("Initial values for the fixed effects contain at least one missing value.") + } + nb.b <- ncol(modmatrix.b) + nb.phi <- ncol(modmatrix.phi) + if (!is.null(phi.ini) && !(phi.ini < 1 & phi.ini > 0)) + stop("phi.ini was set to ", phi.ini, ".\nphi.ini should verify 0 < phi.ini < 1") + else if (is.null(phi.ini)) + phi.ini <- rep(0.1, nb.phi) + param.ini <- c(b, phi.ini) + if (!is.null(unlist(fixpar))) + param.ini[fixpar[[1]]] <- fixpar[[2]] + minuslogL <- function(param) { + if (!is.null(unlist(fixpar))) + param[fixpar[[1]]] <- fixpar[[2]] + b <- param[1:nb.b] + eta <- as.vector(modmatrix.b %*% b) + p <- invlink(eta, type = link) + phi <- as.vector(modmatrix.phi %*% param[(nb.b + 1):(nb.b + + nb.phi)]) + cnd <- phi == 0 + f1 <- dbinom(x = y[cnd], size = n[cnd], prob = p[cnd], + log = TRUE) + n2 <- n[!cnd] + y2 <- y[!cnd] + p2 <- p[!cnd] + phi2 <- phi[!cnd] + f2 <- lchoose(n2, y2) + lbeta(p2 * (1 - phi2)/phi2 + + y2, (1 - p2) * (1 - phi2)/phi2 + n2 - y2) - lbeta(p2 * + (1 - phi2)/phi2, (1 - p2) * (1 - phi2)/phi2) + fn <- sum(c(f1, f2)) + if (!is.finite(fn)) + fn <- -1e+20 + -fn + } + withWarnings <- function(expr) { + myWarnings <- NULL + wHandler <- function(w) { + myWarnings <<- c(myWarnings, list(w)) + invokeRestart("muffleWarning") + } + val <- withCallingHandlers(expr, warning = wHandler) + list(value = val, warnings = myWarnings) + } + reswarn <- withWarnings(optim(par = param.ini, fn = minuslogL, + hessian = hessian, control = control, ...)) + res <- reswarn$value + if (warnings) { + if (length(reswarn$warnings) > 0) { + v <- unlist(lapply(reswarn$warnings, as.character)) + tv <- data.frame(message = v, freq = rep(1, length(v))) + cat("Warnings during likelihood maximisation:\n") + print(aggregate(tv[, "freq", drop = FALSE], list(warning = tv$message), + sum)) + } + } + param <- res$par + namb <- colnames(modmatrix.b) + namphi <- paste("phi", colnames(modmatrix.phi), sep = ".") + nam <- c(namb, namphi) + names(param) <- nam + if (!is.null(unlist(fixpar))) + param[fixpar[[1]]] <- fixpar[[2]] + H <- H.singular <- Hr.singular <- NA + varparam <- matrix(NA) + is.singular <- function(X) qr(X)$rank < nrow(as.matrix(X)) + if (hessian) { + H <- res$hessian + if (is.null(unlist(fixpar))) { + H.singular <- is.singular(H) + if (!H.singular) + varparam <- qr.solve(H) + else warning("The hessian matrix was singular.\n") + } + else { + idparam <- 1:(nb.b + nb.phi) + idestim <- idparam[-fixpar[[1]]] + Hr <- as.matrix(H[-fixpar[[1]], -fixpar[[1]]]) + H.singular <- is.singular(Hr) + if (!H.singular) { + Vr <- solve(Hr) + dimnames(Vr) <- list(idestim, idestim) + varparam <- matrix(rep(NA, NROW(H) * NCOL(H)), + ncol = NCOL(H)) + varparam[idestim, idestim] <- Vr + } + } + } + else varparam <- 
matrix(NA) + if (any(!is.na(varparam))) + dimnames(varparam) <- list(nam, nam) + nbpar <- if (is.null(unlist(fixpar))) + sum(!is.na(param)) + else sum(!is.na(param[-fixpar[[1]]])) + logL.max <- sum(dbinom(x = y, size = n, prob = y/n, log = TRUE)) + logL <- -res$value + dev <- -2 * (logL - logL.max) + df.residual <- sum(n > 0) - nbpar + iterations <- res$counts[1] + code <- res$convergence + msg <- if (!is.null(res$message)) + res$message + else character(0) + if (code != 0) + warning("\nPossible convergence problem. Optimization process code: ", + code, " (see ?optim).\n") + new(Class = "glimML", CALL = CALL, link = link, method = "BB", + data = data, formula = formula, random = random, param = param, + varparam = varparam, fixed.param = param[seq(along = namb)], + random.param = param[-seq(along = namb)], logL = logL, + logL.max = logL.max, dev = dev, df.residual = df.residual, + nbpar = nbpar, iterations = iterations, code = code, + msg = msg, singular.hessian = as.numeric(H.singular), + param.ini = param.ini, na.action = na.action) +} + +invlink <- function (x, type = c("cloglog", "log", "logit", "probit")) +{ + switch(type, logit = plogis(x), probit = pnorm(x), log = exp(x), cloglog = 1 - + exp(-exp(x))) +} + +link <- function (x, type = c("cloglog", "log", "logit", "probit")) +{ + switch(type, logit = qlogis(x), probit = qnorm(x), + log = log(x), cloglog = log(-log(1 - x))) +} + + +pr <- function (object, ...) +{ + .local <- function (object, newdata = NULL, type = c("response", + "link"), se.fit = FALSE, ...) + { + type <- match.arg(type) + mf <- object@CALL + b <- coef(object) + f <- object@formula[-2] + data <- object@data + offset <- NULL + if (is.null(newdata)) { + mb <- match(c("formula", "data", "na.action"), names(mf), + 0) + mfb <- mf[c(1, mb)] + mfb$drop.unused.levels <- TRUE + mfb[[1]] <- as.name("model.frame") + names(mfb)[2] <- "formula" + mfb <- eval(mfb, parent.frame()) + mt <- attr(mfb, "terms") + Y <- model.response(mfb, "numeric") + X <- if (!is.empty.model(mt)) + model.matrix(mt, mfb, contrasts) + else matrix(, NROW(Y), 0) + offset <- model.offset(mfb) + } + else { + mfb <- model.frame(f, newdata) + offset <- model.offset(mfb) + X <- model.matrix(object = f, data = newdata) + } + eta <- as.vector(X %*% b) + eta <- if (is.null(offset)) + eta + else eta + offset + varparam <- object@varparam + varb <- as.matrix(varparam[seq(length(b)), seq(length(b))]) + vareta <- X %*% varb %*% t(X) + if (type == "response") { + p <- invlink(eta, type = object@link) + J <- switch(object@link, logit = diag(p * (1 - p), + nrow = length(p)), probit = diag(dnorm( qnorm(p) ), + nrow = length(p)), cloglog = diag(-(1 - p) * + log(1 - p), nrow = length(p)), log = diag(p, + nrow = length(p))) + varp <- J %*% vareta %*% J + se.p <- sqrt(diag(varp)) + } + se.eta <- sqrt(diag(vareta)) + if (!se.fit) + res <- switch(type, response = p, link = eta) + else res <- switch(type, response = list(fit = p, se.fit = se.p), + link = list(fit = eta, se.fit = se.eta)) + res + } + .local(object, ...) +} + +setMethod(predict, "glimML", pr) +setMethod(fitted, "glimML", function (object, ...) 
{ + mf <- object@CALL + mb <- match(c("formula", "data", "na.action"), names(mf), + 0) + mfb <- mf[c(1, mb)] + mfb$drop.unused.levels <- TRUE + mfb[[1]] <- as.name("model.frame") + names(mfb)[2] <- "formula" + mfb <- eval(mfb, parent.frame()) + mt <- attr(mfb, "terms") + Y <- model.response(mfb, "numeric") + X <- if (!is.empty.model(mt)) + model.matrix(mt, mfb, contrasts) + else matrix(, NROW(Y), 0) + offset <- model.offset(mfb) + b <- coef(object) + eta <- as.vector(X %*% b) + eta <- if (is.null(offset)) + eta + else eta + offset + invlink(eta, type = object@link) +} +) diff --git a/multiarea_model/data_multiarea/.ipynb_checkpoints/.ipynb_checkpoints/.ipynb_checkpoints/SLN_logdensities-checkpoint-checkpoint-checkpoint.R b/multiarea_model/data_multiarea/.ipynb_checkpoints/.ipynb_checkpoints/.ipynb_checkpoints/SLN_logdensities-checkpoint-checkpoint-checkpoint.R new file mode 100644 index 0000000..410fdd1 --- /dev/null +++ b/multiarea_model/data_multiarea/.ipynb_checkpoints/.ipynb_checkpoints/.ipynb_checkpoints/SLN_logdensities-checkpoint-checkpoint-checkpoint.R @@ -0,0 +1,14 @@ +library('aod') +args <- commandArgs(trailingOnly=TRUE) +source(paste(args,'multiarea_model/data_multiarea/bbAlt.R', sep="")) +f <- file(paste(args,'multiarea_model/data_multiarea/raw_data/RData_prepared_logdensities.txt', sep=""),'r') +x <- read.table(f) +close(f) + + +dens <- data.matrix(x)[,7] + +m2.bb <- betabin(cbind(S, I) ~ dens , ~ 1, data = x, "probit", control = list(maxit = 100000)) +h2.bb <- c(coef(m2.bb)) + +print(h2.bb) diff --git a/multiarea_model/data_multiarea/.ipynb_checkpoints/.ipynb_checkpoints/.ipynb_checkpoints/bbAlt-checkpoint-checkpoint-checkpoint.R b/multiarea_model/data_multiarea/.ipynb_checkpoints/.ipynb_checkpoints/.ipynb_checkpoints/bbAlt-checkpoint-checkpoint-checkpoint.R new file mode 100644 index 0000000..6aa3d5e --- /dev/null +++ b/multiarea_model/data_multiarea/.ipynb_checkpoints/.ipynb_checkpoints/.ipynb_checkpoints/bbAlt-checkpoint-checkpoint-checkpoint.R @@ -0,0 +1,289 @@ +# Script provided by Kenneth Knoblauch +# This code is based on functions from the aod package of R written by +# Matthieu Lesnoff and Renaud Lancelot +# (https://cran.r-project.org/web/packages/aod/index.html), published +# under the GPL-3 license +# (https://cran.r-project.org/web/licenses/GPL-3). + +betabin <- function (formula, random, data = NULL, link = c("logit", "probit", "cloglog"), + phi.ini = NULL, warnings = FALSE, na.action = na.omit, fixpar = list(), + hessian = TRUE, control = list(maxit = 2000), ...) 
+{ + CALL <- mf <- match.call(expand.dots = FALSE) + tr <- function(string) gsub("^[[:space:]]+|[[:space:]]+$", + "", string) + link <- match.arg(link) + if (length(formula) != 3) + stop(paste(tr(deparse(formula)), collapse = " "), "is not a valid formula.") + else if (substring(deparse(formula)[1], 1, 5) != "cbind") + stop(paste(tr(deparse(formula)), collapse = ""), " is not a valid formula.\n", + "The response must be a matrix of the form cbind(success, failure)") + if (length(random) == 3) { + form <- deparse(random) + warning("The formula for phi (", form, ") contains a response which is ignored.") + random <- random[-2] + } + explain <- as.character(attr(terms(random), "variables"))[-1] + if (length(explain) > 1) { + warning("The formula for phi contains several explanatory variables (", + paste(explain, collapse = ", "), ").\n", "Only the first one (", + explain[1], ") was considered.") + explain <- explain[1] + } + gf3 <- if (length(explain) == 1) + paste(as.character(formula[3]), explain, sep = " + ") + else as.character(formula[3]) + gf <- formula(paste(formula[2], "~", gf3)) + if (missing(data)) + data <- environment(gf) + mb <- match(c("formula", "data", "na.action"), names(mf), + 0) + mfb <- mf[c(1, mb)] + mfb$drop.unused.levels <- TRUE + mfb[[1]] <- as.name("model.frame") + names(mfb)[2] <- "formula" + mfb <- eval(mfb, parent.frame()) + mt <- attr(mfb, "terms") + modmatrix.b <- if (!is.empty.model(mt)) + model.matrix(mt, mfb) + else matrix(, NROW(Y), 0) + Y <- model.response(mfb, "numeric") + weights <- model.weights(mfb) + if (!is.null(weights) && any(weights < 0)) + stop("Negative wts not allowed") + n <- rowSums(Y) + y <- Y[, 1] + if (any(n == 0)) + warning("The data set contains at least one line with weight = 0.\n") + mr <- match(c("random", "data", "na.action"), names(mf), + 0) + mr <- mf[c(1, mr)] + mr$drop.unused.levels <- TRUE + mr[[1]] <- as.name("model.frame") + names(mr)[2] <- "formula" + mr <- eval(mr, parent.frame()) + if (length(explain) == 0) + modmatrix.phi <- model.matrix(object = ~1, data = mr) + else { + express <- paste("model.matrix(object = ~ -1 + ", explain, + ", data = mr", ", contrasts = list(", explain, " = 'contr.treatment'))", + sep = "") + if (is.ordered(data[, match(explain, table = names(mr))])) + warning(explain, " is an ordered factor.\n", "Treatment contrast was used to build model matrix for phi.") + modmatrix.phi <- eval(parse(text = express)) + } + fam <- eval(parse(text = paste("binomial(link =", link, ")"))) + fm <- glm(formula = formula, family = fam, data = data, na.action = na.action) + b <- coef(fm) + if (any(is.na(b))) { + print(nab <- b[is.na(b)]) + stop("Initial values for the fixed effects contain at least one missing value.") + } + nb.b <- ncol(modmatrix.b) + nb.phi <- ncol(modmatrix.phi) + if (!is.null(phi.ini) && !(phi.ini < 1 & phi.ini > 0)) + stop("phi.ini was set to ", phi.ini, ".\nphi.ini should verify 0 < phi.ini < 1") + else if (is.null(phi.ini)) + phi.ini <- rep(0.1, nb.phi) + param.ini <- c(b, phi.ini) + if (!is.null(unlist(fixpar))) + param.ini[fixpar[[1]]] <- fixpar[[2]] + minuslogL <- function(param) { + if (!is.null(unlist(fixpar))) + param[fixpar[[1]]] <- fixpar[[2]] + b <- param[1:nb.b] + eta <- as.vector(modmatrix.b %*% b) + p <- invlink(eta, type = link) + phi <- as.vector(modmatrix.phi %*% param[(nb.b + 1):(nb.b + + nb.phi)]) + cnd <- phi == 0 + f1 <- dbinom(x = y[cnd], size = n[cnd], prob = p[cnd], + log = TRUE) + n2 <- n[!cnd] + y2 <- y[!cnd] + p2 <- p[!cnd] + phi2 <- phi[!cnd] + f2 <- lchoose(n2, y2) + 
lbeta(p2 * (1 - phi2)/phi2 + + y2, (1 - p2) * (1 - phi2)/phi2 + n2 - y2) - lbeta(p2 * + (1 - phi2)/phi2, (1 - p2) * (1 - phi2)/phi2) + fn <- sum(c(f1, f2)) + if (!is.finite(fn)) + fn <- -1e+20 + -fn + } + withWarnings <- function(expr) { + myWarnings <- NULL + wHandler <- function(w) { + myWarnings <<- c(myWarnings, list(w)) + invokeRestart("muffleWarning") + } + val <- withCallingHandlers(expr, warning = wHandler) + list(value = val, warnings = myWarnings) + } + reswarn <- withWarnings(optim(par = param.ini, fn = minuslogL, + hessian = hessian, control = control, ...)) + res <- reswarn$value + if (warnings) { + if (length(reswarn$warnings) > 0) { + v <- unlist(lapply(reswarn$warnings, as.character)) + tv <- data.frame(message = v, freq = rep(1, length(v))) + cat("Warnings during likelihood maximisation:\n") + print(aggregate(tv[, "freq", drop = FALSE], list(warning = tv$message), + sum)) + } + } + param <- res$par + namb <- colnames(modmatrix.b) + namphi <- paste("phi", colnames(modmatrix.phi), sep = ".") + nam <- c(namb, namphi) + names(param) <- nam + if (!is.null(unlist(fixpar))) + param[fixpar[[1]]] <- fixpar[[2]] + H <- H.singular <- Hr.singular <- NA + varparam <- matrix(NA) + is.singular <- function(X) qr(X)$rank < nrow(as.matrix(X)) + if (hessian) { + H <- res$hessian + if (is.null(unlist(fixpar))) { + H.singular <- is.singular(H) + if (!H.singular) + varparam <- qr.solve(H) + else warning("The hessian matrix was singular.\n") + } + else { + idparam <- 1:(nb.b + nb.phi) + idestim <- idparam[-fixpar[[1]]] + Hr <- as.matrix(H[-fixpar[[1]], -fixpar[[1]]]) + H.singular <- is.singular(Hr) + if (!H.singular) { + Vr <- solve(Hr) + dimnames(Vr) <- list(idestim, idestim) + varparam <- matrix(rep(NA, NROW(H) * NCOL(H)), + ncol = NCOL(H)) + varparam[idestim, idestim] <- Vr + } + } + } + else varparam <- matrix(NA) + if (any(!is.na(varparam))) + dimnames(varparam) <- list(nam, nam) + nbpar <- if (is.null(unlist(fixpar))) + sum(!is.na(param)) + else sum(!is.na(param[-fixpar[[1]]])) + logL.max <- sum(dbinom(x = y, size = n, prob = y/n, log = TRUE)) + logL <- -res$value + dev <- -2 * (logL - logL.max) + df.residual <- sum(n > 0) - nbpar + iterations <- res$counts[1] + code <- res$convergence + msg <- if (!is.null(res$message)) + res$message + else character(0) + if (code != 0) + warning("\nPossible convergence problem. Optimization process code: ", + code, " (see ?optim).\n") + new(Class = "glimML", CALL = CALL, link = link, method = "BB", + data = data, formula = formula, random = random, param = param, + varparam = varparam, fixed.param = param[seq(along = namb)], + random.param = param[-seq(along = namb)], logL = logL, + logL.max = logL.max, dev = dev, df.residual = df.residual, + nbpar = nbpar, iterations = iterations, code = code, + msg = msg, singular.hessian = as.numeric(H.singular), + param.ini = param.ini, na.action = na.action) +} + +invlink <- function (x, type = c("cloglog", "log", "logit", "probit")) +{ + switch(type, logit = plogis(x), probit = pnorm(x), log = exp(x), cloglog = 1 - + exp(-exp(x))) +} + +link <- function (x, type = c("cloglog", "log", "logit", "probit")) +{ + switch(type, logit = qlogis(x), probit = qnorm(x), + log = log(x), cloglog = log(-log(1 - x))) +} + + +pr <- function (object, ...) +{ + .local <- function (object, newdata = NULL, type = c("response", + "link"), se.fit = FALSE, ...) 
+ { + type <- match.arg(type) + mf <- object@CALL + b <- coef(object) + f <- object@formula[-2] + data <- object@data + offset <- NULL + if (is.null(newdata)) { + mb <- match(c("formula", "data", "na.action"), names(mf), + 0) + mfb <- mf[c(1, mb)] + mfb$drop.unused.levels <- TRUE + mfb[[1]] <- as.name("model.frame") + names(mfb)[2] <- "formula" + mfb <- eval(mfb, parent.frame()) + mt <- attr(mfb, "terms") + Y <- model.response(mfb, "numeric") + X <- if (!is.empty.model(mt)) + model.matrix(mt, mfb, contrasts) + else matrix(, NROW(Y), 0) + offset <- model.offset(mfb) + } + else { + mfb <- model.frame(f, newdata) + offset <- model.offset(mfb) + X <- model.matrix(object = f, data = newdata) + } + eta <- as.vector(X %*% b) + eta <- if (is.null(offset)) + eta + else eta + offset + varparam <- object@varparam + varb <- as.matrix(varparam[seq(length(b)), seq(length(b))]) + vareta <- X %*% varb %*% t(X) + if (type == "response") { + p <- invlink(eta, type = object@link) + J <- switch(object@link, logit = diag(p * (1 - p), + nrow = length(p)), probit = diag(dnorm( qnorm(p) ), + nrow = length(p)), cloglog = diag(-(1 - p) * + log(1 - p), nrow = length(p)), log = diag(p, + nrow = length(p))) + varp <- J %*% vareta %*% J + se.p <- sqrt(diag(varp)) + } + se.eta <- sqrt(diag(vareta)) + if (!se.fit) + res <- switch(type, response = p, link = eta) + else res <- switch(type, response = list(fit = p, se.fit = se.p), + link = list(fit = eta, se.fit = se.eta)) + res + } + .local(object, ...) +} + +setMethod(predict, "glimML", pr) +setMethod(fitted, "glimML", function (object, ...) { + mf <- object@CALL + mb <- match(c("formula", "data", "na.action"), names(mf), + 0) + mfb <- mf[c(1, mb)] + mfb$drop.unused.levels <- TRUE + mfb[[1]] <- as.name("model.frame") + names(mfb)[2] <- "formula" + mfb <- eval(mfb, parent.frame()) + mt <- attr(mfb, "terms") + Y <- model.response(mfb, "numeric") + X <- if (!is.empty.model(mt)) + model.matrix(mt, mfb, contrasts) + else matrix(, NROW(Y), 0) + offset <- model.offset(mfb) + b <- coef(object) + eta <- as.vector(X %*% b) + eta <- if (is.null(offset)) + eta + else eta + offset + invlink(eta, type = object@link) +} +) diff --git a/multiarea_model/data_multiarea/.ipynb_checkpoints/.ipynb_checkpoints/SLN_logdensities-checkpoint-checkpoint.R b/multiarea_model/data_multiarea/.ipynb_checkpoints/.ipynb_checkpoints/SLN_logdensities-checkpoint-checkpoint.R new file mode 100644 index 0000000..410fdd1 --- /dev/null +++ b/multiarea_model/data_multiarea/.ipynb_checkpoints/.ipynb_checkpoints/SLN_logdensities-checkpoint-checkpoint.R @@ -0,0 +1,14 @@ +library('aod') +args <- commandArgs(trailingOnly=TRUE) +source(paste(args,'multiarea_model/data_multiarea/bbAlt.R', sep="")) +f <- file(paste(args,'multiarea_model/data_multiarea/raw_data/RData_prepared_logdensities.txt', sep=""),'r') +x <- read.table(f) +close(f) + + +dens <- data.matrix(x)[,7] + +m2.bb <- betabin(cbind(S, I) ~ dens , ~ 1, data = x, "probit", control = list(maxit = 100000)) +h2.bb <- c(coef(m2.bb)) + +print(h2.bb) diff --git a/multiarea_model/data_multiarea/.ipynb_checkpoints/.ipynb_checkpoints/bbAlt-checkpoint-checkpoint.R b/multiarea_model/data_multiarea/.ipynb_checkpoints/.ipynb_checkpoints/bbAlt-checkpoint-checkpoint.R new file mode 100644 index 0000000..6aa3d5e --- /dev/null +++ b/multiarea_model/data_multiarea/.ipynb_checkpoints/.ipynb_checkpoints/bbAlt-checkpoint-checkpoint.R @@ -0,0 +1,289 @@ +# Script provided by Kenneth Knoblauch +# This code is based on functions from the aod package of R written by +# Matthieu Lesnoff 
and Renaud Lancelot +# (https://cran.r-project.org/web/packages/aod/index.html), published +# under the GPL-3 license +# (https://cran.r-project.org/web/licenses/GPL-3). + +betabin <- function (formula, random, data = NULL, link = c("logit", "probit", "cloglog"), + phi.ini = NULL, warnings = FALSE, na.action = na.omit, fixpar = list(), + hessian = TRUE, control = list(maxit = 2000), ...) +{ + CALL <- mf <- match.call(expand.dots = FALSE) + tr <- function(string) gsub("^[[:space:]]+|[[:space:]]+$", + "", string) + link <- match.arg(link) + if (length(formula) != 3) + stop(paste(tr(deparse(formula)), collapse = " "), "is not a valid formula.") + else if (substring(deparse(formula)[1], 1, 5) != "cbind") + stop(paste(tr(deparse(formula)), collapse = ""), " is not a valid formula.\n", + "The response must be a matrix of the form cbind(success, failure)") + if (length(random) == 3) { + form <- deparse(random) + warning("The formula for phi (", form, ") contains a response which is ignored.") + random <- random[-2] + } + explain <- as.character(attr(terms(random), "variables"))[-1] + if (length(explain) > 1) { + warning("The formula for phi contains several explanatory variables (", + paste(explain, collapse = ", "), ").\n", "Only the first one (", + explain[1], ") was considered.") + explain <- explain[1] + } + gf3 <- if (length(explain) == 1) + paste(as.character(formula[3]), explain, sep = " + ") + else as.character(formula[3]) + gf <- formula(paste(formula[2], "~", gf3)) + if (missing(data)) + data <- environment(gf) + mb <- match(c("formula", "data", "na.action"), names(mf), + 0) + mfb <- mf[c(1, mb)] + mfb$drop.unused.levels <- TRUE + mfb[[1]] <- as.name("model.frame") + names(mfb)[2] <- "formula" + mfb <- eval(mfb, parent.frame()) + mt <- attr(mfb, "terms") + modmatrix.b <- if (!is.empty.model(mt)) + model.matrix(mt, mfb) + else matrix(, NROW(Y), 0) + Y <- model.response(mfb, "numeric") + weights <- model.weights(mfb) + if (!is.null(weights) && any(weights < 0)) + stop("Negative wts not allowed") + n <- rowSums(Y) + y <- Y[, 1] + if (any(n == 0)) + warning("The data set contains at least one line with weight = 0.\n") + mr <- match(c("random", "data", "na.action"), names(mf), + 0) + mr <- mf[c(1, mr)] + mr$drop.unused.levels <- TRUE + mr[[1]] <- as.name("model.frame") + names(mr)[2] <- "formula" + mr <- eval(mr, parent.frame()) + if (length(explain) == 0) + modmatrix.phi <- model.matrix(object = ~1, data = mr) + else { + express <- paste("model.matrix(object = ~ -1 + ", explain, + ", data = mr", ", contrasts = list(", explain, " = 'contr.treatment'))", + sep = "") + if (is.ordered(data[, match(explain, table = names(mr))])) + warning(explain, " is an ordered factor.\n", "Treatment contrast was used to build model matrix for phi.") + modmatrix.phi <- eval(parse(text = express)) + } + fam <- eval(parse(text = paste("binomial(link =", link, ")"))) + fm <- glm(formula = formula, family = fam, data = data, na.action = na.action) + b <- coef(fm) + if (any(is.na(b))) { + print(nab <- b[is.na(b)]) + stop("Initial values for the fixed effects contain at least one missing value.") + } + nb.b <- ncol(modmatrix.b) + nb.phi <- ncol(modmatrix.phi) + if (!is.null(phi.ini) && !(phi.ini < 1 & phi.ini > 0)) + stop("phi.ini was set to ", phi.ini, ".\nphi.ini should verify 0 < phi.ini < 1") + else if (is.null(phi.ini)) + phi.ini <- rep(0.1, nb.phi) + param.ini <- c(b, phi.ini) + if (!is.null(unlist(fixpar))) + param.ini[fixpar[[1]]] <- fixpar[[2]] + minuslogL <- function(param) { + if 
(!is.null(unlist(fixpar))) + param[fixpar[[1]]] <- fixpar[[2]] + b <- param[1:nb.b] + eta <- as.vector(modmatrix.b %*% b) + p <- invlink(eta, type = link) + phi <- as.vector(modmatrix.phi %*% param[(nb.b + 1):(nb.b + + nb.phi)]) + cnd <- phi == 0 + f1 <- dbinom(x = y[cnd], size = n[cnd], prob = p[cnd], + log = TRUE) + n2 <- n[!cnd] + y2 <- y[!cnd] + p2 <- p[!cnd] + phi2 <- phi[!cnd] + f2 <- lchoose(n2, y2) + lbeta(p2 * (1 - phi2)/phi2 + + y2, (1 - p2) * (1 - phi2)/phi2 + n2 - y2) - lbeta(p2 * + (1 - phi2)/phi2, (1 - p2) * (1 - phi2)/phi2) + fn <- sum(c(f1, f2)) + if (!is.finite(fn)) + fn <- -1e+20 + -fn + } + withWarnings <- function(expr) { + myWarnings <- NULL + wHandler <- function(w) { + myWarnings <<- c(myWarnings, list(w)) + invokeRestart("muffleWarning") + } + val <- withCallingHandlers(expr, warning = wHandler) + list(value = val, warnings = myWarnings) + } + reswarn <- withWarnings(optim(par = param.ini, fn = minuslogL, + hessian = hessian, control = control, ...)) + res <- reswarn$value + if (warnings) { + if (length(reswarn$warnings) > 0) { + v <- unlist(lapply(reswarn$warnings, as.character)) + tv <- data.frame(message = v, freq = rep(1, length(v))) + cat("Warnings during likelihood maximisation:\n") + print(aggregate(tv[, "freq", drop = FALSE], list(warning = tv$message), + sum)) + } + } + param <- res$par + namb <- colnames(modmatrix.b) + namphi <- paste("phi", colnames(modmatrix.phi), sep = ".") + nam <- c(namb, namphi) + names(param) <- nam + if (!is.null(unlist(fixpar))) + param[fixpar[[1]]] <- fixpar[[2]] + H <- H.singular <- Hr.singular <- NA + varparam <- matrix(NA) + is.singular <- function(X) qr(X)$rank < nrow(as.matrix(X)) + if (hessian) { + H <- res$hessian + if (is.null(unlist(fixpar))) { + H.singular <- is.singular(H) + if (!H.singular) + varparam <- qr.solve(H) + else warning("The hessian matrix was singular.\n") + } + else { + idparam <- 1:(nb.b + nb.phi) + idestim <- idparam[-fixpar[[1]]] + Hr <- as.matrix(H[-fixpar[[1]], -fixpar[[1]]]) + H.singular <- is.singular(Hr) + if (!H.singular) { + Vr <- solve(Hr) + dimnames(Vr) <- list(idestim, idestim) + varparam <- matrix(rep(NA, NROW(H) * NCOL(H)), + ncol = NCOL(H)) + varparam[idestim, idestim] <- Vr + } + } + } + else varparam <- matrix(NA) + if (any(!is.na(varparam))) + dimnames(varparam) <- list(nam, nam) + nbpar <- if (is.null(unlist(fixpar))) + sum(!is.na(param)) + else sum(!is.na(param[-fixpar[[1]]])) + logL.max <- sum(dbinom(x = y, size = n, prob = y/n, log = TRUE)) + logL <- -res$value + dev <- -2 * (logL - logL.max) + df.residual <- sum(n > 0) - nbpar + iterations <- res$counts[1] + code <- res$convergence + msg <- if (!is.null(res$message)) + res$message + else character(0) + if (code != 0) + warning("\nPossible convergence problem. 
Optimization process code: ", + code, " (see ?optim).\n") + new(Class = "glimML", CALL = CALL, link = link, method = "BB", + data = data, formula = formula, random = random, param = param, + varparam = varparam, fixed.param = param[seq(along = namb)], + random.param = param[-seq(along = namb)], logL = logL, + logL.max = logL.max, dev = dev, df.residual = df.residual, + nbpar = nbpar, iterations = iterations, code = code, + msg = msg, singular.hessian = as.numeric(H.singular), + param.ini = param.ini, na.action = na.action) +} + +invlink <- function (x, type = c("cloglog", "log", "logit", "probit")) +{ + switch(type, logit = plogis(x), probit = pnorm(x), log = exp(x), cloglog = 1 - + exp(-exp(x))) +} + +link <- function (x, type = c("cloglog", "log", "logit", "probit")) +{ + switch(type, logit = qlogis(x), probit = qnorm(x), + log = log(x), cloglog = log(-log(1 - x))) +} + + +pr <- function (object, ...) +{ + .local <- function (object, newdata = NULL, type = c("response", + "link"), se.fit = FALSE, ...) + { + type <- match.arg(type) + mf <- object@CALL + b <- coef(object) + f <- object@formula[-2] + data <- object@data + offset <- NULL + if (is.null(newdata)) { + mb <- match(c("formula", "data", "na.action"), names(mf), + 0) + mfb <- mf[c(1, mb)] + mfb$drop.unused.levels <- TRUE + mfb[[1]] <- as.name("model.frame") + names(mfb)[2] <- "formula" + mfb <- eval(mfb, parent.frame()) + mt <- attr(mfb, "terms") + Y <- model.response(mfb, "numeric") + X <- if (!is.empty.model(mt)) + model.matrix(mt, mfb, contrasts) + else matrix(, NROW(Y), 0) + offset <- model.offset(mfb) + } + else { + mfb <- model.frame(f, newdata) + offset <- model.offset(mfb) + X <- model.matrix(object = f, data = newdata) + } + eta <- as.vector(X %*% b) + eta <- if (is.null(offset)) + eta + else eta + offset + varparam <- object@varparam + varb <- as.matrix(varparam[seq(length(b)), seq(length(b))]) + vareta <- X %*% varb %*% t(X) + if (type == "response") { + p <- invlink(eta, type = object@link) + J <- switch(object@link, logit = diag(p * (1 - p), + nrow = length(p)), probit = diag(dnorm( qnorm(p) ), + nrow = length(p)), cloglog = diag(-(1 - p) * + log(1 - p), nrow = length(p)), log = diag(p, + nrow = length(p))) + varp <- J %*% vareta %*% J + se.p <- sqrt(diag(varp)) + } + se.eta <- sqrt(diag(vareta)) + if (!se.fit) + res <- switch(type, response = p, link = eta) + else res <- switch(type, response = list(fit = p, se.fit = se.p), + link = list(fit = eta, se.fit = se.eta)) + res + } + .local(object, ...) +} + +setMethod(predict, "glimML", pr) +setMethod(fitted, "glimML", function (object, ...) 
{ + mf <- object@CALL + mb <- match(c("formula", "data", "na.action"), names(mf), + 0) + mfb <- mf[c(1, mb)] + mfb$drop.unused.levels <- TRUE + mfb[[1]] <- as.name("model.frame") + names(mfb)[2] <- "formula" + mfb <- eval(mfb, parent.frame()) + mt <- attr(mfb, "terms") + Y <- model.response(mfb, "numeric") + X <- if (!is.empty.model(mt)) + model.matrix(mt, mfb, contrasts) + else matrix(, NROW(Y), 0) + offset <- model.offset(mfb) + b <- coef(object) + eta <- as.vector(X %*% b) + eta <- if (is.null(offset)) + eta + else eta + offset + invlink(eta, type = object@link) +} +) diff --git a/multiarea_model/data_multiarea/.ipynb_checkpoints/SLN_logdensities-checkpoint.R b/multiarea_model/data_multiarea/.ipynb_checkpoints/SLN_logdensities-checkpoint.R new file mode 100644 index 0000000..410fdd1 --- /dev/null +++ b/multiarea_model/data_multiarea/.ipynb_checkpoints/SLN_logdensities-checkpoint.R @@ -0,0 +1,14 @@ +library('aod') +args <- commandArgs(trailingOnly=TRUE) +source(paste(args,'multiarea_model/data_multiarea/bbAlt.R', sep="")) +f <- file(paste(args,'multiarea_model/data_multiarea/raw_data/RData_prepared_logdensities.txt', sep=""),'r') +x <- read.table(f) +close(f) + + +dens <- data.matrix(x)[,7] + +m2.bb <- betabin(cbind(S, I) ~ dens , ~ 1, data = x, "probit", control = list(maxit = 100000)) +h2.bb <- c(coef(m2.bb)) + +print(h2.bb) diff --git a/multiarea_model/data_multiarea/.ipynb_checkpoints/bbAlt-checkpoint.R b/multiarea_model/data_multiarea/.ipynb_checkpoints/bbAlt-checkpoint.R new file mode 100644 index 0000000..6aa3d5e --- /dev/null +++ b/multiarea_model/data_multiarea/.ipynb_checkpoints/bbAlt-checkpoint.R @@ -0,0 +1,289 @@ +# Script provided by Kenneth Knoblauch +# This code is based on functions from the aod package of R written by +# Matthieu Lesnoff and Renaud Lancelot +# (https://cran.r-project.org/web/packages/aod/index.html), published +# under the GPL-3 license +# (https://cran.r-project.org/web/licenses/GPL-3). + +betabin <- function (formula, random, data = NULL, link = c("logit", "probit", "cloglog"), + phi.ini = NULL, warnings = FALSE, na.action = na.omit, fixpar = list(), + hessian = TRUE, control = list(maxit = 2000), ...) 
+{ + CALL <- mf <- match.call(expand.dots = FALSE) + tr <- function(string) gsub("^[[:space:]]+|[[:space:]]+$", + "", string) + link <- match.arg(link) + if (length(formula) != 3) + stop(paste(tr(deparse(formula)), collapse = " "), "is not a valid formula.") + else if (substring(deparse(formula)[1], 1, 5) != "cbind") + stop(paste(tr(deparse(formula)), collapse = ""), " is not a valid formula.\n", + "The response must be a matrix of the form cbind(success, failure)") + if (length(random) == 3) { + form <- deparse(random) + warning("The formula for phi (", form, ") contains a response which is ignored.") + random <- random[-2] + } + explain <- as.character(attr(terms(random), "variables"))[-1] + if (length(explain) > 1) { + warning("The formula for phi contains several explanatory variables (", + paste(explain, collapse = ", "), ").\n", "Only the first one (", + explain[1], ") was considered.") + explain <- explain[1] + } + gf3 <- if (length(explain) == 1) + paste(as.character(formula[3]), explain, sep = " + ") + else as.character(formula[3]) + gf <- formula(paste(formula[2], "~", gf3)) + if (missing(data)) + data <- environment(gf) + mb <- match(c("formula", "data", "na.action"), names(mf), + 0) + mfb <- mf[c(1, mb)] + mfb$drop.unused.levels <- TRUE + mfb[[1]] <- as.name("model.frame") + names(mfb)[2] <- "formula" + mfb <- eval(mfb, parent.frame()) + mt <- attr(mfb, "terms") + modmatrix.b <- if (!is.empty.model(mt)) + model.matrix(mt, mfb) + else matrix(, NROW(Y), 0) + Y <- model.response(mfb, "numeric") + weights <- model.weights(mfb) + if (!is.null(weights) && any(weights < 0)) + stop("Negative wts not allowed") + n <- rowSums(Y) + y <- Y[, 1] + if (any(n == 0)) + warning("The data set contains at least one line with weight = 0.\n") + mr <- match(c("random", "data", "na.action"), names(mf), + 0) + mr <- mf[c(1, mr)] + mr$drop.unused.levels <- TRUE + mr[[1]] <- as.name("model.frame") + names(mr)[2] <- "formula" + mr <- eval(mr, parent.frame()) + if (length(explain) == 0) + modmatrix.phi <- model.matrix(object = ~1, data = mr) + else { + express <- paste("model.matrix(object = ~ -1 + ", explain, + ", data = mr", ", contrasts = list(", explain, " = 'contr.treatment'))", + sep = "") + if (is.ordered(data[, match(explain, table = names(mr))])) + warning(explain, " is an ordered factor.\n", "Treatment contrast was used to build model matrix for phi.") + modmatrix.phi <- eval(parse(text = express)) + } + fam <- eval(parse(text = paste("binomial(link =", link, ")"))) + fm <- glm(formula = formula, family = fam, data = data, na.action = na.action) + b <- coef(fm) + if (any(is.na(b))) { + print(nab <- b[is.na(b)]) + stop("Initial values for the fixed effects contain at least one missing value.") + } + nb.b <- ncol(modmatrix.b) + nb.phi <- ncol(modmatrix.phi) + if (!is.null(phi.ini) && !(phi.ini < 1 & phi.ini > 0)) + stop("phi.ini was set to ", phi.ini, ".\nphi.ini should verify 0 < phi.ini < 1") + else if (is.null(phi.ini)) + phi.ini <- rep(0.1, nb.phi) + param.ini <- c(b, phi.ini) + if (!is.null(unlist(fixpar))) + param.ini[fixpar[[1]]] <- fixpar[[2]] + minuslogL <- function(param) { + if (!is.null(unlist(fixpar))) + param[fixpar[[1]]] <- fixpar[[2]] + b <- param[1:nb.b] + eta <- as.vector(modmatrix.b %*% b) + p <- invlink(eta, type = link) + phi <- as.vector(modmatrix.phi %*% param[(nb.b + 1):(nb.b + + nb.phi)]) + cnd <- phi == 0 + f1 <- dbinom(x = y[cnd], size = n[cnd], prob = p[cnd], + log = TRUE) + n2 <- n[!cnd] + y2 <- y[!cnd] + p2 <- p[!cnd] + phi2 <- phi[!cnd] + f2 <- lchoose(n2, y2) + 
lbeta(p2 * (1 - phi2)/phi2 + + y2, (1 - p2) * (1 - phi2)/phi2 + n2 - y2) - lbeta(p2 * + (1 - phi2)/phi2, (1 - p2) * (1 - phi2)/phi2) + fn <- sum(c(f1, f2)) + if (!is.finite(fn)) + fn <- -1e+20 + -fn + } + withWarnings <- function(expr) { + myWarnings <- NULL + wHandler <- function(w) { + myWarnings <<- c(myWarnings, list(w)) + invokeRestart("muffleWarning") + } + val <- withCallingHandlers(expr, warning = wHandler) + list(value = val, warnings = myWarnings) + } + reswarn <- withWarnings(optim(par = param.ini, fn = minuslogL, + hessian = hessian, control = control, ...)) + res <- reswarn$value + if (warnings) { + if (length(reswarn$warnings) > 0) { + v <- unlist(lapply(reswarn$warnings, as.character)) + tv <- data.frame(message = v, freq = rep(1, length(v))) + cat("Warnings during likelihood maximisation:\n") + print(aggregate(tv[, "freq", drop = FALSE], list(warning = tv$message), + sum)) + } + } + param <- res$par + namb <- colnames(modmatrix.b) + namphi <- paste("phi", colnames(modmatrix.phi), sep = ".") + nam <- c(namb, namphi) + names(param) <- nam + if (!is.null(unlist(fixpar))) + param[fixpar[[1]]] <- fixpar[[2]] + H <- H.singular <- Hr.singular <- NA + varparam <- matrix(NA) + is.singular <- function(X) qr(X)$rank < nrow(as.matrix(X)) + if (hessian) { + H <- res$hessian + if (is.null(unlist(fixpar))) { + H.singular <- is.singular(H) + if (!H.singular) + varparam <- qr.solve(H) + else warning("The hessian matrix was singular.\n") + } + else { + idparam <- 1:(nb.b + nb.phi) + idestim <- idparam[-fixpar[[1]]] + Hr <- as.matrix(H[-fixpar[[1]], -fixpar[[1]]]) + H.singular <- is.singular(Hr) + if (!H.singular) { + Vr <- solve(Hr) + dimnames(Vr) <- list(idestim, idestim) + varparam <- matrix(rep(NA, NROW(H) * NCOL(H)), + ncol = NCOL(H)) + varparam[idestim, idestim] <- Vr + } + } + } + else varparam <- matrix(NA) + if (any(!is.na(varparam))) + dimnames(varparam) <- list(nam, nam) + nbpar <- if (is.null(unlist(fixpar))) + sum(!is.na(param)) + else sum(!is.na(param[-fixpar[[1]]])) + logL.max <- sum(dbinom(x = y, size = n, prob = y/n, log = TRUE)) + logL <- -res$value + dev <- -2 * (logL - logL.max) + df.residual <- sum(n > 0) - nbpar + iterations <- res$counts[1] + code <- res$convergence + msg <- if (!is.null(res$message)) + res$message + else character(0) + if (code != 0) + warning("\nPossible convergence problem. Optimization process code: ", + code, " (see ?optim).\n") + new(Class = "glimML", CALL = CALL, link = link, method = "BB", + data = data, formula = formula, random = random, param = param, + varparam = varparam, fixed.param = param[seq(along = namb)], + random.param = param[-seq(along = namb)], logL = logL, + logL.max = logL.max, dev = dev, df.residual = df.residual, + nbpar = nbpar, iterations = iterations, code = code, + msg = msg, singular.hessian = as.numeric(H.singular), + param.ini = param.ini, na.action = na.action) +} + +invlink <- function (x, type = c("cloglog", "log", "logit", "probit")) +{ + switch(type, logit = plogis(x), probit = pnorm(x), log = exp(x), cloglog = 1 - + exp(-exp(x))) +} + +link <- function (x, type = c("cloglog", "log", "logit", "probit")) +{ + switch(type, logit = qlogis(x), probit = qnorm(x), + log = log(x), cloglog = log(-log(1 - x))) +} + + +pr <- function (object, ...) +{ + .local <- function (object, newdata = NULL, type = c("response", + "link"), se.fit = FALSE, ...) 
+ { + type <- match.arg(type) + mf <- object@CALL + b <- coef(object) + f <- object@formula[-2] + data <- object@data + offset <- NULL + if (is.null(newdata)) { + mb <- match(c("formula", "data", "na.action"), names(mf), + 0) + mfb <- mf[c(1, mb)] + mfb$drop.unused.levels <- TRUE + mfb[[1]] <- as.name("model.frame") + names(mfb)[2] <- "formula" + mfb <- eval(mfb, parent.frame()) + mt <- attr(mfb, "terms") + Y <- model.response(mfb, "numeric") + X <- if (!is.empty.model(mt)) + model.matrix(mt, mfb, contrasts) + else matrix(, NROW(Y), 0) + offset <- model.offset(mfb) + } + else { + mfb <- model.frame(f, newdata) + offset <- model.offset(mfb) + X <- model.matrix(object = f, data = newdata) + } + eta <- as.vector(X %*% b) + eta <- if (is.null(offset)) + eta + else eta + offset + varparam <- object@varparam + varb <- as.matrix(varparam[seq(length(b)), seq(length(b))]) + vareta <- X %*% varb %*% t(X) + if (type == "response") { + p <- invlink(eta, type = object@link) + J <- switch(object@link, logit = diag(p * (1 - p), + nrow = length(p)), probit = diag(dnorm( qnorm(p) ), + nrow = length(p)), cloglog = diag(-(1 - p) * + log(1 - p), nrow = length(p)), log = diag(p, + nrow = length(p))) + varp <- J %*% vareta %*% J + se.p <- sqrt(diag(varp)) + } + se.eta <- sqrt(diag(vareta)) + if (!se.fit) + res <- switch(type, response = p, link = eta) + else res <- switch(type, response = list(fit = p, se.fit = se.p), + link = list(fit = eta, se.fit = se.eta)) + res + } + .local(object, ...) +} + +setMethod(predict, "glimML", pr) +setMethod(fitted, "glimML", function (object, ...) { + mf <- object@CALL + mb <- match(c("formula", "data", "na.action"), names(mf), + 0) + mfb <- mf[c(1, mb)] + mfb$drop.unused.levels <- TRUE + mfb[[1]] <- as.name("model.frame") + names(mfb)[2] <- "formula" + mfb <- eval(mfb, parent.frame()) + mt <- attr(mfb, "terms") + Y <- model.response(mfb, "numeric") + X <- if (!is.empty.model(mt)) + model.matrix(mt, mfb, contrasts) + else matrix(, NROW(Y), 0) + offset <- model.offset(mfb) + b <- coef(object) + eta <- as.vector(X %*% b) + eta <- if (is.null(offset)) + eta + else eta + offset + invlink(eta, type = object@link) +} +) -- GitLab From d4b56711741c7f0c14c00bbda1c9ba7019a7988d Mon Sep 17 00:00:00 2001 From: didihou <didi.hou@rwth-aachen.de> Date: Wed, 20 Sep 2023 15:18:02 +0200 Subject: [PATCH 04/11] / --- multi-area-model.ipynb | 72 +++++++++++++++++++++--------------------- 1 file changed, 36 insertions(+), 36 deletions(-) diff --git a/multi-area-model.ipynb b/multi-area-model.ipynb index 743ce97..85f153c 100644 --- a/multi-area-model.ipynb +++ b/multi-area-model.ipynb @@ -509,7 +509,7 @@ "output_type": "stream", "text": [ "Initializing network from dictionary.\n", - "RAND_DATA_LABEL 7110\n" + "RAND_DATA_LABEL 7968\n" ] }, { @@ -591,72 +591,72 @@ "text": [ "Prepared simulation in 0.00 seconds.\n", "Rank 0: created area V1 with 0 local nodes\n", - "Memory after V1 : 1515.14 MB\n", + "Memory after V1 : 1516.25 MB\n", "Rank 0: created area V2 with 0 local nodes\n", - "Memory after V2 : 1541.85 MB\n", + "Memory after V2 : 1542.84 MB\n", "Rank 0: created area VP with 0 local nodes\n", - "Memory after VP : 1571.02 MB\n", + "Memory after VP : 1572.02 MB\n", "Rank 0: created area V3 with 0 local nodes\n", - "Memory after V3 : 1599.30 MB\n", + "Memory after V3 : 1600.25 MB\n", "Rank 0: created area V3A with 0 local nodes\n", - "Memory after V3A : 1619.24 MB\n", + "Memory after V3A : 1620.27 MB\n", "Rank 0: created area MT with 0 local nodes\n", - "Memory after MT : 1644.78 MB\n", + "Memory 
after MT : 1645.80 MB\n", "Rank 0: created area V4t with 0 local nodes\n", - "Memory after V4t : 1669.75 MB\n", + "Memory after V4t : 1670.73 MB\n", "Rank 0: created area V4 with 0 local nodes\n", - "Memory after V4 : 1696.68 MB\n", + "Memory after V4 : 1697.67 MB\n", "Rank 0: created area VOT with 0 local nodes\n", - "Memory after VOT : 1721.41 MB\n", + "Memory after VOT : 1722.37 MB\n", "Rank 0: created area MSTd with 0 local nodes\n", - "Memory after MSTd : 1741.30 MB\n", + "Memory after MSTd : 1742.29 MB\n", "Rank 0: created area PIP with 0 local nodes\n", - "Memory after PIP : 1762.64 MB\n", + "Memory after PIP : 1763.63 MB\n", "Rank 0: created area PO with 0 local nodes\n", - "Memory after PO : 1784.18 MB\n", + "Memory after PO : 1785.13 MB\n", "Rank 0: created area DP with 0 local nodes\n", - "Memory after DP : 1804.32 MB\n", + "Memory after DP : 1805.43 MB\n", "Rank 0: created area MIP with 0 local nodes\n", - "Memory after MIP : 1825.89 MB\n", + "Memory after MIP : 1826.88 MB\n", "Rank 0: created area MDP with 0 local nodes\n", - "Memory after MDP : 1847.39 MB\n", + "Memory after MDP : 1848.38 MB\n", "Rank 0: created area VIP with 0 local nodes\n", - "Memory after VIP : 1869.36 MB\n", + "Memory after VIP : 1870.34 MB\n", "Rank 0: created area LIP with 0 local nodes\n", - "Memory after LIP : 1893.33 MB\n", + "Memory after LIP : 1894.24 MB\n", "Rank 0: created area PITv with 0 local nodes\n", - "Memory after PITv : 1918.53 MB\n", + "Memory after PITv : 1919.55 MB\n", "Rank 0: created area PITd with 0 local nodes\n", - "Memory after PITd : 1943.70 MB\n", + "Memory after PITd : 1944.77 MB\n", "Rank 0: created area MSTl with 0 local nodes\n", - "Memory after MSTl : 1965.03 MB\n", + "Memory after MSTl : 1965.95 MB\n", "Rank 0: created area CITv with 0 local nodes\n", - "Memory after CITv : 1983.83 MB\n", + "Memory after CITv : 1984.88 MB\n", "Rank 0: created area CITd with 0 local nodes\n", - "Memory after CITd : 2003.14 MB\n", + "Memory after CITd : 2004.21 MB\n", "Rank 0: created area FEF with 0 local nodes\n", - "Memory after FEF : 2024.65 MB\n", + "Memory after FEF : 2025.55 MB\n", "Rank 0: created area TF with 0 local nodes\n", - "Memory after TF : 2040.28 MB\n", + "Memory after TF : 2041.22 MB\n", "Rank 0: created area AITv with 0 local nodes\n", - "Memory after AITv : 2062.86 MB\n", + "Memory after AITv : 2063.93 MB\n", "Rank 0: created area FST with 0 local nodes\n", - "Memory after FST : 2079.55 MB\n", + "Memory after FST : 2080.65 MB\n", "Rank 0: created area 7a with 0 local nodes\n", - "Memory after 7a : 2100.87 MB\n", + "Memory after 7a : 2101.82 MB\n", "Rank 0: created area STPp with 0 local nodes\n", - "Memory after STPp : 2119.46 MB\n", + "Memory after STPp : 2120.56 MB\n", "Rank 0: created area STPa with 0 local nodes\n", - "Memory after STPa : 2138.71 MB\n", + "Memory after STPa : 2139.57 MB\n", "Rank 0: created area 46 with 0 local nodes\n", - "Memory after 46 : 2154.03 MB\n", + "Memory after 46 : 2155.02 MB\n", "Rank 0: created area AITd with 0 local nodes\n", - "Memory after AITd : 2176.71 MB\n", + "Memory after AITd : 2177.61 MB\n", "Rank 0: created area TH with 0 local nodes\n", - "Memory after TH : 2189.28 MB\n", - "Created areas and internal connections in 2.05 seconds.\n", - "Created cortico-cortical connections in 20.48 seconds.\n", - "Simulated network in 71.65 seconds.\n" + "Memory after TH : 2190.27 MB\n", + "Created areas and internal connections in 2.20 seconds.\n", + "Created cortico-cortical connections in 20.14 seconds.\n", + "Simulated network in 77.14 
seconds.\n" ] } ], -- GitLab From 783702d8c71e932152202f4e284f401d0c416fc9 Mon Sep 17 00:00:00 2001 From: Didi Hou <didi.hou@rwth-aachen.de> Date: Wed, 20 Sep 2023 15:24:09 +0200 Subject: [PATCH 05/11] Update .gitignore in .gitignore files added multiarea_model/data_multiarea/.ipynb_checkpoints/* to ignore all checkpoints files --- .gitignore | 2 ++ 1 file changed, 2 insertions(+) diff --git a/.gitignore b/.gitignore index 4cee63c..d35f0f8 100644 --- a/.gitignore +++ b/.gitignore @@ -21,3 +21,5 @@ figures/Schmidt2018/.ipynb_checkpoints/* figures/Schmidt2018_dyn/.ipynb_checkpoints/* multiarea_model/.ipynb_checkpoints/* + +multiarea_model/data_multiarea/.ipynb_checkpoints/* -- GitLab From 6476be2e98133bb298020fcaabade19edacd6130 Mon Sep 17 00:00:00 2001 From: Didi Hou <didi.hou@rwth-aachen.de> Date: Wed, 20 Sep 2023 15:42:51 +0200 Subject: [PATCH 06/11] Create VERSION_LOG.md --- VERSION_LOG.md | 18 ++++++++++++++++++ 1 file changed, 18 insertions(+) create mode 100644 VERSION_LOG.md diff --git a/VERSION_LOG.md b/VERSION_LOG.md new file mode 100644 index 0000000..e838285 --- /dev/null +++ b/VERSION_LOG.md @@ -0,0 +1,18 @@ +# This is the log file recording the changes made for every release of versions of mulit-area model. + + +## MAM 1.1.0 + +### Updates + +### Bug fixes + + +## MAM 1.0.0 + +MAM 1.0.0 is the first release of the multi-area model, which has been documented and in line with the relevant publications. + +### Bug fixes +* corrected the url of NEST logo in README.md + + -- GitLab From bc4f892a281c50beb6c1d6a5f69a8609bf98b9e3 Mon Sep 17 00:00:00 2001 From: Didi Hou <didi.hou@rwth-aachen.de> Date: Wed, 20 Sep 2023 15:51:30 +0200 Subject: [PATCH 07/11] Update VERSION_LOG.md --- VERSION_LOG.md | 20 ++++++++++++++++---- 1 file changed, 16 insertions(+), 4 deletions(-) diff --git a/VERSION_LOG.md b/VERSION_LOG.md index e838285..27f20fa 100644 --- a/VERSION_LOG.md +++ b/VERSION_LOG.md @@ -1,16 +1,28 @@ -# This is the log file recording the changes made for every release of versions of mulit-area model. - - ## MAM 1.1.0 ### Updates +* imporved the notebook structure of multi-area-model.ipynb +* added ### Bug fixes +* corrected the separator from "" to "/" in ./multiarea_model/data_multiarea/SLN_logdensities.R to fix the file path of ./multiarea_model/data_multiarea/bbAlt.R ## MAM 1.0.0 -MAM 1.0.0 is the first release of the multi-area model, which has been documented and in line with the relevant publications. +This code implements the spiking network model of macaque visual cortex developed at the Institute of Neuroscience and Medicine (INM-6), Research Center Jülich. + +The model has been documented in the following publications: + +1. Schmidt M, Bakker R, Hilgetag CC, Diesmann M & van Albada SJ Multi-scale account of the network structure of macaque visual cortex Brain Structure and Function (2018), 223: 1409 https://doi.org/10.1007/s00429-017-1554-4 + +2. Schuecker J, Schmidt M, van Albada SJ, Diesmann M & Helias M (2017) Fundamental Activity Constraints Lead to Specific Interpretations of the Connectome. PLOS Computational Biology, 13(2): e1005179. https://doi.org/10.1371/journal.pcbi.1005179 + +3. Schmidt M, Bakker R, Shen K, Bezgin B, Diesmann M & van Albada SJ (2018) A multi-scale layer-resolved spiking network model of resting-state dynamics in macaque cortex. PLOS Computational Biology, 14(9): e1006359. https://doi.org/10.1371/journal.pcbi.1006359 + +The code in this repository is self-contained and allows one to reproduce the results of all three papers. 
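For context on the separator bug fix recorded in the version log entries above: SLN_logdensities.R (reproduced verbatim in the checkpoint files earlier in this series) builds the path to bbAlt.R with `paste()`. The sketch below only illustrates the effect of changing `sep` from `""` to `"/"`; the repository root shown is a made-up example value, whereas the real script takes the root from `commandArgs(trailingOnly=TRUE)`.

```r
# Illustration of the path-separator fix described in the version log.
# `args` stands for the repository root passed on the command line, as in
# SLN_logdensities.R; "/home/user/multi-area-model" is a made-up example value.
args <- "/home/user/multi-area-model"

# Before the fix: components are glued together with no separator, so the
# resulting path is only valid if the supplied root happens to end in "/".
paste(args, 'multiarea_model/data_multiarea/bbAlt.R', sep = "")
#> [1] "/home/user/multi-area-modelmultiarea_model/data_multiarea/bbAlt.R"

# After the fix: joining with "/" (base R's file.path() would behave the same
# way here) yields a path that resolves for any root, so source() finds bbAlt.R.
paste(args, 'multiarea_model/data_multiarea/bbAlt.R', sep = "/")
#> [1] "/home/user/multi-area-model/multiarea_model/data_multiarea/bbAlt.R"
```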
+ +The code was improved since the above publications to function with more modern versions of NEST (3+) as well as other minor updates. ### Bug fixes * corrected the url of NEST logo in README.md -- GitLab From 1235cc5d258c72523491cab33b20999e6dac3bb2 Mon Sep 17 00:00:00 2001 From: Didi Hou <didi.hou@rwth-aachen.de> Date: Wed, 20 Sep 2023 16:05:00 +0200 Subject: [PATCH 08/11] Update VERSION_LOG.md --- VERSION_LOG.md | 27 ++++++++++++++++++++------- 1 file changed, 20 insertions(+), 7 deletions(-) diff --git a/VERSION_LOG.md b/VERSION_LOG.md index 27f20fa..2a688df 100644 --- a/VERSION_LOG.md +++ b/VERSION_LOG.md @@ -1,14 +1,27 @@ -## MAM 1.1.0 +## MAM v1.1.0 -### Updates -* imporved the notebook structure of multi-area-model.ipynb -* added +This release brings a number of improvements and new features, including the down-scaling multi-area model, which enhance the model's reliability, usability, and overall user experience. It also serves as a base version that specifies the dependencies required for our future releases up to and including MAM v2.0.0. + +For detailed changes made in this release since our last release MAM v1.0.0, please check our [version log][https://github.com/INM-6/MAM2EBRAINS/edit/pre-cr-pr/VERSION_LOG.md]. + +### Enhancements + +* The structure of the notebook has been revamped for improved clarity and user navigation. + +* Enhanced Parameter Descriptions: Parameters are now accompanied by more detailed descriptions, providing users with a clearer understanding and better control over model configurations. + +* Visualization Enhancements: Dive deeper into your simulation results with the addition of new plots that offer richer insights into the model's behavior. + +* Try it on EBRAINS: For users looking for a seamless experience, we've integrated a "Try it on EBRAINS" button. Dive right in and experience MAM on the EBRAINS platform! + +* Improved Documentation: The README.de file has been updated with comprehensive user instructions to guide both new and returning users through the features and functionalities of MAM. ### Bug fixes -* corrected the separator from "" to "/" in ./multiarea_model/data_multiarea/SLN_logdensities.R to fix the file path of ./multiarea_model/data_multiarea/bbAlt.R + +* Corrected the separator from "" to "/" in ./multiarea_model/data_multiarea/SLN_logdensities.R to fix the file path of ./multiarea_model/data_multiarea/bbAlt.R -## MAM 1.0.0 +## MAM v1.0.0 This code implements the spiking network model of macaque visual cortex developed at the Institute of Neuroscience and Medicine (INM-6), Research Center Jülich. @@ -25,6 +38,6 @@ The code in this repository is self-contained and allows one to reproduce the re The code was improved since the above publications to function with more modern versions of NEST (3+) as well as other minor updates. 
### Bug fixes -* corrected the url of NEST logo in README.md +* corrected the URL of NEST logo in README.md -- GitLab From ebd96c0ba6a7fcc05b8d419f653f400bfa67e95b Mon Sep 17 00:00:00 2001 From: Didi Hou <didi.hou@rwth-aachen.de> Date: Wed, 20 Sep 2023 16:10:13 +0200 Subject: [PATCH 09/11] Update VERSION_LOG.md --- VERSION_LOG.md | 18 ------------------ 1 file changed, 18 deletions(-) diff --git a/VERSION_LOG.md b/VERSION_LOG.md index 2a688df..2ee11f3 100644 --- a/VERSION_LOG.md +++ b/VERSION_LOG.md @@ -1,9 +1,5 @@ ## MAM v1.1.0 -This release brings a number of improvements and new features, including the down-scaling multi-area model, which enhance the model's reliability, usability, and overall user experience. It also serves as a base version that specifies the dependencies required for our future releases up to and including MAM v2.0.0. - -For detailed changes made in this release since our last release MAM v1.0.0, please check our [version log][https://github.com/INM-6/MAM2EBRAINS/edit/pre-cr-pr/VERSION_LOG.md]. - ### Enhancements * The structure of the notebook has been revamped for improved clarity and user navigation. @@ -23,20 +19,6 @@ For detailed changes made in this release since our last release MAM v1.0.0, ple ## MAM v1.0.0 -This code implements the spiking network model of macaque visual cortex developed at the Institute of Neuroscience and Medicine (INM-6), Research Center Jülich. - -The model has been documented in the following publications: - -1. Schmidt M, Bakker R, Hilgetag CC, Diesmann M & van Albada SJ Multi-scale account of the network structure of macaque visual cortex Brain Structure and Function (2018), 223: 1409 https://doi.org/10.1007/s00429-017-1554-4 - -2. Schuecker J, Schmidt M, van Albada SJ, Diesmann M & Helias M (2017) Fundamental Activity Constraints Lead to Specific Interpretations of the Connectome. PLOS Computational Biology, 13(2): e1005179. https://doi.org/10.1371/journal.pcbi.1005179 - -3. Schmidt M, Bakker R, Shen K, Bezgin B, Diesmann M & van Albada SJ (2018) A multi-scale layer-resolved spiking network model of resting-state dynamics in macaque cortex. PLOS Computational Biology, 14(9): e1006359. https://doi.org/10.1371/journal.pcbi.1006359 - -The code in this repository is self-contained and allows one to reproduce the results of all three papers. - -The code was improved since the above publications to function with more modern versions of NEST (3+) as well as other minor updates. 
- ### Bug fixes * corrected the URL of NEST logo in README.md -- GitLab From 3a215e3a4861b0170b123c6c1d6d2ba67e5d6763 Mon Sep 17 00:00:00 2001 From: Didi Hou <didi.hou@rwth-aachen.de> Date: Wed, 20 Sep 2023 16:13:16 +0200 Subject: [PATCH 10/11] Delete multiarea_model/data_multiarea/.ipynb_checkpoints directory --- ...ckpoint-checkpoint-checkpoint-checkpoint.R | 14 - ...ckpoint-checkpoint-checkpoint-checkpoint.R | 289 ------------------ ...nsities-checkpoint-checkpoint-checkpoint.R | 14 - .../bbAlt-checkpoint-checkpoint-checkpoint.R | 289 ------------------ .../SLN_logdensities-checkpoint-checkpoint.R | 14 - .../bbAlt-checkpoint-checkpoint.R | 289 ------------------ .../SLN_logdensities-checkpoint.R | 14 - .../.ipynb_checkpoints/bbAlt-checkpoint.R | 289 ------------------ 8 files changed, 1212 deletions(-) delete mode 100644 multiarea_model/data_multiarea/.ipynb_checkpoints/.ipynb_checkpoints/.ipynb_checkpoints/.ipynb_checkpoints/SLN_logdensities-checkpoint-checkpoint-checkpoint-checkpoint.R delete mode 100644 multiarea_model/data_multiarea/.ipynb_checkpoints/.ipynb_checkpoints/.ipynb_checkpoints/.ipynb_checkpoints/bbAlt-checkpoint-checkpoint-checkpoint-checkpoint.R delete mode 100644 multiarea_model/data_multiarea/.ipynb_checkpoints/.ipynb_checkpoints/.ipynb_checkpoints/SLN_logdensities-checkpoint-checkpoint-checkpoint.R delete mode 100644 multiarea_model/data_multiarea/.ipynb_checkpoints/.ipynb_checkpoints/.ipynb_checkpoints/bbAlt-checkpoint-checkpoint-checkpoint.R delete mode 100644 multiarea_model/data_multiarea/.ipynb_checkpoints/.ipynb_checkpoints/SLN_logdensities-checkpoint-checkpoint.R delete mode 100644 multiarea_model/data_multiarea/.ipynb_checkpoints/.ipynb_checkpoints/bbAlt-checkpoint-checkpoint.R delete mode 100644 multiarea_model/data_multiarea/.ipynb_checkpoints/SLN_logdensities-checkpoint.R delete mode 100644 multiarea_model/data_multiarea/.ipynb_checkpoints/bbAlt-checkpoint.R diff --git a/multiarea_model/data_multiarea/.ipynb_checkpoints/.ipynb_checkpoints/.ipynb_checkpoints/.ipynb_checkpoints/SLN_logdensities-checkpoint-checkpoint-checkpoint-checkpoint.R b/multiarea_model/data_multiarea/.ipynb_checkpoints/.ipynb_checkpoints/.ipynb_checkpoints/.ipynb_checkpoints/SLN_logdensities-checkpoint-checkpoint-checkpoint-checkpoint.R deleted file mode 100644 index 410fdd1..0000000 --- a/multiarea_model/data_multiarea/.ipynb_checkpoints/.ipynb_checkpoints/.ipynb_checkpoints/.ipynb_checkpoints/SLN_logdensities-checkpoint-checkpoint-checkpoint-checkpoint.R +++ /dev/null @@ -1,14 +0,0 @@ -library('aod') -args <- commandArgs(trailingOnly=TRUE) -source(paste(args,'multiarea_model/data_multiarea/bbAlt.R', sep="")) -f <- file(paste(args,'multiarea_model/data_multiarea/raw_data/RData_prepared_logdensities.txt', sep=""),'r') -x <- read.table(f) -close(f) - - -dens <- data.matrix(x)[,7] - -m2.bb <- betabin(cbind(S, I) ~ dens , ~ 1, data = x, "probit", control = list(maxit = 100000)) -h2.bb <- c(coef(m2.bb)) - -print(h2.bb) diff --git a/multiarea_model/data_multiarea/.ipynb_checkpoints/.ipynb_checkpoints/.ipynb_checkpoints/.ipynb_checkpoints/bbAlt-checkpoint-checkpoint-checkpoint-checkpoint.R b/multiarea_model/data_multiarea/.ipynb_checkpoints/.ipynb_checkpoints/.ipynb_checkpoints/.ipynb_checkpoints/bbAlt-checkpoint-checkpoint-checkpoint-checkpoint.R deleted file mode 100644 index 6aa3d5e..0000000 --- a/multiarea_model/data_multiarea/.ipynb_checkpoints/.ipynb_checkpoints/.ipynb_checkpoints/.ipynb_checkpoints/bbAlt-checkpoint-checkpoint-checkpoint-checkpoint.R +++ /dev/null @@ -1,289 +0,0 @@ -# 
Script provided by Kenneth Knoblauch -# This code is based on functions from the aod package of R written by -# Matthieu Lesnoff and Renaud Lancelot -# (https://cran.r-project.org/web/packages/aod/index.html), published -# under the GPL-3 license -# (https://cran.r-project.org/web/licenses/GPL-3). - -betabin <- function (formula, random, data = NULL, link = c("logit", "probit", "cloglog"), - phi.ini = NULL, warnings = FALSE, na.action = na.omit, fixpar = list(), - hessian = TRUE, control = list(maxit = 2000), ...) -{ - CALL <- mf <- match.call(expand.dots = FALSE) - tr <- function(string) gsub("^[[:space:]]+|[[:space:]]+$", - "", string) - link <- match.arg(link) - if (length(formula) != 3) - stop(paste(tr(deparse(formula)), collapse = " "), "is not a valid formula.") - else if (substring(deparse(formula)[1], 1, 5) != "cbind") - stop(paste(tr(deparse(formula)), collapse = ""), " is not a valid formula.\n", - "The response must be a matrix of the form cbind(success, failure)") - if (length(random) == 3) { - form <- deparse(random) - warning("The formula for phi (", form, ") contains a response which is ignored.") - random <- random[-2] - } - explain <- as.character(attr(terms(random), "variables"))[-1] - if (length(explain) > 1) { - warning("The formula for phi contains several explanatory variables (", - paste(explain, collapse = ", "), ").\n", "Only the first one (", - explain[1], ") was considered.") - explain <- explain[1] - } - gf3 <- if (length(explain) == 1) - paste(as.character(formula[3]), explain, sep = " + ") - else as.character(formula[3]) - gf <- formula(paste(formula[2], "~", gf3)) - if (missing(data)) - data <- environment(gf) - mb <- match(c("formula", "data", "na.action"), names(mf), - 0) - mfb <- mf[c(1, mb)] - mfb$drop.unused.levels <- TRUE - mfb[[1]] <- as.name("model.frame") - names(mfb)[2] <- "formula" - mfb <- eval(mfb, parent.frame()) - mt <- attr(mfb, "terms") - modmatrix.b <- if (!is.empty.model(mt)) - model.matrix(mt, mfb) - else matrix(, NROW(Y), 0) - Y <- model.response(mfb, "numeric") - weights <- model.weights(mfb) - if (!is.null(weights) && any(weights < 0)) - stop("Negative wts not allowed") - n <- rowSums(Y) - y <- Y[, 1] - if (any(n == 0)) - warning("The data set contains at least one line with weight = 0.\n") - mr <- match(c("random", "data", "na.action"), names(mf), - 0) - mr <- mf[c(1, mr)] - mr$drop.unused.levels <- TRUE - mr[[1]] <- as.name("model.frame") - names(mr)[2] <- "formula" - mr <- eval(mr, parent.frame()) - if (length(explain) == 0) - modmatrix.phi <- model.matrix(object = ~1, data = mr) - else { - express <- paste("model.matrix(object = ~ -1 + ", explain, - ", data = mr", ", contrasts = list(", explain, " = 'contr.treatment'))", - sep = "") - if (is.ordered(data[, match(explain, table = names(mr))])) - warning(explain, " is an ordered factor.\n", "Treatment contrast was used to build model matrix for phi.") - modmatrix.phi <- eval(parse(text = express)) - } - fam <- eval(parse(text = paste("binomial(link =", link, ")"))) - fm <- glm(formula = formula, family = fam, data = data, na.action = na.action) - b <- coef(fm) - if (any(is.na(b))) { - print(nab <- b[is.na(b)]) - stop("Initial values for the fixed effects contain at least one missing value.") - } - nb.b <- ncol(modmatrix.b) - nb.phi <- ncol(modmatrix.phi) - if (!is.null(phi.ini) && !(phi.ini < 1 & phi.ini > 0)) - stop("phi.ini was set to ", phi.ini, ".\nphi.ini should verify 0 < phi.ini < 1") - else if (is.null(phi.ini)) - phi.ini <- rep(0.1, nb.phi) - param.ini <- c(b, phi.ini) - if 
(!is.null(unlist(fixpar))) - param.ini[fixpar[[1]]] <- fixpar[[2]] - minuslogL <- function(param) { - if (!is.null(unlist(fixpar))) - param[fixpar[[1]]] <- fixpar[[2]] - b <- param[1:nb.b] - eta <- as.vector(modmatrix.b %*% b) - p <- invlink(eta, type = link) - phi <- as.vector(modmatrix.phi %*% param[(nb.b + 1):(nb.b + - nb.phi)]) - cnd <- phi == 0 - f1 <- dbinom(x = y[cnd], size = n[cnd], prob = p[cnd], - log = TRUE) - n2 <- n[!cnd] - y2 <- y[!cnd] - p2 <- p[!cnd] - phi2 <- phi[!cnd] - f2 <- lchoose(n2, y2) + lbeta(p2 * (1 - phi2)/phi2 + - y2, (1 - p2) * (1 - phi2)/phi2 + n2 - y2) - lbeta(p2 * - (1 - phi2)/phi2, (1 - p2) * (1 - phi2)/phi2) - fn <- sum(c(f1, f2)) - if (!is.finite(fn)) - fn <- -1e+20 - -fn - } - withWarnings <- function(expr) { - myWarnings <- NULL - wHandler <- function(w) { - myWarnings <<- c(myWarnings, list(w)) - invokeRestart("muffleWarning") - } - val <- withCallingHandlers(expr, warning = wHandler) - list(value = val, warnings = myWarnings) - } - reswarn <- withWarnings(optim(par = param.ini, fn = minuslogL, - hessian = hessian, control = control, ...)) - res <- reswarn$value - if (warnings) { - if (length(reswarn$warnings) > 0) { - v <- unlist(lapply(reswarn$warnings, as.character)) - tv <- data.frame(message = v, freq = rep(1, length(v))) - cat("Warnings during likelihood maximisation:\n") - print(aggregate(tv[, "freq", drop = FALSE], list(warning = tv$message), - sum)) - } - } - param <- res$par - namb <- colnames(modmatrix.b) - namphi <- paste("phi", colnames(modmatrix.phi), sep = ".") - nam <- c(namb, namphi) - names(param) <- nam - if (!is.null(unlist(fixpar))) - param[fixpar[[1]]] <- fixpar[[2]] - H <- H.singular <- Hr.singular <- NA - varparam <- matrix(NA) - is.singular <- function(X) qr(X)$rank < nrow(as.matrix(X)) - if (hessian) { - H <- res$hessian - if (is.null(unlist(fixpar))) { - H.singular <- is.singular(H) - if (!H.singular) - varparam <- qr.solve(H) - else warning("The hessian matrix was singular.\n") - } - else { - idparam <- 1:(nb.b + nb.phi) - idestim <- idparam[-fixpar[[1]]] - Hr <- as.matrix(H[-fixpar[[1]], -fixpar[[1]]]) - H.singular <- is.singular(Hr) - if (!H.singular) { - Vr <- solve(Hr) - dimnames(Vr) <- list(idestim, idestim) - varparam <- matrix(rep(NA, NROW(H) * NCOL(H)), - ncol = NCOL(H)) - varparam[idestim, idestim] <- Vr - } - } - } - else varparam <- matrix(NA) - if (any(!is.na(varparam))) - dimnames(varparam) <- list(nam, nam) - nbpar <- if (is.null(unlist(fixpar))) - sum(!is.na(param)) - else sum(!is.na(param[-fixpar[[1]]])) - logL.max <- sum(dbinom(x = y, size = n, prob = y/n, log = TRUE)) - logL <- -res$value - dev <- -2 * (logL - logL.max) - df.residual <- sum(n > 0) - nbpar - iterations <- res$counts[1] - code <- res$convergence - msg <- if (!is.null(res$message)) - res$message - else character(0) - if (code != 0) - warning("\nPossible convergence problem. 
Optimization process code: ", - code, " (see ?optim).\n") - new(Class = "glimML", CALL = CALL, link = link, method = "BB", - data = data, formula = formula, random = random, param = param, - varparam = varparam, fixed.param = param[seq(along = namb)], - random.param = param[-seq(along = namb)], logL = logL, - logL.max = logL.max, dev = dev, df.residual = df.residual, - nbpar = nbpar, iterations = iterations, code = code, - msg = msg, singular.hessian = as.numeric(H.singular), - param.ini = param.ini, na.action = na.action) -} - -invlink <- function (x, type = c("cloglog", "log", "logit", "probit")) -{ - switch(type, logit = plogis(x), probit = pnorm(x), log = exp(x), cloglog = 1 - - exp(-exp(x))) -} - -link <- function (x, type = c("cloglog", "log", "logit", "probit")) -{ - switch(type, logit = qlogis(x), probit = qnorm(x), - log = log(x), cloglog = log(-log(1 - x))) -} - - -pr <- function (object, ...) -{ - .local <- function (object, newdata = NULL, type = c("response", - "link"), se.fit = FALSE, ...) - { - type <- match.arg(type) - mf <- object@CALL - b <- coef(object) - f <- object@formula[-2] - data <- object@data - offset <- NULL - if (is.null(newdata)) { - mb <- match(c("formula", "data", "na.action"), names(mf), - 0) - mfb <- mf[c(1, mb)] - mfb$drop.unused.levels <- TRUE - mfb[[1]] <- as.name("model.frame") - names(mfb)[2] <- "formula" - mfb <- eval(mfb, parent.frame()) - mt <- attr(mfb, "terms") - Y <- model.response(mfb, "numeric") - X <- if (!is.empty.model(mt)) - model.matrix(mt, mfb, contrasts) - else matrix(, NROW(Y), 0) - offset <- model.offset(mfb) - } - else { - mfb <- model.frame(f, newdata) - offset <- model.offset(mfb) - X <- model.matrix(object = f, data = newdata) - } - eta <- as.vector(X %*% b) - eta <- if (is.null(offset)) - eta - else eta + offset - varparam <- object@varparam - varb <- as.matrix(varparam[seq(length(b)), seq(length(b))]) - vareta <- X %*% varb %*% t(X) - if (type == "response") { - p <- invlink(eta, type = object@link) - J <- switch(object@link, logit = diag(p * (1 - p), - nrow = length(p)), probit = diag(dnorm( qnorm(p) ), - nrow = length(p)), cloglog = diag(-(1 - p) * - log(1 - p), nrow = length(p)), log = diag(p, - nrow = length(p))) - varp <- J %*% vareta %*% J - se.p <- sqrt(diag(varp)) - } - se.eta <- sqrt(diag(vareta)) - if (!se.fit) - res <- switch(type, response = p, link = eta) - else res <- switch(type, response = list(fit = p, se.fit = se.p), - link = list(fit = eta, se.fit = se.eta)) - res - } - .local(object, ...) -} - -setMethod(predict, "glimML", pr) -setMethod(fitted, "glimML", function (object, ...) 
{ - mf <- object@CALL - mb <- match(c("formula", "data", "na.action"), names(mf), - 0) - mfb <- mf[c(1, mb)] - mfb$drop.unused.levels <- TRUE - mfb[[1]] <- as.name("model.frame") - names(mfb)[2] <- "formula" - mfb <- eval(mfb, parent.frame()) - mt <- attr(mfb, "terms") - Y <- model.response(mfb, "numeric") - X <- if (!is.empty.model(mt)) - model.matrix(mt, mfb, contrasts) - else matrix(, NROW(Y), 0) - offset <- model.offset(mfb) - b <- coef(object) - eta <- as.vector(X %*% b) - eta <- if (is.null(offset)) - eta - else eta + offset - invlink(eta, type = object@link) -} -) diff --git a/multiarea_model/data_multiarea/.ipynb_checkpoints/.ipynb_checkpoints/.ipynb_checkpoints/SLN_logdensities-checkpoint-checkpoint-checkpoint.R b/multiarea_model/data_multiarea/.ipynb_checkpoints/.ipynb_checkpoints/.ipynb_checkpoints/SLN_logdensities-checkpoint-checkpoint-checkpoint.R deleted file mode 100644 index 410fdd1..0000000 --- a/multiarea_model/data_multiarea/.ipynb_checkpoints/.ipynb_checkpoints/.ipynb_checkpoints/SLN_logdensities-checkpoint-checkpoint-checkpoint.R +++ /dev/null @@ -1,14 +0,0 @@ -library('aod') -args <- commandArgs(trailingOnly=TRUE) -source(paste(args,'multiarea_model/data_multiarea/bbAlt.R', sep="")) -f <- file(paste(args,'multiarea_model/data_multiarea/raw_data/RData_prepared_logdensities.txt', sep=""),'r') -x <- read.table(f) -close(f) - - -dens <- data.matrix(x)[,7] - -m2.bb <- betabin(cbind(S, I) ~ dens , ~ 1, data = x, "probit", control = list(maxit = 100000)) -h2.bb <- c(coef(m2.bb)) - -print(h2.bb) diff --git a/multiarea_model/data_multiarea/.ipynb_checkpoints/.ipynb_checkpoints/.ipynb_checkpoints/bbAlt-checkpoint-checkpoint-checkpoint.R b/multiarea_model/data_multiarea/.ipynb_checkpoints/.ipynb_checkpoints/.ipynb_checkpoints/bbAlt-checkpoint-checkpoint-checkpoint.R deleted file mode 100644 index 6aa3d5e..0000000 --- a/multiarea_model/data_multiarea/.ipynb_checkpoints/.ipynb_checkpoints/.ipynb_checkpoints/bbAlt-checkpoint-checkpoint-checkpoint.R +++ /dev/null @@ -1,289 +0,0 @@ -# Script provided by Kenneth Knoblauch -# This code is based on functions from the aod package of R written by -# Matthieu Lesnoff and Renaud Lancelot -# (https://cran.r-project.org/web/packages/aod/index.html), published -# under the GPL-3 license -# (https://cran.r-project.org/web/licenses/GPL-3). - -betabin <- function (formula, random, data = NULL, link = c("logit", "probit", "cloglog"), - phi.ini = NULL, warnings = FALSE, na.action = na.omit, fixpar = list(), - hessian = TRUE, control = list(maxit = 2000), ...) 
-{ - CALL <- mf <- match.call(expand.dots = FALSE) - tr <- function(string) gsub("^[[:space:]]+|[[:space:]]+$", - "", string) - link <- match.arg(link) - if (length(formula) != 3) - stop(paste(tr(deparse(formula)), collapse = " "), "is not a valid formula.") - else if (substring(deparse(formula)[1], 1, 5) != "cbind") - stop(paste(tr(deparse(formula)), collapse = ""), " is not a valid formula.\n", - "The response must be a matrix of the form cbind(success, failure)") - if (length(random) == 3) { - form <- deparse(random) - warning("The formula for phi (", form, ") contains a response which is ignored.") - random <- random[-2] - } - explain <- as.character(attr(terms(random), "variables"))[-1] - if (length(explain) > 1) { - warning("The formula for phi contains several explanatory variables (", - paste(explain, collapse = ", "), ").\n", "Only the first one (", - explain[1], ") was considered.") - explain <- explain[1] - } - gf3 <- if (length(explain) == 1) - paste(as.character(formula[3]), explain, sep = " + ") - else as.character(formula[3]) - gf <- formula(paste(formula[2], "~", gf3)) - if (missing(data)) - data <- environment(gf) - mb <- match(c("formula", "data", "na.action"), names(mf), - 0) - mfb <- mf[c(1, mb)] - mfb$drop.unused.levels <- TRUE - mfb[[1]] <- as.name("model.frame") - names(mfb)[2] <- "formula" - mfb <- eval(mfb, parent.frame()) - mt <- attr(mfb, "terms") - modmatrix.b <- if (!is.empty.model(mt)) - model.matrix(mt, mfb) - else matrix(, NROW(Y), 0) - Y <- model.response(mfb, "numeric") - weights <- model.weights(mfb) - if (!is.null(weights) && any(weights < 0)) - stop("Negative wts not allowed") - n <- rowSums(Y) - y <- Y[, 1] - if (any(n == 0)) - warning("The data set contains at least one line with weight = 0.\n") - mr <- match(c("random", "data", "na.action"), names(mf), - 0) - mr <- mf[c(1, mr)] - mr$drop.unused.levels <- TRUE - mr[[1]] <- as.name("model.frame") - names(mr)[2] <- "formula" - mr <- eval(mr, parent.frame()) - if (length(explain) == 0) - modmatrix.phi <- model.matrix(object = ~1, data = mr) - else { - express <- paste("model.matrix(object = ~ -1 + ", explain, - ", data = mr", ", contrasts = list(", explain, " = 'contr.treatment'))", - sep = "") - if (is.ordered(data[, match(explain, table = names(mr))])) - warning(explain, " is an ordered factor.\n", "Treatment contrast was used to build model matrix for phi.") - modmatrix.phi <- eval(parse(text = express)) - } - fam <- eval(parse(text = paste("binomial(link =", link, ")"))) - fm <- glm(formula = formula, family = fam, data = data, na.action = na.action) - b <- coef(fm) - if (any(is.na(b))) { - print(nab <- b[is.na(b)]) - stop("Initial values for the fixed effects contain at least one missing value.") - } - nb.b <- ncol(modmatrix.b) - nb.phi <- ncol(modmatrix.phi) - if (!is.null(phi.ini) && !(phi.ini < 1 & phi.ini > 0)) - stop("phi.ini was set to ", phi.ini, ".\nphi.ini should verify 0 < phi.ini < 1") - else if (is.null(phi.ini)) - phi.ini <- rep(0.1, nb.phi) - param.ini <- c(b, phi.ini) - if (!is.null(unlist(fixpar))) - param.ini[fixpar[[1]]] <- fixpar[[2]] - minuslogL <- function(param) { - if (!is.null(unlist(fixpar))) - param[fixpar[[1]]] <- fixpar[[2]] - b <- param[1:nb.b] - eta <- as.vector(modmatrix.b %*% b) - p <- invlink(eta, type = link) - phi <- as.vector(modmatrix.phi %*% param[(nb.b + 1):(nb.b + - nb.phi)]) - cnd <- phi == 0 - f1 <- dbinom(x = y[cnd], size = n[cnd], prob = p[cnd], - log = TRUE) - n2 <- n[!cnd] - y2 <- y[!cnd] - p2 <- p[!cnd] - phi2 <- phi[!cnd] - f2 <- lchoose(n2, y2) + 
lbeta(p2 * (1 - phi2)/phi2 + - y2, (1 - p2) * (1 - phi2)/phi2 + n2 - y2) - lbeta(p2 * - (1 - phi2)/phi2, (1 - p2) * (1 - phi2)/phi2) - fn <- sum(c(f1, f2)) - if (!is.finite(fn)) - fn <- -1e+20 - -fn - } - withWarnings <- function(expr) { - myWarnings <- NULL - wHandler <- function(w) { - myWarnings <<- c(myWarnings, list(w)) - invokeRestart("muffleWarning") - } - val <- withCallingHandlers(expr, warning = wHandler) - list(value = val, warnings = myWarnings) - } - reswarn <- withWarnings(optim(par = param.ini, fn = minuslogL, - hessian = hessian, control = control, ...)) - res <- reswarn$value - if (warnings) { - if (length(reswarn$warnings) > 0) { - v <- unlist(lapply(reswarn$warnings, as.character)) - tv <- data.frame(message = v, freq = rep(1, length(v))) - cat("Warnings during likelihood maximisation:\n") - print(aggregate(tv[, "freq", drop = FALSE], list(warning = tv$message), - sum)) - } - } - param <- res$par - namb <- colnames(modmatrix.b) - namphi <- paste("phi", colnames(modmatrix.phi), sep = ".") - nam <- c(namb, namphi) - names(param) <- nam - if (!is.null(unlist(fixpar))) - param[fixpar[[1]]] <- fixpar[[2]] - H <- H.singular <- Hr.singular <- NA - varparam <- matrix(NA) - is.singular <- function(X) qr(X)$rank < nrow(as.matrix(X)) - if (hessian) { - H <- res$hessian - if (is.null(unlist(fixpar))) { - H.singular <- is.singular(H) - if (!H.singular) - varparam <- qr.solve(H) - else warning("The hessian matrix was singular.\n") - } - else { - idparam <- 1:(nb.b + nb.phi) - idestim <- idparam[-fixpar[[1]]] - Hr <- as.matrix(H[-fixpar[[1]], -fixpar[[1]]]) - H.singular <- is.singular(Hr) - if (!H.singular) { - Vr <- solve(Hr) - dimnames(Vr) <- list(idestim, idestim) - varparam <- matrix(rep(NA, NROW(H) * NCOL(H)), - ncol = NCOL(H)) - varparam[idestim, idestim] <- Vr - } - } - } - else varparam <- matrix(NA) - if (any(!is.na(varparam))) - dimnames(varparam) <- list(nam, nam) - nbpar <- if (is.null(unlist(fixpar))) - sum(!is.na(param)) - else sum(!is.na(param[-fixpar[[1]]])) - logL.max <- sum(dbinom(x = y, size = n, prob = y/n, log = TRUE)) - logL <- -res$value - dev <- -2 * (logL - logL.max) - df.residual <- sum(n > 0) - nbpar - iterations <- res$counts[1] - code <- res$convergence - msg <- if (!is.null(res$message)) - res$message - else character(0) - if (code != 0) - warning("\nPossible convergence problem. Optimization process code: ", - code, " (see ?optim).\n") - new(Class = "glimML", CALL = CALL, link = link, method = "BB", - data = data, formula = formula, random = random, param = param, - varparam = varparam, fixed.param = param[seq(along = namb)], - random.param = param[-seq(along = namb)], logL = logL, - logL.max = logL.max, dev = dev, df.residual = df.residual, - nbpar = nbpar, iterations = iterations, code = code, - msg = msg, singular.hessian = as.numeric(H.singular), - param.ini = param.ini, na.action = na.action) -} - -invlink <- function (x, type = c("cloglog", "log", "logit", "probit")) -{ - switch(type, logit = plogis(x), probit = pnorm(x), log = exp(x), cloglog = 1 - - exp(-exp(x))) -} - -link <- function (x, type = c("cloglog", "log", "logit", "probit")) -{ - switch(type, logit = qlogis(x), probit = qnorm(x), - log = log(x), cloglog = log(-log(1 - x))) -} - - -pr <- function (object, ...) -{ - .local <- function (object, newdata = NULL, type = c("response", - "link"), se.fit = FALSE, ...) 
- { - type <- match.arg(type) - mf <- object@CALL - b <- coef(object) - f <- object@formula[-2] - data <- object@data - offset <- NULL - if (is.null(newdata)) { - mb <- match(c("formula", "data", "na.action"), names(mf), - 0) - mfb <- mf[c(1, mb)] - mfb$drop.unused.levels <- TRUE - mfb[[1]] <- as.name("model.frame") - names(mfb)[2] <- "formula" - mfb <- eval(mfb, parent.frame()) - mt <- attr(mfb, "terms") - Y <- model.response(mfb, "numeric") - X <- if (!is.empty.model(mt)) - model.matrix(mt, mfb, contrasts) - else matrix(, NROW(Y), 0) - offset <- model.offset(mfb) - } - else { - mfb <- model.frame(f, newdata) - offset <- model.offset(mfb) - X <- model.matrix(object = f, data = newdata) - } - eta <- as.vector(X %*% b) - eta <- if (is.null(offset)) - eta - else eta + offset - varparam <- object@varparam - varb <- as.matrix(varparam[seq(length(b)), seq(length(b))]) - vareta <- X %*% varb %*% t(X) - if (type == "response") { - p <- invlink(eta, type = object@link) - J <- switch(object@link, logit = diag(p * (1 - p), - nrow = length(p)), probit = diag(dnorm( qnorm(p) ), - nrow = length(p)), cloglog = diag(-(1 - p) * - log(1 - p), nrow = length(p)), log = diag(p, - nrow = length(p))) - varp <- J %*% vareta %*% J - se.p <- sqrt(diag(varp)) - } - se.eta <- sqrt(diag(vareta)) - if (!se.fit) - res <- switch(type, response = p, link = eta) - else res <- switch(type, response = list(fit = p, se.fit = se.p), - link = list(fit = eta, se.fit = se.eta)) - res - } - .local(object, ...) -} - -setMethod(predict, "glimML", pr) -setMethod(fitted, "glimML", function (object, ...) { - mf <- object@CALL - mb <- match(c("formula", "data", "na.action"), names(mf), - 0) - mfb <- mf[c(1, mb)] - mfb$drop.unused.levels <- TRUE - mfb[[1]] <- as.name("model.frame") - names(mfb)[2] <- "formula" - mfb <- eval(mfb, parent.frame()) - mt <- attr(mfb, "terms") - Y <- model.response(mfb, "numeric") - X <- if (!is.empty.model(mt)) - model.matrix(mt, mfb, contrasts) - else matrix(, NROW(Y), 0) - offset <- model.offset(mfb) - b <- coef(object) - eta <- as.vector(X %*% b) - eta <- if (is.null(offset)) - eta - else eta + offset - invlink(eta, type = object@link) -} -) diff --git a/multiarea_model/data_multiarea/.ipynb_checkpoints/.ipynb_checkpoints/SLN_logdensities-checkpoint-checkpoint.R b/multiarea_model/data_multiarea/.ipynb_checkpoints/.ipynb_checkpoints/SLN_logdensities-checkpoint-checkpoint.R deleted file mode 100644 index 410fdd1..0000000 --- a/multiarea_model/data_multiarea/.ipynb_checkpoints/.ipynb_checkpoints/SLN_logdensities-checkpoint-checkpoint.R +++ /dev/null @@ -1,14 +0,0 @@ -library('aod') -args <- commandArgs(trailingOnly=TRUE) -source(paste(args,'multiarea_model/data_multiarea/bbAlt.R', sep="")) -f <- file(paste(args,'multiarea_model/data_multiarea/raw_data/RData_prepared_logdensities.txt', sep=""),'r') -x <- read.table(f) -close(f) - - -dens <- data.matrix(x)[,7] - -m2.bb <- betabin(cbind(S, I) ~ dens , ~ 1, data = x, "probit", control = list(maxit = 100000)) -h2.bb <- c(coef(m2.bb)) - -print(h2.bb) diff --git a/multiarea_model/data_multiarea/.ipynb_checkpoints/.ipynb_checkpoints/bbAlt-checkpoint-checkpoint.R b/multiarea_model/data_multiarea/.ipynb_checkpoints/.ipynb_checkpoints/bbAlt-checkpoint-checkpoint.R deleted file mode 100644 index 6aa3d5e..0000000 --- a/multiarea_model/data_multiarea/.ipynb_checkpoints/.ipynb_checkpoints/bbAlt-checkpoint-checkpoint.R +++ /dev/null @@ -1,289 +0,0 @@ -# Script provided by Kenneth Knoblauch -# This code is based on functions from the aod package of R written by -# Matthieu 
Lesnoff and Renaud Lancelot -# (https://cran.r-project.org/web/packages/aod/index.html), published -# under the GPL-3 license -# (https://cran.r-project.org/web/licenses/GPL-3). - -betabin <- function (formula, random, data = NULL, link = c("logit", "probit", "cloglog"), - phi.ini = NULL, warnings = FALSE, na.action = na.omit, fixpar = list(), - hessian = TRUE, control = list(maxit = 2000), ...) -{ - CALL <- mf <- match.call(expand.dots = FALSE) - tr <- function(string) gsub("^[[:space:]]+|[[:space:]]+$", - "", string) - link <- match.arg(link) - if (length(formula) != 3) - stop(paste(tr(deparse(formula)), collapse = " "), "is not a valid formula.") - else if (substring(deparse(formula)[1], 1, 5) != "cbind") - stop(paste(tr(deparse(formula)), collapse = ""), " is not a valid formula.\n", - "The response must be a matrix of the form cbind(success, failure)") - if (length(random) == 3) { - form <- deparse(random) - warning("The formula for phi (", form, ") contains a response which is ignored.") - random <- random[-2] - } - explain <- as.character(attr(terms(random), "variables"))[-1] - if (length(explain) > 1) { - warning("The formula for phi contains several explanatory variables (", - paste(explain, collapse = ", "), ").\n", "Only the first one (", - explain[1], ") was considered.") - explain <- explain[1] - } - gf3 <- if (length(explain) == 1) - paste(as.character(formula[3]), explain, sep = " + ") - else as.character(formula[3]) - gf <- formula(paste(formula[2], "~", gf3)) - if (missing(data)) - data <- environment(gf) - mb <- match(c("formula", "data", "na.action"), names(mf), - 0) - mfb <- mf[c(1, mb)] - mfb$drop.unused.levels <- TRUE - mfb[[1]] <- as.name("model.frame") - names(mfb)[2] <- "formula" - mfb <- eval(mfb, parent.frame()) - mt <- attr(mfb, "terms") - modmatrix.b <- if (!is.empty.model(mt)) - model.matrix(mt, mfb) - else matrix(, NROW(Y), 0) - Y <- model.response(mfb, "numeric") - weights <- model.weights(mfb) - if (!is.null(weights) && any(weights < 0)) - stop("Negative wts not allowed") - n <- rowSums(Y) - y <- Y[, 1] - if (any(n == 0)) - warning("The data set contains at least one line with weight = 0.\n") - mr <- match(c("random", "data", "na.action"), names(mf), - 0) - mr <- mf[c(1, mr)] - mr$drop.unused.levels <- TRUE - mr[[1]] <- as.name("model.frame") - names(mr)[2] <- "formula" - mr <- eval(mr, parent.frame()) - if (length(explain) == 0) - modmatrix.phi <- model.matrix(object = ~1, data = mr) - else { - express <- paste("model.matrix(object = ~ -1 + ", explain, - ", data = mr", ", contrasts = list(", explain, " = 'contr.treatment'))", - sep = "") - if (is.ordered(data[, match(explain, table = names(mr))])) - warning(explain, " is an ordered factor.\n", "Treatment contrast was used to build model matrix for phi.") - modmatrix.phi <- eval(parse(text = express)) - } - fam <- eval(parse(text = paste("binomial(link =", link, ")"))) - fm <- glm(formula = formula, family = fam, data = data, na.action = na.action) - b <- coef(fm) - if (any(is.na(b))) { - print(nab <- b[is.na(b)]) - stop("Initial values for the fixed effects contain at least one missing value.") - } - nb.b <- ncol(modmatrix.b) - nb.phi <- ncol(modmatrix.phi) - if (!is.null(phi.ini) && !(phi.ini < 1 & phi.ini > 0)) - stop("phi.ini was set to ", phi.ini, ".\nphi.ini should verify 0 < phi.ini < 1") - else if (is.null(phi.ini)) - phi.ini <- rep(0.1, nb.phi) - param.ini <- c(b, phi.ini) - if (!is.null(unlist(fixpar))) - param.ini[fixpar[[1]]] <- fixpar[[2]] - minuslogL <- function(param) { - if 
(!is.null(unlist(fixpar))) - param[fixpar[[1]]] <- fixpar[[2]] - b <- param[1:nb.b] - eta <- as.vector(modmatrix.b %*% b) - p <- invlink(eta, type = link) - phi <- as.vector(modmatrix.phi %*% param[(nb.b + 1):(nb.b + - nb.phi)]) - cnd <- phi == 0 - f1 <- dbinom(x = y[cnd], size = n[cnd], prob = p[cnd], - log = TRUE) - n2 <- n[!cnd] - y2 <- y[!cnd] - p2 <- p[!cnd] - phi2 <- phi[!cnd] - f2 <- lchoose(n2, y2) + lbeta(p2 * (1 - phi2)/phi2 + - y2, (1 - p2) * (1 - phi2)/phi2 + n2 - y2) - lbeta(p2 * - (1 - phi2)/phi2, (1 - p2) * (1 - phi2)/phi2) - fn <- sum(c(f1, f2)) - if (!is.finite(fn)) - fn <- -1e+20 - -fn - } - withWarnings <- function(expr) { - myWarnings <- NULL - wHandler <- function(w) { - myWarnings <<- c(myWarnings, list(w)) - invokeRestart("muffleWarning") - } - val <- withCallingHandlers(expr, warning = wHandler) - list(value = val, warnings = myWarnings) - } - reswarn <- withWarnings(optim(par = param.ini, fn = minuslogL, - hessian = hessian, control = control, ...)) - res <- reswarn$value - if (warnings) { - if (length(reswarn$warnings) > 0) { - v <- unlist(lapply(reswarn$warnings, as.character)) - tv <- data.frame(message = v, freq = rep(1, length(v))) - cat("Warnings during likelihood maximisation:\n") - print(aggregate(tv[, "freq", drop = FALSE], list(warning = tv$message), - sum)) - } - } - param <- res$par - namb <- colnames(modmatrix.b) - namphi <- paste("phi", colnames(modmatrix.phi), sep = ".") - nam <- c(namb, namphi) - names(param) <- nam - if (!is.null(unlist(fixpar))) - param[fixpar[[1]]] <- fixpar[[2]] - H <- H.singular <- Hr.singular <- NA - varparam <- matrix(NA) - is.singular <- function(X) qr(X)$rank < nrow(as.matrix(X)) - if (hessian) { - H <- res$hessian - if (is.null(unlist(fixpar))) { - H.singular <- is.singular(H) - if (!H.singular) - varparam <- qr.solve(H) - else warning("The hessian matrix was singular.\n") - } - else { - idparam <- 1:(nb.b + nb.phi) - idestim <- idparam[-fixpar[[1]]] - Hr <- as.matrix(H[-fixpar[[1]], -fixpar[[1]]]) - H.singular <- is.singular(Hr) - if (!H.singular) { - Vr <- solve(Hr) - dimnames(Vr) <- list(idestim, idestim) - varparam <- matrix(rep(NA, NROW(H) * NCOL(H)), - ncol = NCOL(H)) - varparam[idestim, idestim] <- Vr - } - } - } - else varparam <- matrix(NA) - if (any(!is.na(varparam))) - dimnames(varparam) <- list(nam, nam) - nbpar <- if (is.null(unlist(fixpar))) - sum(!is.na(param)) - else sum(!is.na(param[-fixpar[[1]]])) - logL.max <- sum(dbinom(x = y, size = n, prob = y/n, log = TRUE)) - logL <- -res$value - dev <- -2 * (logL - logL.max) - df.residual <- sum(n > 0) - nbpar - iterations <- res$counts[1] - code <- res$convergence - msg <- if (!is.null(res$message)) - res$message - else character(0) - if (code != 0) - warning("\nPossible convergence problem. 
Optimization process code: ", - code, " (see ?optim).\n") - new(Class = "glimML", CALL = CALL, link = link, method = "BB", - data = data, formula = formula, random = random, param = param, - varparam = varparam, fixed.param = param[seq(along = namb)], - random.param = param[-seq(along = namb)], logL = logL, - logL.max = logL.max, dev = dev, df.residual = df.residual, - nbpar = nbpar, iterations = iterations, code = code, - msg = msg, singular.hessian = as.numeric(H.singular), - param.ini = param.ini, na.action = na.action) -} - -invlink <- function (x, type = c("cloglog", "log", "logit", "probit")) -{ - switch(type, logit = plogis(x), probit = pnorm(x), log = exp(x), cloglog = 1 - - exp(-exp(x))) -} - -link <- function (x, type = c("cloglog", "log", "logit", "probit")) -{ - switch(type, logit = qlogis(x), probit = qnorm(x), - log = log(x), cloglog = log(-log(1 - x))) -} - - -pr <- function (object, ...) -{ - .local <- function (object, newdata = NULL, type = c("response", - "link"), se.fit = FALSE, ...) - { - type <- match.arg(type) - mf <- object@CALL - b <- coef(object) - f <- object@formula[-2] - data <- object@data - offset <- NULL - if (is.null(newdata)) { - mb <- match(c("formula", "data", "na.action"), names(mf), - 0) - mfb <- mf[c(1, mb)] - mfb$drop.unused.levels <- TRUE - mfb[[1]] <- as.name("model.frame") - names(mfb)[2] <- "formula" - mfb <- eval(mfb, parent.frame()) - mt <- attr(mfb, "terms") - Y <- model.response(mfb, "numeric") - X <- if (!is.empty.model(mt)) - model.matrix(mt, mfb, contrasts) - else matrix(, NROW(Y), 0) - offset <- model.offset(mfb) - } - else { - mfb <- model.frame(f, newdata) - offset <- model.offset(mfb) - X <- model.matrix(object = f, data = newdata) - } - eta <- as.vector(X %*% b) - eta <- if (is.null(offset)) - eta - else eta + offset - varparam <- object@varparam - varb <- as.matrix(varparam[seq(length(b)), seq(length(b))]) - vareta <- X %*% varb %*% t(X) - if (type == "response") { - p <- invlink(eta, type = object@link) - J <- switch(object@link, logit = diag(p * (1 - p), - nrow = length(p)), probit = diag(dnorm( qnorm(p) ), - nrow = length(p)), cloglog = diag(-(1 - p) * - log(1 - p), nrow = length(p)), log = diag(p, - nrow = length(p))) - varp <- J %*% vareta %*% J - se.p <- sqrt(diag(varp)) - } - se.eta <- sqrt(diag(vareta)) - if (!se.fit) - res <- switch(type, response = p, link = eta) - else res <- switch(type, response = list(fit = p, se.fit = se.p), - link = list(fit = eta, se.fit = se.eta)) - res - } - .local(object, ...) -} - -setMethod(predict, "glimML", pr) -setMethod(fitted, "glimML", function (object, ...) 
{ - mf <- object@CALL - mb <- match(c("formula", "data", "na.action"), names(mf), - 0) - mfb <- mf[c(1, mb)] - mfb$drop.unused.levels <- TRUE - mfb[[1]] <- as.name("model.frame") - names(mfb)[2] <- "formula" - mfb <- eval(mfb, parent.frame()) - mt <- attr(mfb, "terms") - Y <- model.response(mfb, "numeric") - X <- if (!is.empty.model(mt)) - model.matrix(mt, mfb, contrasts) - else matrix(, NROW(Y), 0) - offset <- model.offset(mfb) - b <- coef(object) - eta <- as.vector(X %*% b) - eta <- if (is.null(offset)) - eta - else eta + offset - invlink(eta, type = object@link) -} -) diff --git a/multiarea_model/data_multiarea/.ipynb_checkpoints/SLN_logdensities-checkpoint.R b/multiarea_model/data_multiarea/.ipynb_checkpoints/SLN_logdensities-checkpoint.R deleted file mode 100644 index 410fdd1..0000000 --- a/multiarea_model/data_multiarea/.ipynb_checkpoints/SLN_logdensities-checkpoint.R +++ /dev/null @@ -1,14 +0,0 @@ -library('aod') -args <- commandArgs(trailingOnly=TRUE) -source(paste(args,'multiarea_model/data_multiarea/bbAlt.R', sep="")) -f <- file(paste(args,'multiarea_model/data_multiarea/raw_data/RData_prepared_logdensities.txt', sep=""),'r') -x <- read.table(f) -close(f) - - -dens <- data.matrix(x)[,7] - -m2.bb <- betabin(cbind(S, I) ~ dens , ~ 1, data = x, "probit", control = list(maxit = 100000)) -h2.bb <- c(coef(m2.bb)) - -print(h2.bb) diff --git a/multiarea_model/data_multiarea/.ipynb_checkpoints/bbAlt-checkpoint.R b/multiarea_model/data_multiarea/.ipynb_checkpoints/bbAlt-checkpoint.R deleted file mode 100644 index 6aa3d5e..0000000 --- a/multiarea_model/data_multiarea/.ipynb_checkpoints/bbAlt-checkpoint.R +++ /dev/null @@ -1,289 +0,0 @@ -# Script provided by Kenneth Knoblauch -# This code is based on functions from the aod package of R written by -# Matthieu Lesnoff and Renaud Lancelot -# (https://cran.r-project.org/web/packages/aod/index.html), published -# under the GPL-3 license -# (https://cran.r-project.org/web/licenses/GPL-3). - -betabin <- function (formula, random, data = NULL, link = c("logit", "probit", "cloglog"), - phi.ini = NULL, warnings = FALSE, na.action = na.omit, fixpar = list(), - hessian = TRUE, control = list(maxit = 2000), ...) 
-{ - CALL <- mf <- match.call(expand.dots = FALSE) - tr <- function(string) gsub("^[[:space:]]+|[[:space:]]+$", - "", string) - link <- match.arg(link) - if (length(formula) != 3) - stop(paste(tr(deparse(formula)), collapse = " "), "is not a valid formula.") - else if (substring(deparse(formula)[1], 1, 5) != "cbind") - stop(paste(tr(deparse(formula)), collapse = ""), " is not a valid formula.\n", - "The response must be a matrix of the form cbind(success, failure)") - if (length(random) == 3) { - form <- deparse(random) - warning("The formula for phi (", form, ") contains a response which is ignored.") - random <- random[-2] - } - explain <- as.character(attr(terms(random), "variables"))[-1] - if (length(explain) > 1) { - warning("The formula for phi contains several explanatory variables (", - paste(explain, collapse = ", "), ").\n", "Only the first one (", - explain[1], ") was considered.") - explain <- explain[1] - } - gf3 <- if (length(explain) == 1) - paste(as.character(formula[3]), explain, sep = " + ") - else as.character(formula[3]) - gf <- formula(paste(formula[2], "~", gf3)) - if (missing(data)) - data <- environment(gf) - mb <- match(c("formula", "data", "na.action"), names(mf), - 0) - mfb <- mf[c(1, mb)] - mfb$drop.unused.levels <- TRUE - mfb[[1]] <- as.name("model.frame") - names(mfb)[2] <- "formula" - mfb <- eval(mfb, parent.frame()) - mt <- attr(mfb, "terms") - modmatrix.b <- if (!is.empty.model(mt)) - model.matrix(mt, mfb) - else matrix(, NROW(Y), 0) - Y <- model.response(mfb, "numeric") - weights <- model.weights(mfb) - if (!is.null(weights) && any(weights < 0)) - stop("Negative wts not allowed") - n <- rowSums(Y) - y <- Y[, 1] - if (any(n == 0)) - warning("The data set contains at least one line with weight = 0.\n") - mr <- match(c("random", "data", "na.action"), names(mf), - 0) - mr <- mf[c(1, mr)] - mr$drop.unused.levels <- TRUE - mr[[1]] <- as.name("model.frame") - names(mr)[2] <- "formula" - mr <- eval(mr, parent.frame()) - if (length(explain) == 0) - modmatrix.phi <- model.matrix(object = ~1, data = mr) - else { - express <- paste("model.matrix(object = ~ -1 + ", explain, - ", data = mr", ", contrasts = list(", explain, " = 'contr.treatment'))", - sep = "") - if (is.ordered(data[, match(explain, table = names(mr))])) - warning(explain, " is an ordered factor.\n", "Treatment contrast was used to build model matrix for phi.") - modmatrix.phi <- eval(parse(text = express)) - } - fam <- eval(parse(text = paste("binomial(link =", link, ")"))) - fm <- glm(formula = formula, family = fam, data = data, na.action = na.action) - b <- coef(fm) - if (any(is.na(b))) { - print(nab <- b[is.na(b)]) - stop("Initial values for the fixed effects contain at least one missing value.") - } - nb.b <- ncol(modmatrix.b) - nb.phi <- ncol(modmatrix.phi) - if (!is.null(phi.ini) && !(phi.ini < 1 & phi.ini > 0)) - stop("phi.ini was set to ", phi.ini, ".\nphi.ini should verify 0 < phi.ini < 1") - else if (is.null(phi.ini)) - phi.ini <- rep(0.1, nb.phi) - param.ini <- c(b, phi.ini) - if (!is.null(unlist(fixpar))) - param.ini[fixpar[[1]]] <- fixpar[[2]] - minuslogL <- function(param) { - if (!is.null(unlist(fixpar))) - param[fixpar[[1]]] <- fixpar[[2]] - b <- param[1:nb.b] - eta <- as.vector(modmatrix.b %*% b) - p <- invlink(eta, type = link) - phi <- as.vector(modmatrix.phi %*% param[(nb.b + 1):(nb.b + - nb.phi)]) - cnd <- phi == 0 - f1 <- dbinom(x = y[cnd], size = n[cnd], prob = p[cnd], - log = TRUE) - n2 <- n[!cnd] - y2 <- y[!cnd] - p2 <- p[!cnd] - phi2 <- phi[!cnd] - f2 <- lchoose(n2, y2) + 
lbeta(p2 * (1 - phi2)/phi2 + - y2, (1 - p2) * (1 - phi2)/phi2 + n2 - y2) - lbeta(p2 * - (1 - phi2)/phi2, (1 - p2) * (1 - phi2)/phi2) - fn <- sum(c(f1, f2)) - if (!is.finite(fn)) - fn <- -1e+20 - -fn - } - withWarnings <- function(expr) { - myWarnings <- NULL - wHandler <- function(w) { - myWarnings <<- c(myWarnings, list(w)) - invokeRestart("muffleWarning") - } - val <- withCallingHandlers(expr, warning = wHandler) - list(value = val, warnings = myWarnings) - } - reswarn <- withWarnings(optim(par = param.ini, fn = minuslogL, - hessian = hessian, control = control, ...)) - res <- reswarn$value - if (warnings) { - if (length(reswarn$warnings) > 0) { - v <- unlist(lapply(reswarn$warnings, as.character)) - tv <- data.frame(message = v, freq = rep(1, length(v))) - cat("Warnings during likelihood maximisation:\n") - print(aggregate(tv[, "freq", drop = FALSE], list(warning = tv$message), - sum)) - } - } - param <- res$par - namb <- colnames(modmatrix.b) - namphi <- paste("phi", colnames(modmatrix.phi), sep = ".") - nam <- c(namb, namphi) - names(param) <- nam - if (!is.null(unlist(fixpar))) - param[fixpar[[1]]] <- fixpar[[2]] - H <- H.singular <- Hr.singular <- NA - varparam <- matrix(NA) - is.singular <- function(X) qr(X)$rank < nrow(as.matrix(X)) - if (hessian) { - H <- res$hessian - if (is.null(unlist(fixpar))) { - H.singular <- is.singular(H) - if (!H.singular) - varparam <- qr.solve(H) - else warning("The hessian matrix was singular.\n") - } - else { - idparam <- 1:(nb.b + nb.phi) - idestim <- idparam[-fixpar[[1]]] - Hr <- as.matrix(H[-fixpar[[1]], -fixpar[[1]]]) - H.singular <- is.singular(Hr) - if (!H.singular) { - Vr <- solve(Hr) - dimnames(Vr) <- list(idestim, idestim) - varparam <- matrix(rep(NA, NROW(H) * NCOL(H)), - ncol = NCOL(H)) - varparam[idestim, idestim] <- Vr - } - } - } - else varparam <- matrix(NA) - if (any(!is.na(varparam))) - dimnames(varparam) <- list(nam, nam) - nbpar <- if (is.null(unlist(fixpar))) - sum(!is.na(param)) - else sum(!is.na(param[-fixpar[[1]]])) - logL.max <- sum(dbinom(x = y, size = n, prob = y/n, log = TRUE)) - logL <- -res$value - dev <- -2 * (logL - logL.max) - df.residual <- sum(n > 0) - nbpar - iterations <- res$counts[1] - code <- res$convergence - msg <- if (!is.null(res$message)) - res$message - else character(0) - if (code != 0) - warning("\nPossible convergence problem. Optimization process code: ", - code, " (see ?optim).\n") - new(Class = "glimML", CALL = CALL, link = link, method = "BB", - data = data, formula = formula, random = random, param = param, - varparam = varparam, fixed.param = param[seq(along = namb)], - random.param = param[-seq(along = namb)], logL = logL, - logL.max = logL.max, dev = dev, df.residual = df.residual, - nbpar = nbpar, iterations = iterations, code = code, - msg = msg, singular.hessian = as.numeric(H.singular), - param.ini = param.ini, na.action = na.action) -} - -invlink <- function (x, type = c("cloglog", "log", "logit", "probit")) -{ - switch(type, logit = plogis(x), probit = pnorm(x), log = exp(x), cloglog = 1 - - exp(-exp(x))) -} - -link <- function (x, type = c("cloglog", "log", "logit", "probit")) -{ - switch(type, logit = qlogis(x), probit = qnorm(x), - log = log(x), cloglog = log(-log(1 - x))) -} - - -pr <- function (object, ...) -{ - .local <- function (object, newdata = NULL, type = c("response", - "link"), se.fit = FALSE, ...) 
- { - type <- match.arg(type) - mf <- object@CALL - b <- coef(object) - f <- object@formula[-2] - data <- object@data - offset <- NULL - if (is.null(newdata)) { - mb <- match(c("formula", "data", "na.action"), names(mf), - 0) - mfb <- mf[c(1, mb)] - mfb$drop.unused.levels <- TRUE - mfb[[1]] <- as.name("model.frame") - names(mfb)[2] <- "formula" - mfb <- eval(mfb, parent.frame()) - mt <- attr(mfb, "terms") - Y <- model.response(mfb, "numeric") - X <- if (!is.empty.model(mt)) - model.matrix(mt, mfb, contrasts) - else matrix(, NROW(Y), 0) - offset <- model.offset(mfb) - } - else { - mfb <- model.frame(f, newdata) - offset <- model.offset(mfb) - X <- model.matrix(object = f, data = newdata) - } - eta <- as.vector(X %*% b) - eta <- if (is.null(offset)) - eta - else eta + offset - varparam <- object@varparam - varb <- as.matrix(varparam[seq(length(b)), seq(length(b))]) - vareta <- X %*% varb %*% t(X) - if (type == "response") { - p <- invlink(eta, type = object@link) - J <- switch(object@link, logit = diag(p * (1 - p), - nrow = length(p)), probit = diag(dnorm( qnorm(p) ), - nrow = length(p)), cloglog = diag(-(1 - p) * - log(1 - p), nrow = length(p)), log = diag(p, - nrow = length(p))) - varp <- J %*% vareta %*% J - se.p <- sqrt(diag(varp)) - } - se.eta <- sqrt(diag(vareta)) - if (!se.fit) - res <- switch(type, response = p, link = eta) - else res <- switch(type, response = list(fit = p, se.fit = se.p), - link = list(fit = eta, se.fit = se.eta)) - res - } - .local(object, ...) -} - -setMethod(predict, "glimML", pr) -setMethod(fitted, "glimML", function (object, ...) { - mf <- object@CALL - mb <- match(c("formula", "data", "na.action"), names(mf), - 0) - mfb <- mf[c(1, mb)] - mfb$drop.unused.levels <- TRUE - mfb[[1]] <- as.name("model.frame") - names(mfb)[2] <- "formula" - mfb <- eval(mfb, parent.frame()) - mt <- attr(mfb, "terms") - Y <- model.response(mfb, "numeric") - X <- if (!is.empty.model(mt)) - model.matrix(mt, mfb, contrasts) - else matrix(, NROW(Y), 0) - offset <- model.offset(mfb) - b <- coef(object) - eta <- as.vector(X %*% b) - eta <- if (is.null(offset)) - eta - else eta + offset - invlink(eta, type = object@link) -} -) -- GitLab
From 63a2e921f6b348e9d16db838f15276c1a56ce7d4 Mon Sep 17 00:00:00 2001 From: Didi Hou <didi.hou@rwth-aachen.de> Date: Wed, 20 Sep 2023 16:48:01 +0200 Subject: [PATCH 11/11] Update VERSION_LOG.md --- VERSION_LOG.md | 36 ++++++++++++++++++++++++++++++------ 1 file changed, 30 insertions(+), 6 deletions(-) diff --git a/VERSION_LOG.md b/VERSION_LOG.md index 2ee11f3..798f5bc 100644 --- a/VERSION_LOG.md +++ b/VERSION_LOG.md @@ -1,25 +1,49 @@ ## MAM v1.1.0 +### New features + +* Improved documentation: added the Try It On EBRAINS button and clear, detailed user instructions to the README.md file, so that users can follow the workflow step by step without much background knowledge or experience; deleted the section Testing on EBRAINS + +* In the down-scaled multi-area model, separated all external parameters into Parameters to tune and Default parameters. Parameters to tune consists of the 4 parameters we decided to expose to users initially; the default parameters are tuned by us and are not recommended for users to change + +* Added the section Extract and visualize interareal connectivity, which plots the area-level relative connectivity as heatmaps. The two heatmaps show the interareal connectivity of the full-scale multi-area model (left) and the down-scaled multi-area model (right). Small differences between them remain, even though relative connectivity is compared, because of the randomness involved in down-scaling + +* Added the section Simulation Results Visualization. The code is written in separate modules saved as .py files in "./figures/MAM2EBRAINS" to avoid displaying content that is not relevant to users + +* Added 3 plots in the section Simulation Results Visualization + * 3.1 Instantaneous and mean firing rate across all populations (existed in MAM v1.0.0, refined in MAM v1.1.0) + * 3.2 Resting state plots + * 3.3 Time-averaged population rates + +* The 3.2 Resting state plots figure is based on Fig. 5 of Schmidt M, Bakker R, Shen K, Bezgin G, Diesmann M & van Albada SJ (2018) A multi-scale layer-resolved spiking network model of resting-state dynamics in macaque cortex. PLOS Computational Biology, 14(9): e1006359. https://doi.org/10.1371/journal.pcbi.1006359, with a few differences: + * This plot lets users choose the 3 areas shown in the raster plots instead of fixing them to V1, V2, and FEF + * The subplot E Correlation coefficient is replaced with Synchrony + * The subplot G only plots the binned spike histograms (gray), not the convolved histograms (black) + ### Enhancements -* The structure of the notebook has been revamped for improved clarity and user navigation. +* Restructured the Jupyter Notebook and added a Notebook structure section as a table of contents that enables users to navigate quickly and easily between sections (see the notebook structure for details) + +* Added detailed, easy-to-understand descriptions for the 4 exposed parameters and brief comments for the default parameters -* Enhanced Parameter Descriptions: Parameters are now accompanied by more detailed descriptions, providing users with a clearer understanding and better control over model configurations. +* Added the model overview diagram and a short description of the down-scaled multi-area model at the beginning of the Jupyter Notebook -* Visualization Enhancements: Dive deeper into your simulation results with the addition of new plots that offer richer insights into the model's behavior. +* Added descriptions of comparable figures from our publications whenever available, so that users can compare the down-scaled model with their customized parameters against the full-scale model presented in the papers -* Try it on EBRAINS: For users looking for a seamless experience, we've integrated a "Try it on EBRAINS" button. Dive right in and experience MAM on the EBRAINS platform! +* Removed unnecessary print statements in ./multiarea_model/analysis.py and ./multiarea_model/analysis_helpers.py to avoid printing output that is not relevant to users -* Improved Documentation: The README.de file has been updated with comprehensive user instructions to guide both new and returning users through the features and functionalities of MAM. +* Updated the ./.gitignore file to ignore checkpoint files ### Bug fixes * Corrected the separator from "" to "/" in ./multiarea_model/data_multiarea/SLN_logdensities.R to fix the file path of ./multiarea_model/data_multiarea/bbAlt.R + +* Fixed a bug in ./multiarea_model/analysis.py: changed np.nan*np.ones(params['t_max'] - params['t_min']) to np.nan*np.ones(int(params['t_max'] - params['t_min'])) ## MAM v1.0.0 ### Bug fixes -* corrected the URL of NEST logo in README.md +* Corrected the URL of NEST logo in README.md -- GitLab
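For reference, the separator fix listed under MAM v1.1.0 can be illustrated with a minimal R sketch. The base-directory value below is a hypothetical example; in SLN_logdensities.R this value is taken from commandArgs(trailingOnly = TRUE):

```r
# Hypothetical base directory passed to the script on the command line.
args <- "/home/user/multi-area-model"

# Before the fix: sep = "" simply concatenates the two parts, so the path is
# only valid if the base directory happens to end with a trailing "/".
old_path <- paste(args, "multiarea_model/data_multiarea/bbAlt.R", sep = "")

# After the fix: sep = "/" inserts the missing separator explicitly.
new_path <- paste(args, "multiarea_model/data_multiarea/bbAlt.R", sep = "/")

print(old_path)  # "/home/user/multi-area-modelmultiarea_model/data_multiarea/bbAlt.R" (broken)
print(new_path)  # "/home/user/multi-area-model/multiarea_model/data_multiarea/bbAlt.R"
```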