Lab 4
Normal hierarchical model
library(dplyr) # optional
library(kableExtra)
library(ggplot2)
Example
Combining the results of separate clinical trials
Reference
Gelman: p. 148, Sec 5.6, Hierarchical modelling applied to a meta-analysis
Language
BUGS, N-N.b; R, N-N.r
Subject
We have the results of 22 clinical trials of beta-blockers for reducing mortality after myocardial infarction.
Treating the 22 studies as exchangeable, analyse the data to estimate: the overall mean effect of the beta-blocker treatment; the effect size in each of the 22 observed studies; the effect size in another comparable (exchangeable) unobserved study.
Compare the meta-analysis with simpler approaches: separate analyses, assuming no similarity across studies; complete pooling, assuming the studies are identical replications of one another.
Hierarchical priors for pooling strength: a meta-analysis
Data
# In BUGS, within a list structure, y:
y = structure(.Data = c(3, 39, 3, 38, 14, 116, 7, 114, 11, 93, 5, 69, 127, 1520, 102,
1533, 27, 365, 28, 355, 6, 52, 4, 59, 152, 939, 98, 945, 48, 471, 60, 632, 37, 282,
25, 278, 188, 1921, 138, 1916, 52, 583, 64, 873, 47, 266, 45, 263, 16, 293, 9, 291,
45, 883, 57, 858, 31, 147, 25, 154, 38, 213, 33, 207, 12, 122, 28, 251, 6, 154, 8,
151, 3, 134, 6, 174, 40, 218, 32, 209, 43, 364, 27, 391, 39, 674, 22, 680), .Dim = c(22,
4))
# in R, reverse the dims and transpose the output
y = structure(.Data = c(3, 39, 3, 38, 14, 116, 7, 114, 11, 93, 5, 69, 127, 1520, 102,
1533, 27, 365, 28, 355, 6, 52, 4, 59, 152, 939, 98, 945, 48, 471, 60, 632, 37, 282,
25, 278, 188, 1921, 138, 1916, 52, 583, 64, 873, 47, 266, 45, 263, 16, 293, 9, 291,
45, 883, 57, 858, 31, 147, 25, 154, 38, 213, 33, 207, 12, 122, 28, 251, 6, 154, 8,
151, 3, 134, 6, 174, 40, 218, 32, 209, 43, 364, 27, 391, 39, 674, 22, 680), .Dim = c(4,
22))
y <- t(y)
y
[,1] [,2] [,3] [,4]
[1,] 3 39 3 38
[2,] 14 116 7 114
[3,] 11 93 5 69
[4,] 127 1520 102 1533
[5,] 27 365 28 355
[6,] 6 52 4 59
[7,] 152 939 98 945
[8,] 48 471 60 632
[9,] 37 282 25 278
[10,] 188 1921 138 1916
[11,] 52 583 64 873
[12,] 47 266 45 263
[13,] 16 293 9 291
[14,] 45 883 57 858
[15,] 31 147 25 154
[16,] 38 213 33 207
[17,] 12 122 28 251
[18,] 6 154 8 151
[19,] 3 134 6 174
[20,] 40 218 32 209
[21,] 43 364 27 391
[22,] 39 674 22 680
(n <- nrow(y))
[1] 22
names(y) <- c("control.dead", "control.total", "treated.dead", "treated.total")Nell’es. beta-blockers i valori \(y_j\) sono i empirical logits e i valori \(\sigma^2_j\) sono le varianze campionarie approssimate (calcolo secondo (5.23) and (5.24), p. 150, Gelman).
z <- log(y[, 3]/(y[, 4] - y[, 3])) - log(y[, 1]/(y[, 2] - y[, 1]))  # empirical logits y_j
sigma <- 1/y[, 3] + 1/(y[, 4] - y[, 3]) + 1/y[, 1] + 1/(y[, 2] - y[, 1])  # approximate variances sigma_j^2
par(mar = c(4, 4, 1, 1))
plot(density(z, bw = 0.2), xlab = "z", main = "")
Summary table (with some other possible effect-size transformations added)
p1 <- y[, 3]/y[, 4]
p0 <- y[, 1]/y[, 2]
riskdiff <- p1 - p0
riskratio <- p1/p0
OR <- (p1/(1 - p1))/(p0/(1 - p0))
kbl(cbind(y, p1 = p1 * 100, p0 = p0 * 100, `p1-p0` = riskdiff * 100, `p1/p0` = riskratio,
OR = OR, LOR = z, sd = sqrt(sigma)) %>%
    round(2))
| control.dead | control.total | treated.dead | treated.total | p1 (%) | p0 (%) | p1-p0 (%) | p1/p0 | OR | LOR | sd |
|---|---|---|---|---|---|---|---|---|---|---|
| 3 | 39 | 3 | 38 | 7.89 | 7.69 | 0.20 | 1.03 | 1.03 | 0.03 | 0.85 |
| 14 | 116 | 7 | 114 | 6.14 | 12.07 | -5.93 | 0.51 | 0.48 | -0.74 | 0.48 |
| 11 | 93 | 5 | 69 | 7.25 | 11.83 | -4.58 | 0.61 | 0.58 | -0.54 | 0.56 |
| 127 | 1520 | 102 | 1533 | 6.65 | 8.36 | -1.70 | 0.80 | 0.78 | -0.25 | 0.14 |
| 27 | 365 | 28 | 355 | 7.89 | 7.40 | 0.49 | 1.07 | 1.07 | 0.07 | 0.28 |
| 6 | 52 | 4 | 59 | 6.78 | 11.54 | -4.76 | 0.59 | 0.56 | -0.58 | 0.68 |
| 152 | 939 | 98 | 945 | 10.37 | 16.19 | -5.82 | 0.64 | 0.60 | -0.51 | 0.14 |
| 48 | 471 | 60 | 632 | 9.49 | 10.19 | -0.70 | 0.93 | 0.92 | -0.08 | 0.20 |
| 37 | 282 | 25 | 278 | 8.99 | 13.12 | -4.13 | 0.69 | 0.65 | -0.42 | 0.27 |
| 188 | 1921 | 138 | 1916 | 7.20 | 9.79 | -2.58 | 0.74 | 0.72 | -0.33 | 0.12 |
| 52 | 583 | 64 | 873 | 7.33 | 8.92 | -1.59 | 0.82 | 0.81 | -0.21 | 0.19 |
| 47 | 266 | 45 | 263 | 17.11 | 17.67 | -0.56 | 0.97 | 0.96 | -0.04 | 0.23 |
| 16 | 293 | 9 | 291 | 3.09 | 5.46 | -2.37 | 0.57 | 0.55 | -0.59 | 0.43 |
| 45 | 883 | 57 | 858 | 6.64 | 5.10 | 1.55 | 1.30 | 1.33 | 0.28 | 0.21 |
| 31 | 147 | 25 | 154 | 16.23 | 21.09 | -4.85 | 0.77 | 0.73 | -0.32 | 0.30 |
| 38 | 213 | 33 | 207 | 15.94 | 17.84 | -1.90 | 0.89 | 0.87 | -0.14 | 0.26 |
| 12 | 122 | 28 | 251 | 11.16 | 9.84 | 1.32 | 1.13 | 1.15 | 0.14 | 0.36 |
| 6 | 154 | 8 | 151 | 5.30 | 3.90 | 1.40 | 1.36 | 1.38 | 0.32 | 0.55 |
| 3 | 134 | 6 | 174 | 3.45 | 2.24 | 1.21 | 1.54 | 1.56 | 0.44 | 0.72 |
| 40 | 218 | 32 | 209 | 15.31 | 18.35 | -3.04 | 0.83 | 0.80 | -0.22 | 0.26 |
| 43 | 364 | 27 | 391 | 6.91 | 11.81 | -4.91 | 0.58 | 0.55 | -0.59 | 0.26 |
| 39 | 674 | 22 | 680 | 3.24 | 5.79 | -2.55 | 0.56 | 0.54 | -0.61 | 0.27 |
n_mean <- apply(y[, c(2, 4)], 1, mean)  # average of the control and treated group sizes per study
cbind(study = paste(1:22), n_mean = n_mean %>%
round(1), sd = sqrt(sigma) %>%
round(2))[order(n_mean), ] %>%
t %>%
    kbl
| study | 1 | 6 | 3 | 2 | 15 | 18 | 19 | 17 | 16 | 20 | 12 | 9 | 13 | 5 | 21 | 8 | 22 | 11 | 14 | 7 | 4 | 10 |
| n_mean | 38.5 | 55.5 | 81 | 115 | 150.5 | 152.5 | 154 | 186.5 | 210 | 213.5 | 264.5 | 280 | 292 | 360 | 377.5 | 551.5 | 677 | 728 | 870.5 | 942 | 1526.5 | 1918.5 |
| sd | 0.85 | 0.68 | 0.56 | 0.48 | 0.3 | 0.55 | 0.72 | 0.36 | 0.26 | 0.26 | 0.23 | 0.27 | 0.43 | 0.28 | 0.26 | 0.2 | 0.27 | 0.19 | 0.21 | 0.14 | 0.14 | 0.12 |
# Study-level sd (blue) and empirical logit z (red) versus the square root of the
# average group size: larger studies have smaller sampling standard deviations
par(mar = c(4, 4, 0.5, 0.5))
ylims <- range(c(sqrt(sigma), z))
plot(sqrt(n_mean), sqrt(sigma), ylim = ylims, type = "n")
abline(v = sqrt(n_mean), col = 8, lty = 3)
text(sqrt(n_mean), sqrt(sigma), lab = paste(1:22), col = 4)
text(sqrt(n_mean), z, lab = paste(1:22), col = 2)
Separate analyses
Assuming a flat prior \(p(\theta_j)\propto 1\), the 95% intervals are:
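Explicitly, under the approximate sampling model \(y_j\sim N(\theta_j,\sigma_j^2)\) with a flat prior, \[ \theta_j\mid y \sim N(y_j,\ \sigma_j^2), \qquad \text{95\% interval: } y_j \pm 1.96\,\sigma_j, \] where \(y_j\) is the empirical logit (z in the code) and \(\sigma_j\) its standard error, which is what the intervals below compute.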
df_sep <- data.frame(x = (1:22), y = z, lower = z - 1.96 * sqrt(sigma), upper = z + 1.96 *
sqrt(sigma))
ggplot(df_sep, aes(x, y)) + geom_point() + geom_errorbar(aes(ymin = lower, ymax = upper)) +
    xlim(c(1, 22))
With the studies ordered by sample size
df_sep_ord <- df_sep %>% mutate(n = n_mean) %>% arrange(n_mean) %>% # sort the data frame by study size (this does not reorder the factor levels)
    mutate(x = factor(x, levels = x)) # update the factor levels so the plot keeps this ordering
df_sep_ord
x y lower upper n
1 1 0.02817088 -1.6384238 1.69476554 38.5
2 6 -0.58415690 -1.9085537 0.74023994 55.5
3 3 -0.54062120 -1.6471609 0.56591852 81.0
4 2 -0.74100320 -1.6879803 0.20597386 115.0
5 15 -0.32133359 -0.9048434 0.26217619 150.5
6 18 0.32204972 -0.7611344 1.40523380 152.5
7 19 0.44438052 -0.9602518 1.84901281 154.0
8 17 0.14060645 -0.5731749 0.85438785 186.5
9 16 -0.13534792 -0.6467548 0.37605898 210.0
10 20 -0.21750973 -0.7267994 0.29177995 213.5
11 12 -0.03890844 -0.4886513 0.41083440 264.5
12 9 -0.42417337 -0.9611605 0.11281380 280.0
13 13 -0.59325371 -1.4265818 0.24007440 292.0
14 5 0.06945337 -0.4806331 0.61953988 360.0
15 21 -0.59107599 -1.0952014 -0.08695055 377.5
16 8 -0.07862326 -0.4784456 0.32119905 551.5
17 22 -0.60809913 -1.1419613 -0.07423696 677.0
18 11 -0.21339753 -0.5953467 0.16855162 728.0
19 14 0.28154593 -0.1211272 0.68421904 870.5
20 7 -0.51238549 -0.7842136 -0.24055734 942.0
21 4 -0.24612808 -0.5169673 0.02471119 1526.5
22 10 -0.33482340 -0.5642773 -0.10536951 1918.5
ggplot(df_sep_ord, aes(x, y)) +
geom_hline(yintercept=0, linetype="dotted", color = "green", size=.75) +
geom_point() +
    geom_errorbar(aes(ymin = lower, ymax = upper), width=.5)
A short aside on printing precision: options(digits = ) controls how many significant digits R prints, and digits = 2 is set just before the pooled analysis below.
op <- options()
utils::str(op) # op is a named list
List of 79
$ add.smooth : logi TRUE
$ browserNLdisabled : logi FALSE
$ CBoundsCheck : logi FALSE
$ check.bounds : logi FALSE
$ citation.bibtex.max : int 1
$ continue : chr "+ "
$ contrasts : Named chr [1:2] "contr.treatment" "contr.poly"
..- attr(*, "names")= chr [1:2] "unordered" "ordered"
$ cpp11_preserve_xptr :<externalptr>
$ cpp11_should_unwind_protect : logi TRUE
$ defaultPackages : chr [1:6] "datasets" "utils" "grDevices" "graphics" ...
$ demo.ask : chr "default"
$ deparse.cutoff : int 60
$ device :function (width = 7, height = 7, ...)
$ device.ask.default : logi FALSE
$ digits : int 7
$ dplyr.show_progress : logi TRUE
$ echo : logi FALSE
$ editor : chr "notepad"
$ encoding : chr "native.enc"
$ example.ask : chr "default"
$ expressions : int 5000
$ fansi.css : chr "PRE.fansi SPAN {padding-top: .25em; padding-bottom: .25em};"
$ fansi.ctrl : chr "all"
$ fansi.tab.stops : int 8
$ fansi.tabs.as.spaces : logi FALSE
$ fansi.term.cap : chr [1:2] "bright" "256"
$ fansi.warn : logi TRUE
$ help.search.types : chr [1:3] "vignette" "demo" "help"
$ help.try.all.packages : logi FALSE
$ help_type : chr "html"
$ htmltools.preserve.raw : logi TRUE
$ HTTPUserAgent : chr "R (4.1.1 x86_64-w64-mingw32 x86_64 mingw32)"
$ httr_oauth_cache : logi NA
$ httr_oob_default : logi FALSE
$ install.packages.compile.from.source: chr "interactive"
$ internet.info : int 2
$ keep.parse.data : logi TRUE
$ keep.parse.data.pkgs : logi FALSE
$ keep.source : logi FALSE
$ keep.source.pkgs : logi FALSE
$ knitr.in.progress : logi TRUE
$ knitr.table.format : chr "html"
$ locatorBell : logi TRUE
$ mailer : chr "mailto"
$ matprod : chr "default"
$ max.print : int 999999
$ menu.graphics : logi TRUE
$ na.action : chr "na.omit"
$ nwarnings : int 50
$ OutDec : chr "."
$ pager : chr "internal"
$ papersize : chr "a4"
$ PCRE_limit_recursion : logi NA
$ PCRE_study : logi FALSE
$ PCRE_use_JIT : logi TRUE
$ pdfviewer : chr "C:/PROGRA~2/R/R-41~1.1/bin/x64/open.exe"
$ pkgType : chr "both"
$ prompt : chr "> "
$ repos : Named chr "@CRAN@"
..- attr(*, "names")= chr "CRAN"
$ scipen : num 0
$ show.coef.Pvalues : logi TRUE
$ show.error.messages : logi TRUE
$ show.signif.stars : logi TRUE
$ showErrorCalls : logi TRUE
$ str :List of 7
..$ strict.width : chr "no"
..$ digits.d : int 3
..$ vec.len : int 4
..$ list.len : int 99
..$ deparse.lines : NULL
..$ drop.deparse.attr: logi TRUE
..$ formatNum :function (x, ...)
$ str.dendrogram.last : chr "`"
$ stringsAsFactors : logi FALSE
$ tikzMetricsDictionary : chr "lab_4_NN-tikzDictionary"
$ timeout : int 60
$ try.outFile : 'textConnection' int 3
..- attr(*, "conn_id")=<externalptr>
$ ts.eps : num 1e-05
$ ts.S.compat : logi FALSE
$ unzip : chr "internal"
$ useFancyQuotes : logi FALSE
$ verbose : logi FALSE
$ warn : int 0
$ warning.length : int 1000
$ width : int 85
$ windowsTimeouts : int [1:2] 100 500
options(digits = 4)
123.456
[1] 123.5
3.4567
[1] 3.457
12.234
[1] 12.23
0.2345
[1] 0.2345
options(op) # reset (all) initial options
options("digits")
$digits
[1] 7
options(digits = 2)
Pooled analysis
Assuming a flat prior \(p(\mu)\propto 1\), the 95% interval is:
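Under complete pooling all studies share a single effect \(\mu\); with a flat prior the posterior follows by precision weighting, which is what the code below computes: \[ \mu\mid y \sim N(\hat\mu,\ V_\mu), \qquad \hat\mu=\frac{\sum_j y_j/\sigma_j^2}{\sum_j 1/\sigma_j^2}, \qquad V_\mu=\Big(\sum_j 1/\sigma_j^2\Big)^{-1}. \]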
v_pool <- (sum(1/sigma))^(-1)
sqrt(v_pool)
[1] 0.05
m_pool <- sum(z * 1/sigma) * v_pool
m_pool
[1] -0.26
e <- qnorm(0.975) * sqrt(v_pool)
q_pool <- c(m_pool - e, m_pool + e)
q_pool
[1] -0.36 -0.16
exp(q_pool)
[1] 0.70 0.85
ggplot(df_sep_ord, aes(x, y)) + geom_hline(yintercept = 0, linetype = "dotted", color = "green",
size = 0.75) + geom_point() + geom_errorbar(aes(ymin = lower, ymax = upper), width = 0.5) +
geom_hline(yintercept = c(m_pool, q_pool), color = "magenta", size = c(0.75, 0.5,
    0.5))
Predictive check
We compare the study estimates with an approximate 95% predictive interval for a replicated study under complete pooling, computed from the pooled mean and the average sampling variance.
q_y_rep <- qnorm(c(0.025, 0.975), m_pool, sqrt(mean(sigma)))
ggplot(df_sep_ord, aes(x, y)) + geom_hline(yintercept = 0, linetype = "dotted", color = "green",
size = 0.75) + geom_point() + geom_errorbar(aes(ymin = lower, ymax = upper), width = 0.5) +
geom_hline(yintercept = c(m_pool, q_pool), color = "magenta", size = c(0.75, 0.5,
    0.5)) + geom_hline(yintercept = q_y_rep, color = "dodgerblue1", size = 0.75)
Computation and simulation
Simulation of the posterior in the Normal-Normal hierarchical model.
The posterior distribution of \((\theta,\mu,\tau)\) is computed by simulation according to the factorization: \[ p(\theta,\mu,\tau|y) = p(\tau|y)\,p(\mu|\tau,y)\,p(\theta|\mu,\tau,y) \]
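For reference, the sampling model, the flat hyperpriors, and the conditional posteriors implemented by the functions below (the equations cited in the code comments, BDA3 Sec. 5.4) are:
\[ y_j\mid\theta_j \sim N(\theta_j,\sigma_j^2), \qquad \theta_j\mid\mu,\tau \sim N(\mu,\tau^2), \qquad p(\mu\mid\tau)\propto 1, \quad p(\tau)\propto 1, \]
\[ \theta_j\mid\mu,\tau,y \sim N(\hat\theta_j, V_j), \qquad \hat\theta_j=\frac{y_j/\sigma_j^2+\mu/\tau^2}{1/\sigma_j^2+1/\tau^2}, \qquad V_j=\frac{1}{1/\sigma_j^2+1/\tau^2}, \]
\[ \mu\mid\tau,y \sim N(\hat\mu, V_\mu), \qquad \hat\mu=\frac{\sum_j y_j/(\sigma_j^2+\tau^2)}{\sum_j 1/(\sigma_j^2+\tau^2)}, \qquad V_\mu^{-1}=\sum_j\frac{1}{\sigma_j^2+\tau^2}, \]
\[ p(\tau\mid y)\;\propto\; p(\tau)\,V_\mu^{1/2}\prod_j(\sigma_j^2+\tau^2)^{-1/2}\exp\!\left(-\frac{(y_j-\hat\mu)^2}{2(\sigma_j^2+\tau^2)}\right). \]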
# 1) Compute the (unnormalized) density p(tau|y) as in (5.21) with p(tau) propto 1,
# p. 117, BDA3, Gelman. Arguments: e = c(y_1,...,y_n); v = c(sigma2_1,...,sigma2_n);
# x.tau = seq(0, 1, .001), the grid over which the density is evaluated.
dtau.r <- function(e, v, x.tau) {
n <- length(e)
d.tau <- c()
for (i in 1:length(x.tau)) {
prec <- 1/(x.tau[i]^2 + v)
v.mu <- (sum(prec))^(-1)
mu.bar <- (prec %*% e) * v.mu
d.tau[i] <- sqrt(v.mu)
for (j in 1:n) {
d.tau[i] <- d.tau[i] * (v[j] + x.tau[i]^2)^(-1/2) * exp(-((e[j] - mu.bar)^2)/(2 *
(v[j] + x.tau[i]^2)))
}
}
d.tau
}
# 2) Draw tau from p(tau|y) by the inverse-cdf method on the grid.
# Arguments: d.tau = dtau.r output; m = number of draws; x.tau = seq(0, 1, .001).
tau.r <- function(d.tau, m, x.tau) {
cn <- sum(d.tau)
dn.tau <- d.tau/cn
f.tau <- cumsum(dn.tau)
tau <- c()
for (i in 1:m) {
u <- max(runif(1), f.tau[1])
# cat(u,'\n')
tau[i] <- x.tau[length(f.tau[f.tau < u | f.tau == u])]
}
tau
}
# 3) Draw mu from p(mu|tau,y), with p(mu|tau) propto 1: mu|tau,y ~ N(mu.bar, v.mu),
# as in (5.20), p. 117, Gelman's book. Arguments: e, v as above; tau = tau.r output.
mu.r <- function(e, v, tau) {
mu <- c()
for (i in 1:length(tau)) {
prec <- 1/(tau[i]^2 + v)
v.mu <- (sum(prec))^(-1)
mu.bar <- (prec %*% e) * v.mu
mu[i] <- rnorm(1, mu.bar, sqrt(v.mu))
}
mu
}
# 4) Draw theta from p(theta|mu,tau,y): theta_j|mu,tau,y ~ N(theta_j.bar, v_j.theta),
# as in (5.17), Gelman's book. Arguments: e, v, mu, tau as above.
theta.r <- function(e, v, mu, tau) {
n <- length(e)
m <- length(tau)
theta <- matrix(NA, m, n)
for (i in 1:m) {
for (j in 1:n) {
theta.bar <- ifelse(tau[i] == 0, mu[i], (1/v[j] * e[j] + 1/tau[i]^2 * mu[i])/(1/v[j] +
1/tau[i]^2))
# theta.bar <- (1/v[j]*e[j] + 1/tau[i]^2*mu[i])/(1/v[j] + 1/tau[i]^2)
v.theta <- (1/v[j] + 1/tau[i]^2)^(-1)
# cat(theta.bar, v.theta, ' ')
theta[i, j] <- rnorm(1, theta.bar, sqrt(v.theta))
}
}
theta
}
Note that when the sampled \(\tau\) is 0, the naive formula gives \(\bar\theta_j={\rm NaN}\) (the weight \(1/\tau^2\) is \(\infty\), hence \(\infty/\infty={\rm NaN}\)), while \(V_j=(1/\sigma_j^2+1/\tau^2)^{-1}=0\); the theta.r algorithm therefore handles this case explicitly by setting \(\bar\theta_j=\mu\).
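A minimal illustration of the NaN problem, with arbitrary made-up values (not taken from the data):
# At tau = 0 the prior precision 1/tau^2 is Inf, so the precision-weighted mean is NaN
tau0 <- 0; v_j <- 0.04; y_j <- -0.5; mu_i <- -0.25  # arbitrary illustrative values
(1/v_j * y_j + 1/tau0^2 * mu_i)/(1/v_j + 1/tau0^2)  # -Inf/Inf = NaN
# The limit as tau -> 0 is simply mu_i, which is what theta.r returns in this case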
Make the simulation replicable
set.seed(2222)
Computation of the unnormalized posterior density of \(\tau\)
dtau <- dtau.r(e = z, v = sigma, x.tau = seq(0, 1, 0.001))
Simulation of \(\tau\)
tau <- tau.r(dtau, 1000, x.tau = seq(0, 1, 0.001))
Simulation of \(\mu|\tau\)
mu <- mu.r(e = z, v = sigma, tau)
Simulation of \(\theta|\mu,\tau\)
theta <- theta.r(e = z, v = sigma, mu, tau)
q_theta <- apply(theta, 2, quantile, c(0.025, 0.25, 0.5, 0.75, 0.975)) %>%
    t()
kbl(cbind(y, LOR = z, sd = sqrt(sigma), q_theta) %>%
    round(2))
| control.dead | control.total | treated.dead | treated.total | LOR | sd | 2.5% | 25% | 50% | 75% | 97.5% |
|---|---|---|---|---|---|---|---|---|---|---|
| 3 | 39 | 3 | 38 | 0.03 | 0.85 | -0.59 | -0.32 | -0.24 | -0.15 | 0.12 |
| 14 | 116 | 7 | 114 | -0.74 | 0.48 | -0.62 | -0.36 | -0.28 | -0.21 | 0.00 |
| 11 | 93 | 5 | 69 | -0.54 | 0.56 | -0.61 | -0.34 | -0.26 | -0.19 | 0.04 |
| 127 | 1520 | 102 | 1533 | -0.25 | 0.14 | -0.44 | -0.31 | -0.25 | -0.19 | -0.05 |
| 27 | 365 | 28 | 355 | 0.07 | 0.28 | -0.43 | -0.29 | -0.22 | -0.12 | 0.16 |
| 6 | 52 | 4 | 59 | -0.58 | 0.68 | -0.60 | -0.34 | -0.26 | -0.18 | 0.06 |
| 152 | 939 | 98 | 945 | -0.51 | 0.14 | -0.62 | -0.43 | -0.35 | -0.28 | -0.17 |
| 48 | 471 | 60 | 632 | -0.08 | 0.20 | -0.42 | -0.29 | -0.22 | -0.13 | 0.08 |
| 37 | 282 | 25 | 278 | -0.42 | 0.27 | -0.59 | -0.35 | -0.28 | -0.20 | -0.03 |
| 188 | 1921 | 138 | 1916 | -0.33 | 0.12 | -0.49 | -0.35 | -0.29 | -0.24 | -0.13 |
| 52 | 583 | 64 | 873 | -0.21 | 0.19 | -0.45 | -0.30 | -0.24 | -0.17 | 0.01 |
| 47 | 266 | 45 | 263 | -0.04 | 0.23 | -0.43 | -0.28 | -0.21 | -0.12 | 0.12 |
| 16 | 293 | 9 | 291 | -0.59 | 0.43 | -0.62 | -0.36 | -0.28 | -0.21 | 0.03 |
| 45 | 883 | 57 | 858 | 0.28 | 0.21 | -0.35 | -0.22 | -0.12 | 0.00 | 0.26 |
| 31 | 147 | 25 | 154 | -0.32 | 0.30 | -0.55 | -0.34 | -0.26 | -0.19 | 0.01 |
| 38 | 213 | 33 | 207 | -0.14 | 0.26 | -0.48 | -0.31 | -0.24 | -0.15 | 0.05 |
| 12 | 122 | 28 | 251 | 0.14 | 0.36 | -0.45 | -0.30 | -0.22 | -0.12 | 0.16 |
| 6 | 154 | 8 | 151 | 0.32 | 0.55 | -0.48 | -0.30 | -0.23 | -0.12 | 0.15 |
| 3 | 134 | 6 | 174 | 0.44 | 0.72 | -0.48 | -0.31 | -0.23 | -0.14 | 0.20 |
| 40 | 218 | 32 | 209 | -0.22 | 0.26 | -0.54 | -0.32 | -0.25 | -0.17 | 0.06 |
| 43 | 364 | 27 | 391 | -0.59 | 0.26 | -0.66 | -0.40 | -0.30 | -0.24 | -0.09 |
| 39 | 674 | 22 | 680 | -0.61 | 0.27 | -0.65 | -0.38 | -0.29 | -0.23 | -0.04 |
Marginal posterior density of \(\tau\)
df_sim <- data.frame(tau = tau)
ggplot(df_sim, aes(x = tau)) + geom_density(adjust = 2)
quantile(tau, c(0.025, 0.25, 0.5, 0.75, 0.975))
2.5% 25% 50% 75% 97.5%
0.007 0.068 0.122 0.183 0.316
c(mean(tau), sd(tau))
[1] 0.132 0.083
Marginal posterior density of \(\mu\)
df_sim <- df_sim %>%
mutate(mu = mu)
ggplot(df_sim, aes(x = mu)) + geom_density(adjust = 2)
quantile(mu, c(0.025, 0.25, 0.5, 0.75, 0.975)) %>%
round(2)
2.5% 25% 50% 75% 97.5%
-0.38 -0.29 -0.25 -0.20 -0.11
c(mean(mu), sd(mu)) %>%
round(2)
[1] -0.25 0.07
On the odds as well as probability scale
quantile(exp(mu), c(0.025, 0.5, 0.975))
2.5% 50% 97.5%
0.69 0.78 0.90
quantile(1/(1 + exp(-mu)), c(0.025, 0.5, 0.975))
2.5% 50% 97.5%
0.41 0.44 0.47
Marginal posterior density of \(\theta\)
q_mu <- quantile(mu, c(0.025, 0.5, 0.975))
q_theta <- q_theta %>%
as.data.frame()
names(q_theta) <- c("q025", "I", "Me", "II", "q975")
q_theta <- cbind(x = df_sep_ord$x, q_theta)
q_theta
x q025 I Me II q975
1 1 -0.59 -0.32 -0.24 -0.15382 0.1165
2 6 -0.62 -0.36 -0.28 -0.20932 0.0012
3 3 -0.61 -0.34 -0.26 -0.18602 0.0359
4 2 -0.44 -0.31 -0.25 -0.19025 -0.0542
5 15 -0.43 -0.29 -0.22 -0.12089 0.1610
6 18 -0.60 -0.34 -0.26 -0.18077 0.0557
7 19 -0.62 -0.43 -0.35 -0.27644 -0.1689
8 17 -0.42 -0.29 -0.22 -0.12801 0.0811
9 16 -0.59 -0.35 -0.28 -0.20494 -0.0297
10 20 -0.49 -0.35 -0.29 -0.23541 -0.1310
11 12 -0.45 -0.30 -0.24 -0.17252 0.0071
12 9 -0.43 -0.28 -0.21 -0.12162 0.1186
13 13 -0.62 -0.36 -0.28 -0.20625 0.0343
14 5 -0.35 -0.22 -0.12 0.00099 0.2611
15 21 -0.55 -0.34 -0.26 -0.19133 0.0139
16 8 -0.48 -0.31 -0.24 -0.15459 0.0498
17 22 -0.45 -0.30 -0.22 -0.11717 0.1630
18 11 -0.48 -0.30 -0.23 -0.11809 0.1510
19 14 -0.48 -0.31 -0.23 -0.13938 0.1972
20 7 -0.54 -0.32 -0.25 -0.16975 0.0649
21 4 -0.66 -0.40 -0.30 -0.23507 -0.0937
22 10 -0.65 -0.38 -0.29 -0.22847 -0.0373
ggplot(df_sep_ord, aes(x, y)) + geom_hline(yintercept = c(m_pool, q_pool), color = "magenta",
size = c(0.75, 0.5, 0.5)) + geom_hline(yintercept = q_mu, color = "green3", size = c(0.75,
1, 0.75)) + geom_point() + geom_errorbar(aes(ymin = lower, ymax = upper), width = 0.5) +
geom_point(data = q_theta, aes(x = x, y = Me), col = 2, size = 1.75) + geom_segment(data = q_theta,
    aes(x = x, y = q025, xend = x, yend = q975), col = 2, size = 1.25, alpha = 0.65)
The conditional posterior \(p(\mu|\tau,y)\)
mean(tau)
[1] 0.13
summary(tau)
Min. 1st Qu. Median Mean 3rd Qu. Max.
0.00 0.07 0.12 0.13 0.18 0.46
tau_val <- seq(0, 0.3, by = 0.05)
l <- 1000
mu_tau_val <- mu.r(e = z, v = sigma, tau = rep(tau_val, each = l))
mu_cond_tau <- data.frame(mean = rep(NA, 7), sd = rep(NA, 7), row.names = paste("tau =",
tau_val))
for (i in 1:7) {
    temp <- mu_tau_val[(i - 1) * l + (1:l)]
    mu_cond_tau[i, ] <- c(mean(temp), sd(temp))
}
mu_cond_tau <- mu_cond_tau %>%
rbind(marginal = c(mean(mu_tau_val), sd(mu_tau_val)))
mu_cond_tau[, 1] <- mu_cond_tau[, 1] %>%
    round(2)
mu_cond_tau[, 2] <- mu_cond_tau[, 2] %>%
round(2)
mu_cond_tau
mean sd
tau = 0 -0.26 0.05
tau = 0.05 -0.26 0.05
tau = 0.1 -0.25 0.06
tau = 0.15 -0.25 0.06
tau = 0.2 -0.24 0.07
tau = 0.25 -0.24 0.08
tau = 0.3 -0.24 0.09
marginal -0.25 0.07
When \(\tau=0\)
quantile(mu_tau_val[1:l], c(0.025, 0.975))
2.5% 97.5%
-0.36 -0.17
quantile(mu_tau_val[1:l], c(0.025, 0.975)) %>%
exp
2.5% 97.5%
0.70 0.84
Effect size in another, comparable (exchangeable) unobserved study
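The quantiles below summarize the predicted effect in a new exchangeable study, obtained by drawing \(\tilde\theta\sim N(\mu,\tau^2)\) for each posterior draw of \((\mu,\tau)\). The code that produced them is not shown above; a minimal sketch using the draws already available (the object name theta_new is mine):
theta_new <- rnorm(length(mu), mean = mu, sd = tau)  # one new-study effect per posterior draw of (mu, tau)
quantile(theta_new, c(0.025, 0.5, 0.975)) %>%
    round(2)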
2.5% 50% 97.5%
-0.58 -0.25 0.13