Bayesian GLMM Part10

Author

Murray Logan

Published

09/09/2025

1 Preparations

Load the necessary libraries

library(broom) # for tidy output
library(knitr) # for kable
library(effects) # for partial effects plots
library(emmeans) # for estimating marginal means
library(MASS) # for glm.nb
library(tidyverse) # for data wrangling
library(brms)
library(tidybayes)
library(bayesplot)
library(broom.mixed)
library(rstan)
library(patchwork)
library(modelsummary)
library(DHARMa)
source("helperFunctions.R")

2 Original data

## Read the full (raw) LTMP extract and inspect its structure
ltmp <- read_csv(file = "../data/ltmp_full.csv")
glimpse(ltmp)

3 Data processing

  • Make a categorical version of Year
  • Combine Scores 4 and 5 into a single Score of 4 to be consistent across years
  • Make Score an ordered categorical variable
  • Generate a numeric version of categorical Score (as required for the ocat family). Note this must be a 1-indexed numeric.
  • Relevel Sector such that it is in the order North, Central, South
  • Ensure that there is a categorical version of Reef
  • Generate a numeric version of the categorical Reef
## Identify families ever recorded as hard coral (HC > 0 somewhere in the data)
coral_fams <- ltmp |>
  filter(HC > 0) |>
  pull(FAMILY_2021) |>
  unique()
## Aggregate to a single hard-coral cover value per transect per year,
## then write the processed data out and free the large raw object.
ltmp <- ltmp |>
  filter(FAMILY_2021 %in% coral_fams) |>
  dplyr::rename(P_CODE = P_CODE.y) |>
  distinct() |>
  group_by(P_CODE, AIMS_REEF_NAME, REPORT_YEAR, SITE_NO, TRANSECT_NO) |>
  summarise(
    HC = sum(HC, na.rm = TRUE),
    # NOTE(review): unlike HC, n.points is summed without na.rm = TRUE,
    # so any NA point counts will propagate -- confirm this is intentional
    n.points = sum(n.points),
    total.points = unique(total.points),
    .groups = "drop" # drop all grouping here (replaces a separate ungroup())
  ) |>
  dplyr::select(AIMS_REEF_NAME, REPORT_YEAR, SITE_NO, TRANSECT_NO, HC, n.points, total.points) |>
  filter(!is.na(AIMS_REEF_NAME)) |>
  droplevels()
write_csv(ltmp, file = "../data/ltmp.csv")
rm(ltmp) # remove the (large) object from the workspace
gc()

4 Scenario

For over 35 years, the AIMS long term monitoring program has performed benthic surveys of coral reefs on the Great Barrier Reef (GBR). To do so the team uses two main survey techniques 1) Manta Tow and 2) Photo transects. The current example focuses on data collected using the latter technique. Within each reef, there are three sites on the north east flank and within each site there are five permanent transects. Every year, the team return and take photos every meter along the transects. Once back in the laboratory, five points from every photo are scored according to what is represented underneath the point.

The main objective of long-term monitoring is to be able to report on status and trends. Specifically, what is the status of each major benthic group (such as hard coral, soft coral and macroalgae) and how are they changing (particularly in response to major disturbances).

For this example, we will focus on a single reef (Agincourt Reef No.1).

5 Read in the data

## Read the aggregated per-transect data written in the processing step above
ltmp <- read_csv("../data/ltmp.csv", trim_ws = TRUE)
Rows: 22125 Columns: 7
── Column specification ────────────────────────────────────────────────────────
Delimiter: ","
chr (1): AIMS_REEF_NAME
dbl (6): REPORT_YEAR, SITE_NO, TRANSECT_NO, HC, n.points, total.points

ℹ Use `spec()` to retrieve the full column specification for this data.
ℹ Specify the column types or set `show_col_types = FALSE` to quiet this message.
glimpse(ltmp) # quick column-by-column overview of the data just read
Rows: 22,125
Columns: 7
$ AIMS_REEF_NAME <chr> "Arlington Reef", "Arlington Reef", "Arlington Reef", "…
$ REPORT_YEAR    <dbl> 2006, 2006, 2006, 2006, 2006, 2006, 2006, 2006, 2006, 2…
$ SITE_NO        <dbl> 1, 1, 1, 1, 1, 2, 2, 2, 2, 2, 3, 3, 3, 3, 3, 1, 1, 1, 1…
$ TRANSECT_NO    <dbl> 1, 2, 3, 4, 5, 1, 2, 3, 4, 5, 1, 2, 3, 4, 5, 1, 2, 3, 4…
$ HC             <dbl> 10.00000, 10.50000, 10.50000, 8.50000, 25.00000, 19.000…
$ n.points       <dbl> 20, 21, 21, 17, 50, 38, 43, 28, 30, 40, 55, 35, 32, 40,…
$ total.points   <dbl> 200, 200, 200, 200, 200, 200, 200, 200, 196, 200, 200, …
AIMS_REEF_NAME REPORT_YEAR SITE_NO TRANSECT_NO HC n.points total.points
Arlington Reef 2006 1 1 10.0 20 200
Arlington Reef 2006 1 2 10.5 21 200
glimpse(ltmp) # (repeat of the earlier glimpse; output is identical)
Rows: 22,125
Columns: 7
$ AIMS_REEF_NAME <chr> "Arlington Reef", "Arlington Reef", "Arlington Reef", "…
$ REPORT_YEAR    <dbl> 2006, 2006, 2006, 2006, 2006, 2006, 2006, 2006, 2006, 2…
$ SITE_NO        <dbl> 1, 1, 1, 1, 1, 2, 2, 2, 2, 2, 3, 3, 3, 3, 3, 1, 1, 1, 1…
$ TRANSECT_NO    <dbl> 1, 2, 3, 4, 5, 1, 2, 3, 4, 5, 1, 2, 3, 4, 5, 1, 2, 3, 4…
$ HC             <dbl> 10.00000, 10.50000, 10.50000, 8.50000, 25.00000, 19.000…
$ n.points       <dbl> 20, 21, 21, 17, 50, 38, 43, 28, 30, 40, 55, 35, 32, 40,…
$ total.points   <dbl> 200, 200, 200, 200, 200, 200, 200, 200, 196, 200, 200, …
## Explore the first 6 rows of the data
head(ltmp) # returns a 6 x 7 tibble
# A tibble: 6 × 7
  AIMS_REEF_NAME REPORT_YEAR SITE_NO TRANSECT_NO    HC n.points total.points
  <chr>                <dbl>   <dbl>       <dbl> <dbl>    <dbl>        <dbl>
1 Arlington Reef        2006       1           1  10         20          200
2 Arlington Reef        2006       1           2  10.5       21          200
3 Arlington Reef        2006       1           3  10.5       21          200
4 Arlington Reef        2006       1           4   8.5       17          200
5 Arlington Reef        2006       1           5  25         50          200
6 Arlington Reef        2006       2           1  19         38          200
str(ltmp) # base-R structure view, including the readr column-spec attribute
spc_tbl_ [22,125 × 7] (S3: spec_tbl_df/tbl_df/tbl/data.frame)
 $ AIMS_REEF_NAME: chr [1:22125] "Arlington Reef" "Arlington Reef" "Arlington Reef" "Arlington Reef" ...
 $ REPORT_YEAR   : num [1:22125] 2006 2006 2006 2006 2006 ...
 $ SITE_NO       : num [1:22125] 1 1 1 1 1 2 2 2 2 2 ...
 $ TRANSECT_NO   : num [1:22125] 1 2 3 4 5 1 2 3 4 5 ...
 $ HC            : num [1:22125] 10 10.5 10.5 8.5 25 ...
 $ n.points      : num [1:22125] 20 21 21 17 50 38 43 28 30 40 ...
 $ total.points  : num [1:22125] 200 200 200 200 200 200 200 200 196 200 ...
 - attr(*, "spec")=
  .. cols(
  ..   AIMS_REEF_NAME = col_character(),
  ..   REPORT_YEAR = col_double(),
  ..   SITE_NO = col_double(),
  ..   TRANSECT_NO = col_double(),
  ..   HC = col_double(),
  ..   n.points = col_double(),
  ..   total.points = col_double()
  .. )
 - attr(*, "problems")=<externalptr> 
ltmp |> datawizard::data_codebook() # per-variable codebook: type, missingness, values
ltmp (22125 rows and 7 variables, 7 shown)

ID | Name           | Type      | Missings |                 Values |            N
---+----------------+-----------+----------+------------------------+-------------
1  | AIMS_REEF_NAME | character | 0 (0.0%) |    Agincourt Reef No.1 |  418 ( 1.9%)
   |                |           |          |         Arlington Reef |  135 ( 0.6%)
   |                |           |          |          Border Island |  315 ( 1.4%)
   |                |           |          |             Boult Reef |  120 ( 0.5%)
   |                |           |          |        Broomfield Reef |  435 ( 2.0%)
   |                |           |          |            Carter Reef |  315 ( 1.4%)
   |                |           |          |         Centipede Reef |  105 ( 0.5%)
   |                |           |          |           Chicken Reef |  420 ( 1.9%)
   |                |           |          | Chinaman Reef (22-102) |  419 ( 1.9%)
   |                |           |          |           Corbett Reef |   75 ( 0.3%)
   |                |           |          |                  (...) |             
---+----------------+-----------+----------+------------------------+-------------
2  | REPORT_YEAR    | numeric   | 0 (0.0%) |           [1995, 2023] |        22125
---+----------------+-----------+----------+------------------------+-------------
3  | SITE_NO        | numeric   | 0 (0.0%) |                      1 | 7378 (33.3%)
   |                |           |          |                      2 | 7374 (33.3%)
   |                |           |          |                      3 | 7373 (33.3%)
---+----------------+-----------+----------+------------------------+-------------
4  | TRANSECT_NO    | numeric   | 0 (0.0%) |                      1 | 4421 (20.0%)
   |                |           |          |                      2 | 4427 (20.0%)
   |                |           |          |                      3 | 4426 (20.0%)
   |                |           |          |                      4 | 4427 (20.0%)
   |                |           |          |                      5 | 4424 (20.0%)
---+----------------+-----------+----------+------------------------+-------------
5  | HC             | numeric   | 0 (0.0%) |             [0, 93.97] |        22125
---+----------------+-----------+----------+------------------------+-------------
6  | n.points       | numeric   | 0 (0.0%) |               [0, 239] |        22125
---+----------------+-----------+----------+------------------------+-------------
7  | total.points   | numeric   | 0 (0.0%) |              [45, 458] |        22125
----------------------------------------------------------------------------------
ltmp |> modelsummary::datasummary_skim(by = "SITE_NO") # summary statistics split by site
SITE_NO Unique Missing Pct. Mean SD Min Median Max Histogram
REPORT_YEAR 1.0 29 0 2009.8 8.5 1995.0 2010.0 2023.0
2.0 29 0 2009.8 8.5 1995.0 2010.0 2023.0
3.0 29 0 2009.8 8.5 1995.0 2010.0 2023.0
TRANSECT_NO 1.0 5 0 3.0 1.4 1.0 3.0 5.0
2.0 5 0 3.0 1.4 1.0 3.0 5.0
3.0 5 0 3.0 1.4 1.0 3.0 5.0
HC 1.0 1762 0 26.2 18.0 0.0 22.5 92.5
2.0 1686 0 25.2 17.0 0.0 22.0 94.0
3.0 1633 0 25.1 16.7 0.0 22.7 92.0
n.points 1.0 178 0 51.9 35.9 0.0 44.0 185.0
2.0 180 0 50.1 34.0 0.0 44.0 202.0
3.0 177 0 49.8 33.4 0.0 45.0 239.0
total.points 1.0 125 0 198.6 12.1 65.0 200.0 395.0
2.0 129 0 198.6 12.3 115.0 200.0 410.0
3.0 116 0 198.1 13.0 45.0 200.0 458.0

6 Data preparation

  • restrict to a single reef (Agincourt Reef No.1)
  • declare categorical variables as factors (this includes REPORT_YEAR)
  • declare random effects as factors
  • create unit names for sites and transects
## Restrict to Agincourt Reef No.1 and build unique factor labels for the
## random effects: site within reef, and transect within site.
ltmp_sub <- ltmp |>
  filter(AIMS_REEF_NAME == "Agincourt Reef No.1") |>
  droplevels() |>
  mutate(
    REEF_SITE = factor(paste(AIMS_REEF_NAME, SITE_NO, sep = " ")),
    REEF_SITE_TRANSECT = factor(paste(REEF_SITE, TRANSECT_NO, sep = " "))
  )

7 Exploratory Data Analysis

## Scatter of hard coral cover (HC) against survey year
ltmp_sub |>
  ggplot(aes(y = HC, x = REPORT_YEAR)) +
  geom_point()

## Same, with each transect's trajectory drawn as a line coloured by site
ltmp_sub |>
  ggplot(aes(y = HC, x = REPORT_YEAR)) +
  geom_line(aes(group = REEF_SITE_TRANSECT, colour = REEF_SITE), alpha = 1) +
  geom_point()

# Summary helper for stat_summary(): returns a one-row data frame that
# places the rounded mean of `val` as a text label at height `ypos`.
give.n <- function(val, ypos) {
  lbl <- round(mean(val), 1)
  data.frame(y = ypos, label = lbl)
}
## Violin plot of HC cover per year, with transect trajectories overlaid and
## the per-year mean of total.points printed as text at y = 0 via give.n().
## The original call also mapped `ymax = HC`, an aesthetic stat_summary does
## not understand (it triggered "Ignoring unknown aesthetics: ymax"), so that
## mapping has been removed.
ltmp_sub |>
  ggplot(aes(y = HC, x = factor(REPORT_YEAR))) +
  geom_violin(fill = "orange") +
  geom_line(aes(group = REEF_SITE_TRANSECT), alpha = 0.2) +
  geom_point() +
  stat_summary(
    geom = "text",
    aes(y = total.points),
    fun.data = give.n,
    fun.args = list(ypos = 0)
  )
Warning in stat_summary(geom = "text", aes(y = total.points, ymax = HC), :
Ignoring unknown aesthetics: ymax

##   fun = mean, aes(y = total.points)
## )

Based on this, it might be better to use the most recent year as the reference…

## Make the most recent survey year the factor reference level by ordering
## the year levels from newest to oldest.
ltmp_sub <- ltmp_sub |>
  mutate(
    fREPORT_YEAR = factor(
      REPORT_YEAR,
      levels = sort(unique(REPORT_YEAR), decreasing = TRUE)
    )
  )

8 Fit the model

In brms, the default priors are designed to be weakly informative. They are chosen to provide moderate regularisation (to help prevent over fitting) and help stabilise the computations.

Unlike rstanarm, brms models must be compiled before they start sampling. For most models, the compilation of the stan code takes around 45 seconds.

## Hierarchical binomial model: hard-coral points out of total photo points
## per transect, with year as a fixed effect and random intercepts for site
## and for transect within site.
ltmp_sub.form <- bf(
  n.points | trials(total.points) ~ fREPORT_YEAR +
    (1 | REEF_SITE) + (1 | REEF_SITE_TRANSECT),
  family = binomial(link = "logit")
)
options(width = 150) # widen console so the prior table prints on one line
## Inspect the default priors brms would use for this model/data.
## (Native pipe used for consistency with the rest of the document.)
ltmp_sub.form |> get_prior(data = ltmp_sub)
                prior     class             coef              group resp dpar nlpar lb ub       source
               (flat)         b                                                                default
               (flat)         b fREPORT_YEAR1995                                          (vectorized)
               (flat)         b fREPORT_YEAR1996                                          (vectorized)
               (flat)         b fREPORT_YEAR1997                                          (vectorized)
               (flat)         b fREPORT_YEAR1998                                          (vectorized)
               (flat)         b fREPORT_YEAR1999                                          (vectorized)
               (flat)         b fREPORT_YEAR2000                                          (vectorized)
               (flat)         b fREPORT_YEAR2001                                          (vectorized)
               (flat)         b fREPORT_YEAR2002                                          (vectorized)
               (flat)         b fREPORT_YEAR2003                                          (vectorized)
               (flat)         b fREPORT_YEAR2004                                          (vectorized)
               (flat)         b fREPORT_YEAR2005                                          (vectorized)
               (flat)         b fREPORT_YEAR2006                                          (vectorized)
               (flat)         b fREPORT_YEAR2007                                          (vectorized)
               (flat)         b fREPORT_YEAR2008                                          (vectorized)
               (flat)         b fREPORT_YEAR2009                                          (vectorized)
               (flat)         b fREPORT_YEAR2010                                          (vectorized)
               (flat)         b fREPORT_YEAR2011                                          (vectorized)
               (flat)         b fREPORT_YEAR2012                                          (vectorized)
               (flat)         b fREPORT_YEAR2013                                          (vectorized)
               (flat)         b fREPORT_YEAR2014                                          (vectorized)
               (flat)         b fREPORT_YEAR2015                                          (vectorized)
               (flat)         b fREPORT_YEAR2016                                          (vectorized)
               (flat)         b fREPORT_YEAR2017                                          (vectorized)
               (flat)         b fREPORT_YEAR2018                                          (vectorized)
               (flat)         b fREPORT_YEAR2019                                          (vectorized)
               (flat)         b fREPORT_YEAR2020                                          (vectorized)
               (flat)         b fREPORT_YEAR2021                                          (vectorized)
 student_t(3, 0, 2.5) Intercept                                                                default
 student_t(3, 0, 2.5)        sd                                                      0         default
 student_t(3, 0, 2.5)        sd                           REEF_SITE                  0    (vectorized)
 student_t(3, 0, 2.5)        sd        Intercept          REEF_SITE                  0    (vectorized)
 student_t(3, 0, 2.5)        sd                  REEF_SITE_TRANSECT                  0    (vectorized)
 student_t(3, 0, 2.5)        sd        Intercept REEF_SITE_TRANSECT                  0    (vectorized)
options(width=80) # restore the default console width widened above

The following link provides some guidance about defining priors. [https://github.com/stan-dev/stan/wiki/Prior-Choice-Recommendations]

When defining our own priors, we typically do not want them to be scaled.

If we wanted to define our own priors that were less vague, yet still not likely to bias the outcomes, we could try the following priors (mainly plucked out of thin air):

  • \(\beta_0\): normal centred at 84 with a standard deviation of 5.9
    • mean of 84: since median(starling$MASS)
    • sd of 5.9: since mad(starling$MASS)
  • \(\beta_1\): normal centred at 0 with a standard deviation of 13
    • sd of 13: since sd(starling$MASS) / apply(model.matrix(~MONTH*SITUATION, starling)[, -1], 2, sd)
  • \(\beta_{2-4}\): normal centred at 0 with a standard deviation of 15
    • sd of 15: since sd(starling$MASS) / apply(model.matrix(~MONTH*SITUATION, starling)[, -1], 2, sd)
  • \(\beta_{5-7}\): normal centred at 0 with a standard deviation of 20
    • sd of 20: since sd(starling$MASS) / apply(model.matrix(~MONTH*SITUATION, starling)[, -1], 2, sd)
  • \(\sigma\): gamma with shape parameters 6.5 and 1
    • 6.5: since sd(starling$MASS)
  • \(\sigma_j\): half-cauchy with parameters 0 and 2.5.
    • 2.5: since sqrt(sd(starling$MASS))
  • \(\Sigma\): decov with:
    • regularisation: the exponent for a LKJ prior on the correlation matrix. A value of 1 (default) implies a joint uniform prior
    • concentration: the concentration parameter for a symmetric Dirichlet distribution. A value of 1 (default) implies a joint uniform distribution
    • shape and scale: the shape and scale parameters for a gamma prior on the scale and scale parameters of the decov prior. A value of 1 for both (default) simplifies the gamma prior to a unit-exponential distribution.

Note, for hierarchical models, the model will tend to want to have a large \(\sigma\) in order to fit the data better. It is a good idea to regularise this tendency by applying a prior that has most mass around zero. Suitable candidates include:

  • half-t: as the degrees of freedom approach infinity, this will approach a half-normal
  • half-cauchy: this is essentially a half-t with 1 degree of freedom
  • exponential

I will also overlay the raw data for comparison.

## Empirical summaries of coral cover on the logit (link) scale, per year.
## The median and MAD suggest sensible locations/scales for the priors.
ltmp_sub |>
  mutate(logit_cover = qlogis(n.points / total.points)) |>
  group_by(fREPORT_YEAR) |>
  summarise(
    Median = median(logit_cover),
    MAD = mad(logit_cover),
    N = mean(total.points)
  )
# A tibble: 28 × 4
   fREPORT_YEAR Median   MAD     N
   <fct>         <dbl> <dbl> <dbl>
 1 2023         -0.758 0.303  202.
 2 2021         -1.19  0.284  201.
 3 2020         -1.30  0.291  201.
 4 2019         -1.63  0.236  201.
 5 2018         -1.99  0.263  200 
 6 2017         -2.09  0.242  200 
 7 2016         -1.15  0.246  200.
 8 2015         -1.42  0.352  200 
 9 2014         -1.66  0.295  200 
10 2013         -1.90  0.360  200 
# ℹ 18 more rows
## Weakly informative priors on the logit (link) scale:
priors <- prior(normal(-0.7, 0.3), class = "Intercept") + # near the observed median logit cover
  prior(normal(0, 2), class = "b") + # year contrasts: wide but proper
  prior(student_t(3, 0, 0.5), class = "sd") # shrink group-level sds towards zero
ltmp_sub.form <- bf(
  n.points | trials(total.points) ~ fREPORT_YEAR +
    (1 | REEF_SITE) + (1 | REEF_SITE_TRANSECT),
  family = binomial(link = "logit")
)
## Sample from the priors only (ignoring the data) to check they are plausible.
ltmp_sub.brm2 <- brm(ltmp_sub.form,
  data = ltmp_sub,
  prior = priors,
  sample_prior = "only", # prior predictive: likelihood not evaluated
  iter = 10000,
  warmup = 5000,
  chains = 3, cores = 3,
  thin = 10,
  refresh = 0, # suppress per-iteration progress output
  control = list(adapt_delta = 0.99, max_treedepth = 20), # conservative sampler settings
  backend = "rstan"
)
Compiling Stan program...
Start sampling
## Prior predictive effects plot (brms defaults all 'trials' to 1 here)
ltmp_sub.brm2 |>
  conditional_effects() |>
  plot(points = TRUE)
Setting all 'trials' variables to 1 by default if not specified otherwise.

## Prior predictive plots conditioned on a typical number of photo points (200)
ltmp_sub.brm2 |>
  conditional_effects(conditions = data.frame(total.points = 200)) |>
  plot(points = TRUE)

## Same again, without overlaying the raw data
ltmp_sub.brm2 |>
  conditional_effects(conditions = data.frame(total.points = 200)) |>
  plot()

The above seem sufficiently wide whilst at the same time not providing any encouragement for the sampler to wander off into very unsupported areas.

The following takes approx 500 seconds

## Refit with the likelihood included; sample_prior = "yes" retains prior
## draws alongside the posterior for later prior-vs-posterior comparisons.
ltmp_sub.brm3 <- update(ltmp_sub.brm2,
  sample_prior = "yes",
  chains = 3, cores = 3,
  control = list(adapt_delta = 0.99, max_treedepth = 20),
  thin = 10, iter = 10000, warmup = 5000,
  refresh = 100
)
The desired updates require recompiling the model
Compiling Stan program...
Start sampling
Warning: There were 3 divergent transitions after warmup. See
https://mc-stan.org/misc/warnings.html#divergent-transitions-after-warmup
to find out why this is a problem and how to eliminate them.
Warning: Examine the pairs() plot to diagnose sampling problems
## Posterior conditional effects at 200 photo points, raw data overlaid
ltmp_sub.brm3 |>
  conditional_effects(conditions = data.frame(total.points = 200)) |>
  plot(points = TRUE)

## List all parameter (and sampler diagnostic) names in the posterior
ltmp_sub.brm3 |> get_variables()
 [1] "b_Intercept"                                            
 [2] "b_fREPORT_YEAR2021"                                     
 [3] "b_fREPORT_YEAR2020"                                     
 [4] "b_fREPORT_YEAR2019"                                     
 [5] "b_fREPORT_YEAR2018"                                     
 [6] "b_fREPORT_YEAR2017"                                     
 [7] "b_fREPORT_YEAR2016"                                     
 [8] "b_fREPORT_YEAR2015"                                     
 [9] "b_fREPORT_YEAR2014"                                     
[10] "b_fREPORT_YEAR2013"                                     
[11] "b_fREPORT_YEAR2012"                                     
[12] "b_fREPORT_YEAR2011"                                     
[13] "b_fREPORT_YEAR2010"                                     
[14] "b_fREPORT_YEAR2009"                                     
[15] "b_fREPORT_YEAR2008"                                     
[16] "b_fREPORT_YEAR2007"                                     
[17] "b_fREPORT_YEAR2006"                                     
[18] "b_fREPORT_YEAR2005"                                     
[19] "b_fREPORT_YEAR2004"                                     
[20] "b_fREPORT_YEAR2003"                                     
[21] "b_fREPORT_YEAR2002"                                     
[22] "b_fREPORT_YEAR2001"                                     
[23] "b_fREPORT_YEAR2000"                                     
[24] "b_fREPORT_YEAR1999"                                     
[25] "b_fREPORT_YEAR1998"                                     
[26] "b_fREPORT_YEAR1997"                                     
[27] "b_fREPORT_YEAR1996"                                     
[28] "b_fREPORT_YEAR1995"                                     
[29] "sd_REEF_SITE__Intercept"                                
[30] "sd_REEF_SITE_TRANSECT__Intercept"                       
[31] "Intercept"                                              
[32] "r_REEF_SITE[Agincourt.Reef.No.1.1,Intercept]"           
[33] "r_REEF_SITE[Agincourt.Reef.No.1.2,Intercept]"           
[34] "r_REEF_SITE[Agincourt.Reef.No.1.3,Intercept]"           
[35] "r_REEF_SITE_TRANSECT[Agincourt.Reef.No.1.1.1,Intercept]"
[36] "r_REEF_SITE_TRANSECT[Agincourt.Reef.No.1.1.2,Intercept]"
[37] "r_REEF_SITE_TRANSECT[Agincourt.Reef.No.1.1.3,Intercept]"
[38] "r_REEF_SITE_TRANSECT[Agincourt.Reef.No.1.1.4,Intercept]"
[39] "r_REEF_SITE_TRANSECT[Agincourt.Reef.No.1.1.5,Intercept]"
[40] "r_REEF_SITE_TRANSECT[Agincourt.Reef.No.1.2.1,Intercept]"
[41] "r_REEF_SITE_TRANSECT[Agincourt.Reef.No.1.2.2,Intercept]"
[42] "r_REEF_SITE_TRANSECT[Agincourt.Reef.No.1.2.3,Intercept]"
[43] "r_REEF_SITE_TRANSECT[Agincourt.Reef.No.1.2.4,Intercept]"
[44] "r_REEF_SITE_TRANSECT[Agincourt.Reef.No.1.2.5,Intercept]"
[45] "r_REEF_SITE_TRANSECT[Agincourt.Reef.No.1.3.1,Intercept]"
[46] "r_REEF_SITE_TRANSECT[Agincourt.Reef.No.1.3.2,Intercept]"
[47] "r_REEF_SITE_TRANSECT[Agincourt.Reef.No.1.3.3,Intercept]"
[48] "r_REEF_SITE_TRANSECT[Agincourt.Reef.No.1.3.4,Intercept]"
[49] "r_REEF_SITE_TRANSECT[Agincourt.Reef.No.1.3.5,Intercept]"
[50] "prior_Intercept"                                        
[51] "prior_b"                                                
[52] "prior_sd_REEF_SITE"                                     
[53] "prior_sd_REEF_SITE_TRANSECT"                            
[54] "lprior"                                                 
[55] "lp__"                                                   
[56] "accept_stat__"                                          
[57] "stepsize__"                                             
[58] "treedepth__"                                            
[59] "n_leapfrog__"                                           
[60] "divergent__"                                            
[61] "energy__"                                               
## Test the point hypothesis that the 2021 effect is zero and plot the
## prior vs posterior density of the contrast.
## (Native pipe used for consistency with the rest of the document.)
ltmp_sub.brm3 |>
  hypothesis("fREPORT_YEAR2021=0") |>
  plot()

## ltmp_sub.brm3 %>% SUYR_prior_and_posterior()

9 MCMC sampling diagnostics

The bayesplot package offers a range of MCMC diagnostics as well as Posterior Probability Checks (PPC), all of which have a convenient plot() interface. Lets start with the MCMC diagnostics.

See list of available diagnostics by name
available_mcmc()
bayesplot MCMC module:
  mcmc_acf
  mcmc_acf_bar
  mcmc_areas
  mcmc_areas_ridges
  mcmc_combo
  mcmc_dens
  mcmc_dens_chains
  mcmc_dens_overlay
  mcmc_hex
  mcmc_hist
  mcmc_hist_by_chain
  mcmc_intervals
  mcmc_neff
  mcmc_neff_hist
  mcmc_nuts_acceptance
  mcmc_nuts_divergence
  mcmc_nuts_energy
  mcmc_nuts_stepsize
  mcmc_nuts_treedepth
  mcmc_pairs
  mcmc_parcoord
  mcmc_rank_ecdf
  mcmc_rank_hist
  mcmc_rank_overlay
  mcmc_recover_hist
  mcmc_recover_intervals
  mcmc_recover_scatter
  mcmc_rhat
  mcmc_rhat_hist
  mcmc_scatter
  mcmc_trace
  mcmc_trace_highlight
  mcmc_violin

Of these, we will focus on:

  • trace: this plots the estimates of each parameter over the post-warmup length of each MCMC chain. Each chain is plotted in a different shade of blue, with each parameter in its own facet. Ideally, each trace should just look like noise without any discernible drift and each of the traces for a specific parameter should look the same (i.e, should not be displaced above or below any other trace for that parameter).
## Collect the names of all fixed-effect (b_) and variance (sd_) parameters.
## (Trailing ".*" is redundant in a detection regex, so it is omitted.)
pars <- ltmp_sub.brm3 |>
  get_variables() |>
  str_subset("^b_|^sd")
pars
 [1] "b_Intercept"                      "b_fREPORT_YEAR2021"              
 [3] "b_fREPORT_YEAR2020"               "b_fREPORT_YEAR2019"              
 [5] "b_fREPORT_YEAR2018"               "b_fREPORT_YEAR2017"              
 [7] "b_fREPORT_YEAR2016"               "b_fREPORT_YEAR2015"              
 [9] "b_fREPORT_YEAR2014"               "b_fREPORT_YEAR2013"              
[11] "b_fREPORT_YEAR2012"               "b_fREPORT_YEAR2011"              
[13] "b_fREPORT_YEAR2010"               "b_fREPORT_YEAR2009"              
[15] "b_fREPORT_YEAR2008"               "b_fREPORT_YEAR2007"              
[17] "b_fREPORT_YEAR2006"               "b_fREPORT_YEAR2005"              
[19] "b_fREPORT_YEAR2004"               "b_fREPORT_YEAR2003"              
[21] "b_fREPORT_YEAR2002"               "b_fREPORT_YEAR2001"              
[23] "b_fREPORT_YEAR2000"               "b_fREPORT_YEAR1999"              
[25] "b_fREPORT_YEAR1998"               "b_fREPORT_YEAR1997"              
[27] "b_fREPORT_YEAR1996"               "b_fREPORT_YEAR1995"              
[29] "sd_REEF_SITE__Intercept"          "sd_REEF_SITE_TRANSECT__Intercept"
ltmp_sub.brm3 |> mcmc_plot(type = "trace", variable = pars) # per-chain trace plots

The chains appear well mixed and very similar

  • acf_bar (autocorrelation function): plots the autocorrelation between successive MCMC sample lags for each parameter and each chain
ltmp_sub.brm3 |> mcmc_plot(type = "acf_bar", variable = pars) # lag autocorrelation per parameter/chain

There is no evidence of auto-correlation in the MCMC samples

  • rhat_hist: Rhat is a scale reduction factor measure of convergence between the chains. The closer the values are to 1, the more the chains have converged. Values greater than 1.05 indicate a lack of convergence. There will be an Rhat value for each parameter estimated.
ltmp_sub.brm3 |> mcmc_plot(type = "rhat_hist") # histogram of Rhat convergence diagnostics
`stat_bin()` using `bins = 30`. Pick better value with `binwidth`.

All Rhat values are below 1.05, suggesting the chains have converged.

  • neff_hist (number of effective samples): the ratio of the number of effective samples (those not rejected by the sampler) to the number of samples provides an indication of the effectiveness (and efficiency) of the MCMC sampler. Ratios that are less than 0.5 for a parameter suggest that the sampler spent considerable time in difficult areas of the sampling domain and rejected more than half of the samples (replacing them with the previous effective sample).

    If the ratios are low, tightening the priors may help.

## NOTE: the original called this on ltmp_sub.brm2 (the prior-only model);
## every other diagnostic in this section examines the fitted model, so the
## effective-sample-size ratios should come from ltmp_sub.brm3.
ltmp_sub.brm3 |> mcmc_plot(type = "neff_hist")
`stat_bin()` using `bins = 30`. Pick better value with `binwidth`.

Ratios all very high.

More diagnostics
ltmp_sub.brm3 |> mcmc_plot(type = "combo", variable = pars) # trace + density per parameter

ltmp_sub.brm3 |> mcmc_plot(type = "violin", variable = pars) # posterior violins per parameter

The rstan package offers a range of MCMC diagnostics. Lets start with the MCMC diagnostics.

Of these, we will focus on:

  • stan_trace: this plots the estimates of each parameter over the post-warmup length of each MCMC chain. Each chain is plotted in a different colour, with each parameter in its own facet. Ideally, each trace should just look like noise without any discernible drift and each of the traces for a specific parameter should look the same (i.e, should not be displaced above or below any other trace for that parameter).
## Collect the names of all fixed-effect (b_) and variance (sd_) parameters.
## (Trailing ".*" is redundant in a detection regex, so it is omitted.)
pars <- ltmp_sub.brm3 |>
  get_variables() |>
  str_subset("^b_|^sd")
pars
 [1] "b_Intercept"                      "b_fREPORT_YEAR2021"              
 [3] "b_fREPORT_YEAR2020"               "b_fREPORT_YEAR2019"              
 [5] "b_fREPORT_YEAR2018"               "b_fREPORT_YEAR2017"              
 [7] "b_fREPORT_YEAR2016"               "b_fREPORT_YEAR2015"              
 [9] "b_fREPORT_YEAR2014"               "b_fREPORT_YEAR2013"              
[11] "b_fREPORT_YEAR2012"               "b_fREPORT_YEAR2011"              
[13] "b_fREPORT_YEAR2010"               "b_fREPORT_YEAR2009"              
[15] "b_fREPORT_YEAR2008"               "b_fREPORT_YEAR2007"              
[17] "b_fREPORT_YEAR2006"               "b_fREPORT_YEAR2005"              
[19] "b_fREPORT_YEAR2004"               "b_fREPORT_YEAR2003"              
[21] "b_fREPORT_YEAR2002"               "b_fREPORT_YEAR2001"              
[23] "b_fREPORT_YEAR2000"               "b_fREPORT_YEAR1999"              
[25] "b_fREPORT_YEAR1998"               "b_fREPORT_YEAR1997"              
[27] "b_fREPORT_YEAR1996"               "b_fREPORT_YEAR1995"              
[29] "sd_REEF_SITE__Intercept"          "sd_REEF_SITE_TRANSECT__Intercept"
## rstan trace plots on the underlying stanfit object
ltmp_sub.brm3$fit |>
  stan_trace(pars = pars)

The chains appear well mixed and very similar

  • stan_ac (auto-correlation function): plots the auto-correlation between successive MCMC sample lags for each parameter and each chain
## rstan autocorrelation plots on the underlying stanfit object
ltmp_sub.brm3$fit |>
  stan_ac(pars = pars)

There is no evidence of auto-correlation in the MCMC samples

  • stan_rhat: Rhat is a scale reduction factor measure of convergence between the chains. The closer the values are to 1, the more the chains have converged. Values greater than 1.05 indicate a lack of convergence. There will be an Rhat value for each parameter estimated.
ltmp_sub.brm3$fit |> stan_rhat() # histogram of Rhat values (rstan version)
`stat_bin()` using `bins = 30`. Pick better value with `binwidth`.

All Rhat values are below 1.05, suggesting the chains have converged.

  • stan_ess (number of effective samples): the ratio of the number of effective samples (those not rejected by the sampler) to the number of samples provides an indication of the effectiveness (and efficiency) of the MCMC sampler. Ratios that are less than 0.5 for a parameter suggest that the sampler spent considerable time in difficult areas of the sampling domain and rejected more than half of the samples (replacing them with the previous effective sample).

    If the ratios are low, tightening the priors may help.

ltmp_sub.brm3$fit |> stan_ess() # effective-sample-size ratios (rstan version)
`stat_bin()` using `bins = 30`. Pick better value with `binwidth`.

Ratios all very high.

## Posterior densities drawn separately for each chain
ltmp_sub.brm3$fit |>
  stan_dens(separate_chains = TRUE, pars = pars)

10 Model validation

Posterior predictive checks provide additional diagnostics about the fit of the model. Specifically, they provide a comparison between predictions drawn from the model and the observed data used to train the model.

See list of available diagnostics by name
available_ppc()
bayesplot PPC module:
  ppc_bars
  ppc_bars_grouped
  ppc_boxplot
  ppc_dens
  ppc_dens_overlay
  ppc_dens_overlay_grouped
  ppc_dots
  ppc_ecdf_overlay
  ppc_ecdf_overlay_grouped
  ppc_error_binned
  ppc_error_hist
  ppc_error_hist_grouped
  ppc_error_scatter
  ppc_error_scatter_avg
  ppc_error_scatter_avg_grouped
  ppc_error_scatter_avg_vs_x
  ppc_freqpoly
  ppc_freqpoly_grouped
  ppc_hist
  ppc_intervals
  ppc_intervals_grouped
  ppc_km_overlay
  ppc_km_overlay_grouped
  ppc_loo_intervals
  ppc_loo_pit_ecdf
  ppc_loo_pit_overlay
  ppc_loo_pit_qq
  ppc_loo_ribbon
  ppc_pit_ecdf
  ppc_pit_ecdf_grouped
  ppc_ribbon
  ppc_ribbon_grouped
  ppc_rootogram
  ppc_scatter
  ppc_scatter_avg
  ppc_scatter_avg_grouped
  ppc_stat
  ppc_stat_2d
  ppc_stat_freqpoly
  ppc_stat_freqpoly_grouped
  ppc_stat_grouped
  ppc_violin_grouped
  • dens_overlay: plots the density distribution of the observed data (black line) overlaid on top of density distributions generated from draws from the model (light blue; 100 draws are requested below). Ideally, these realisations should be roughly consistent with the observed data.
ltmp_sub.brm3 |> pp_check(type = "dens_overlay", ndraws = 100)

The model draws appear to be consistent with the observed data.

  • error_scatter_avg: this plots the observed values against the average residuals. As with a regular residual plot, we do not want to see any patterns here. Note, this is not really that useful for models that involve a binomial response.
## ltmp_sub.brm3 %>% pp_check(type = 'error_scatter_avg')

This is not really interpretable

  • intervals: plots the observed data overlaid on top of posterior predictions associated with each level of the predictor. Ideally, all of the observed data should fall within the predictive intervals.
ltmp_sub.brm3 %>% pp_check(group = "REEF", type = "intervals")
Using all posterior draws for ppc type 'intervals' by default.

The shinystan package allows the full suite of MCMC diagnostics and posterior predictive checks to be accessed via a web interface.

# library(shinystan)
# launch_shinystan(ltmp_sub.brm2)

DHARMa residuals provide very useful diagnostics. Unfortunately, we cannot directly use the simulateResiduals() function to generate the simulated residuals. However, if we are willing to calculate some of the components ourselves, we can still obtain the simulated residuals from the fitted stan model.

We need to supply:

  • simulated (predicted) responses associated with each observation.
  • observed values
  • fitted (predicted) responses (averaged) associated with each observation
ltmp_sub.resids <- make_brms_dharma_res(ltmp_sub.brm3, integerResponse = TRUE)
wrap_elements(~ testUniformity(ltmp_sub.resids)) +
  wrap_elements(~ plotResiduals(ltmp_sub.resids, form = factor(rep(1, nrow(ltmp_sub))))) +
  wrap_elements(~ plotResiduals(ltmp_sub.resids, quantreg = TRUE)) +
  wrap_elements(~ testDispersion(ltmp_sub.resids))

resids2 <- recalculateResiduals(ltmp_sub.resids, group = unique(ltmp_sub$fREPORT_YEAR))
testTemporalAutocorrelation(resids2, time = unique(ltmp_sub$fREPORT_YEAR))


    Durbin-Watson test

data:  simulationOutput$scaledResiduals ~ 1
DW = 1.8473, p-value = 0.6824
alternative hypothesis: true autocorrelation is not 0
library(geoR)
Warning: no DISPLAY variable so Tk is not available
--------------------------------------------------------------
 Analysis of Geostatistical Data
 For an Introduction to geoR go to http://www.leg.ufpr.br/geoR
 geoR version 1.9-6 (built on 2025-08-29) is now loaded
--------------------------------------------------------------
autocor_check(ltmp_sub, ltmp_sub.brm3, variable = "fREPORT_YEAR", n.sim = 250)
variog: computing omnidirectional variogram
variog: co-locatted data found, adding one bin at the origin
variog: computing omnidirectional variogram
variog: co-locatted data found, adding one bin at the origin
variog: computing omnidirectional variogram
variog: co-locatted data found, adding one bin at the origin
variog: computing omnidirectional variogram
variog: co-locatted data found, adding one bin at the origin
variog: computing omnidirectional variogram
variog: co-locatted data found, adding one bin at the origin
variog: computing omnidirectional variogram
variog: co-locatted data found, adding one bin at the origin
variog: computing omnidirectional variogram
variog: co-locatted data found, adding one bin at the origin
variog: computing omnidirectional variogram
variog: co-locatted data found, adding one bin at the origin
variog: computing omnidirectional variogram
variog: co-locatted data found, adding one bin at the origin
variog: computing omnidirectional variogram
variog: co-locatted data found, adding one bin at the origin
variog: computing omnidirectional variogram
variog: co-locatted data found, adding one bin at the origin
variog: computing omnidirectional variogram
variog: co-locatted data found, adding one bin at the origin
variog: computing omnidirectional variogram
variog: co-locatted data found, adding one bin at the origin
variog: computing omnidirectional variogram
variog: co-locatted data found, adding one bin at the origin
variog: computing omnidirectional variogram
variog: co-locatted data found, adding one bin at the origin
variog: computing omnidirectional variogram
variog: co-locatted data found, adding one bin at the origin
variog: computing omnidirectional variogram
variog: co-locatted data found, adding one bin at the origin
variog: computing omnidirectional variogram
variog: co-locatted data found, adding one bin at the origin
variog: computing omnidirectional variogram
variog: co-locatted data found, adding one bin at the origin
variog: computing omnidirectional variogram
variog: co-locatted data found, adding one bin at the origin
variog: computing omnidirectional variogram
variog: co-locatted data found, adding one bin at the origin
variog: computing omnidirectional variogram
variog: co-locatted data found, adding one bin at the origin
variog: computing omnidirectional variogram
variog: co-locatted data found, adding one bin at the origin
variog: computing omnidirectional variogram
variog: co-locatted data found, adding one bin at the origin
variog: computing omnidirectional variogram
variog: co-locatted data found, adding one bin at the origin
variog: computing omnidirectional variogram
variog: co-locatted data found, adding one bin at the origin
variog: computing omnidirectional variogram
variog: co-locatted data found, adding one bin at the origin
variog: computing omnidirectional variogram
variog: co-locatted data found, adding one bin at the origin
variog: computing omnidirectional variogram
variog: co-locatted data found, adding one bin at the origin
variog: computing omnidirectional variogram
variog: co-locatted data found, adding one bin at the origin
variog: computing omnidirectional variogram
variog: co-locatted data found, adding one bin at the origin
variog: computing omnidirectional variogram
variog: co-locatted data found, adding one bin at the origin
variog: computing omnidirectional variogram
variog: co-locatted data found, adding one bin at the origin
variog: computing omnidirectional variogram
variog: co-locatted data found, adding one bin at the origin
variog: computing omnidirectional variogram
variog: co-locatted data found, adding one bin at the origin
variog: computing omnidirectional variogram
variog: co-locatted data found, adding one bin at the origin
variog: computing omnidirectional variogram
variog: co-locatted data found, adding one bin at the origin
variog: computing omnidirectional variogram
variog: co-locatted data found, adding one bin at the origin
variog: computing omnidirectional variogram
variog: co-locatted data found, adding one bin at the origin
variog: computing omnidirectional variogram
variog: co-locatted data found, adding one bin at the origin
variog: computing omnidirectional variogram
variog: co-locatted data found, adding one bin at the origin
variog: computing omnidirectional variogram
variog: co-locatted data found, adding one bin at the origin
variog: computing omnidirectional variogram
variog: co-locatted data found, adding one bin at the origin
variog: computing omnidirectional variogram
variog: co-locatted data found, adding one bin at the origin
variog: computing omnidirectional variogram
variog: co-locatted data found, adding one bin at the origin
variog: computing omnidirectional variogram
variog: co-locatted data found, adding one bin at the origin
variog: computing omnidirectional variogram
variog: co-locatted data found, adding one bin at the origin
variog: computing omnidirectional variogram
variog: co-locatted data found, adding one bin at the origin
variog: computing omnidirectional variogram
variog: co-locatted data found, adding one bin at the origin
variog: computing omnidirectional variogram
variog: co-locatted data found, adding one bin at the origin
variog: computing omnidirectional variogram
variog: co-locatted data found, adding one bin at the origin
variog: computing omnidirectional variogram
variog: co-locatted data found, adding one bin at the origin
variog: computing omnidirectional variogram
variog: co-locatted data found, adding one bin at the origin
variog: computing omnidirectional variogram
variog: co-locatted data found, adding one bin at the origin
variog: computing omnidirectional variogram
variog: co-locatted data found, adding one bin at the origin
variog: computing omnidirectional variogram
variog: co-locatted data found, adding one bin at the origin
variog: computing omnidirectional variogram
variog: co-locatted data found, adding one bin at the origin
variog: computing omnidirectional variogram
variog: co-locatted data found, adding one bin at the origin
variog: computing omnidirectional variogram
variog: co-locatted data found, adding one bin at the origin
variog: computing omnidirectional variogram
variog: co-locatted data found, adding one bin at the origin
variog: computing omnidirectional variogram
variog: co-locatted data found, adding one bin at the origin
variog: computing omnidirectional variogram
variog: co-locatted data found, adding one bin at the origin
variog: computing omnidirectional variogram
variog: co-locatted data found, adding one bin at the origin
variog: computing omnidirectional variogram
variog: co-locatted data found, adding one bin at the origin
variog: computing omnidirectional variogram
variog: co-locatted data found, adding one bin at the origin
variog: computing omnidirectional variogram
variog: co-locatted data found, adding one bin at the origin
variog: computing omnidirectional variogram
variog: co-locatted data found, adding one bin at the origin
variog: computing omnidirectional variogram
variog: co-locatted data found, adding one bin at the origin
variog: computing omnidirectional variogram
variog: co-locatted data found, adding one bin at the origin
variog: computing omnidirectional variogram
variog: co-locatted data found, adding one bin at the origin
variog: computing omnidirectional variogram
variog: co-locatted data found, adding one bin at the origin
variog: computing omnidirectional variogram
variog: co-locatted data found, adding one bin at the origin
variog: computing omnidirectional variogram
variog: co-locatted data found, adding one bin at the origin
variog: computing omnidirectional variogram
variog: co-locatted data found, adding one bin at the origin
variog: computing omnidirectional variogram
variog: co-locatted data found, adding one bin at the origin
variog: computing omnidirectional variogram
variog: co-locatted data found, adding one bin at the origin
variog: computing omnidirectional variogram
variog: co-locatted data found, adding one bin at the origin
variog: computing omnidirectional variogram
variog: co-locatted data found, adding one bin at the origin
variog: computing omnidirectional variogram
variog: co-locatted data found, adding one bin at the origin
variog: computing omnidirectional variogram
variog: co-locatted data found, adding one bin at the origin
variog: computing omnidirectional variogram
variog: co-locatted data found, adding one bin at the origin
variog: computing omnidirectional variogram
variog: co-locatted data found, adding one bin at the origin
variog: computing omnidirectional variogram
variog: co-locatted data found, adding one bin at the origin
variog: computing omnidirectional variogram
variog: co-locatted data found, adding one bin at the origin
variog: computing omnidirectional variogram
variog: co-locatted data found, adding one bin at the origin
variog: computing omnidirectional variogram
variog: co-locatted data found, adding one bin at the origin
variog: computing omnidirectional variogram
variog: co-locatted data found, adding one bin at the origin
variog: computing omnidirectional variogram
variog: co-locatted data found, adding one bin at the origin
variog: computing omnidirectional variogram
variog: co-locatted data found, adding one bin at the origin
variog: computing omnidirectional variogram
variog: co-locatted data found, adding one bin at the origin
variog: computing omnidirectional variogram
variog: co-locatted data found, adding one bin at the origin
variog: computing omnidirectional variogram
variog: co-locatted data found, adding one bin at the origin
variog: computing omnidirectional variogram
variog: co-locatted data found, adding one bin at the origin
variog: computing omnidirectional variogram
variog: co-locatted data found, adding one bin at the origin
variog: computing omnidirectional variogram
variog: co-locatted data found, adding one bin at the origin
variog: computing omnidirectional variogram
variog: co-locatted data found, adding one bin at the origin
variog: computing omnidirectional variogram
variog: co-locatted data found, adding one bin at the origin
variog: computing omnidirectional variogram
variog: co-locatted data found, adding one bin at the origin
variog: computing omnidirectional variogram
variog: co-locatted data found, adding one bin at the origin
variog: computing omnidirectional variogram
variog: co-locatted data found, adding one bin at the origin
variog: computing omnidirectional variogram
variog: co-locatted data found, adding one bin at the origin
variog: computing omnidirectional variogram
variog: co-locatted data found, adding one bin at the origin
variog: computing omnidirectional variogram
variog: co-locatted data found, adding one bin at the origin
variog: computing omnidirectional variogram
variog: co-locatted data found, adding one bin at the origin
variog: computing omnidirectional variogram
variog: co-locatted data found, adding one bin at the origin
variog: computing omnidirectional variogram
variog: co-locatted data found, adding one bin at the origin
variog: computing omnidirectional variogram
variog: co-locatted data found, adding one bin at the origin
variog: computing omnidirectional variogram
variog: co-locatted data found, adding one bin at the origin
variog: computing omnidirectional variogram
variog: co-locatted data found, adding one bin at the origin
variog: computing omnidirectional variogram
variog: co-locatted data found, adding one bin at the origin
variog: computing omnidirectional variogram
variog: co-locatted data found, adding one bin at the origin
variog: computing omnidirectional variogram
variog: co-locatted data found, adding one bin at the origin
variog: computing omnidirectional variogram
variog: co-locatted data found, adding one bin at the origin
variog: computing omnidirectional variogram
variog: co-locatted data found, adding one bin at the origin
variog: computing omnidirectional variogram
variog: co-locatted data found, adding one bin at the origin
variog: computing omnidirectional variogram
variog: co-locatted data found, adding one bin at the origin
variog: computing omnidirectional variogram
variog: co-locatted data found, adding one bin at the origin
variog: computing omnidirectional variogram
variog: co-locatted data found, adding one bin at the origin
variog: computing omnidirectional variogram
variog: co-locatted data found, adding one bin at the origin
variog: computing omnidirectional variogram
variog: co-locatted data found, adding one bin at the origin
variog: computing omnidirectional variogram
variog: co-locatted data found, adding one bin at the origin
variog: computing omnidirectional variogram
variog: co-locatted data found, adding one bin at the origin
variog: computing omnidirectional variogram
variog: co-locatted data found, adding one bin at the origin
variog: computing omnidirectional variogram
variog: co-locatted data found, adding one bin at the origin
variog: computing omnidirectional variogram
variog: co-locatted data found, adding one bin at the origin
variog: computing omnidirectional variogram
variog: co-locatted data found, adding one bin at the origin
variog: computing omnidirectional variogram
variog: co-locatted data found, adding one bin at the origin
variog: computing omnidirectional variogram
variog: co-locatted data found, adding one bin at the origin
variog: computing omnidirectional variogram
variog: co-locatted data found, adding one bin at the origin
variog: computing omnidirectional variogram
variog: co-locatted data found, adding one bin at the origin
variog: computing omnidirectional variogram
variog: co-locatted data found, adding one bin at the origin
variog: computing omnidirectional variogram
variog: co-locatted data found, adding one bin at the origin
variog: computing omnidirectional variogram
variog: co-locatted data found, adding one bin at the origin
variog: computing omnidirectional variogram
variog: co-locatted data found, adding one bin at the origin
variog: computing omnidirectional variogram
variog: co-locatted data found, adding one bin at the origin
variog: computing omnidirectional variogram
variog: co-locatted data found, adding one bin at the origin
variog: computing omnidirectional variogram
variog: co-locatted data found, adding one bin at the origin
variog: computing omnidirectional variogram
variog: co-locatted data found, adding one bin at the origin
variog: computing omnidirectional variogram
variog: co-locatted data found, adding one bin at the origin
variog: computing omnidirectional variogram
variog: co-locatted data found, adding one bin at the origin
variog: computing omnidirectional variogram
variog: co-locatted data found, adding one bin at the origin
variog: computing omnidirectional variogram
variog: co-locatted data found, adding one bin at the origin
variog: computing omnidirectional variogram
variog: co-locatted data found, adding one bin at the origin
variog: computing omnidirectional variogram
variog: co-locatted data found, adding one bin at the origin
variog: computing omnidirectional variogram
variog: co-locatted data found, adding one bin at the origin
variog: computing omnidirectional variogram
variog: co-locatted data found, adding one bin at the origin
variog: computing omnidirectional variogram
variog: co-locatted data found, adding one bin at the origin
variog: computing omnidirectional variogram
variog: co-locatted data found, adding one bin at the origin
variog: computing omnidirectional variogram
variog: co-locatted data found, adding one bin at the origin
variog: computing omnidirectional variogram
variog: co-locatted data found, adding one bin at the origin
variog: computing omnidirectional variogram
variog: co-locatted data found, adding one bin at the origin
variog: computing omnidirectional variogram
variog: co-locatted data found, adding one bin at the origin
variog: computing omnidirectional variogram
variog: co-locatted data found, adding one bin at the origin
variog: computing omnidirectional variogram
variog: co-locatted data found, adding one bin at the origin
variog: computing omnidirectional variogram
variog: co-locatted data found, adding one bin at the origin
variog: computing omnidirectional variogram
variog: co-locatted data found, adding one bin at the origin
variog: computing omnidirectional variogram
variog: co-locatted data found, adding one bin at the origin
variog: computing omnidirectional variogram
variog: co-locatted data found, adding one bin at the origin
variog: computing omnidirectional variogram
variog: co-locatted data found, adding one bin at the origin
variog: computing omnidirectional variogram
variog: co-locatted data found, adding one bin at the origin
variog: computing omnidirectional variogram
variog: co-locatted data found, adding one bin at the origin
variog: computing omnidirectional variogram
variog: co-locatted data found, adding one bin at the origin
variog: computing omnidirectional variogram
variog: co-locatted data found, adding one bin at the origin
variog: computing omnidirectional variogram
variog: co-locatted data found, adding one bin at the origin
variog: computing omnidirectional variogram
variog: co-locatted data found, adding one bin at the origin
variog: computing omnidirectional variogram
variog: co-locatted data found, adding one bin at the origin
variog: computing omnidirectional variogram
variog: co-locatted data found, adding one bin at the origin
variog: computing omnidirectional variogram
variog: co-locatted data found, adding one bin at the origin
variog: computing omnidirectional variogram
variog: co-locatted data found, adding one bin at the origin
variog: computing omnidirectional variogram
variog: co-locatted data found, adding one bin at the origin
variog: computing omnidirectional variogram
variog: co-locatted data found, adding one bin at the origin
variog: computing omnidirectional variogram
variog: co-locatted data found, adding one bin at the origin
variog: computing omnidirectional variogram
variog: co-locatted data found, adding one bin at the origin
variog: computing omnidirectional variogram
variog: co-locatted data found, adding one bin at the origin
variog: computing omnidirectional variogram
variog: co-locatted data found, adding one bin at the origin
variog: computing omnidirectional variogram
variog: co-locatted data found, adding one bin at the origin
variog: computing omnidirectional variogram
variog: co-locatted data found, adding one bin at the origin
variog: computing omnidirectional variogram
variog: co-locatted data found, adding one bin at the origin
variog: computing omnidirectional variogram
variog: co-locatted data found, adding one bin at the origin
variog: computing omnidirectional variogram
variog: co-locatted data found, adding one bin at the origin
variog: computing omnidirectional variogram
variog: co-locatted data found, adding one bin at the origin
variog: computing omnidirectional variogram
variog: co-locatted data found, adding one bin at the origin
variog: computing omnidirectional variogram
variog: co-locatted data found, adding one bin at the origin
variog: computing omnidirectional variogram
variog: co-locatted data found, adding one bin at the origin
variog: computing omnidirectional variogram
variog: co-locatted data found, adding one bin at the origin
variog: computing omnidirectional variogram
variog: co-locatted data found, adding one bin at the origin
variog: computing omnidirectional variogram
variog: co-locatted data found, adding one bin at the origin
variog: computing omnidirectional variogram
variog: co-locatted data found, adding one bin at the origin
variog: computing omnidirectional variogram
variog: co-locatted data found, adding one bin at the origin
variog: computing omnidirectional variogram
variog: co-locatted data found, adding one bin at the origin
variog: computing omnidirectional variogram
variog: co-locatted data found, adding one bin at the origin
variog: computing omnidirectional variogram
variog: co-locatted data found, adding one bin at the origin
variog: computing omnidirectional variogram
variog: co-locatted data found, adding one bin at the origin
variog: computing omnidirectional variogram
variog: co-locatted data found, adding one bin at the origin
variog: computing omnidirectional variogram
variog: co-locatted data found, adding one bin at the origin
variog: computing omnidirectional variogram
variog: co-locatted data found, adding one bin at the origin
variog: computing omnidirectional variogram
variog: co-locatted data found, adding one bin at the origin
variog: computing omnidirectional variogram
variog: co-locatted data found, adding one bin at the origin
variog: computing omnidirectional variogram
variog: co-locatted data found, adding one bin at the origin
variog: computing omnidirectional variogram
variog: co-locatted data found, adding one bin at the origin
variog: computing omnidirectional variogram
variog: co-locatted data found, adding one bin at the origin
variog: computing omnidirectional variogram
variog: co-locatted data found, adding one bin at the origin
variog: computing omnidirectional variogram
variog: co-locatted data found, adding one bin at the origin
variog: computing omnidirectional variogram
variog: co-locatted data found, adding one bin at the origin
variog: computing omnidirectional variogram
variog: co-locatted data found, adding one bin at the origin
variog: computing omnidirectional variogram
variog: co-locatted data found, adding one bin at the origin
variog: computing omnidirectional variogram
variog: co-locatted data found, adding one bin at the origin
variog: computing omnidirectional variogram
variog: co-locatted data found, adding one bin at the origin
variog: computing omnidirectional variogram
variog: co-locatted data found, adding one bin at the origin
variog: computing omnidirectional variogram
variog: co-locatted data found, adding one bin at the origin
variog: computing omnidirectional variogram
variog: co-locatted data found, adding one bin at the origin
variog: computing omnidirectional variogram
variog: co-locatted data found, adding one bin at the origin
variog: computing omnidirectional variogram
variog: co-locatted data found, adding one bin at the origin
variog: computing omnidirectional variogram
variog: co-locatted data found, adding one bin at the origin
variog: computing omnidirectional variogram
variog: co-locatted data found, adding one bin at the origin
variog: computing omnidirectional variogram
variog: co-locatted data found, adding one bin at the origin
variog: computing omnidirectional variogram
variog: co-locatted data found, adding one bin at the origin
variog: computing omnidirectional variogram
variog: co-locatted data found, adding one bin at the origin
variog: computing omnidirectional variogram
variog: co-locatted data found, adding one bin at the origin
variog: computing omnidirectional variogram
variog: co-locatted data found, adding one bin at the origin
variog: computing omnidirectional variogram
variog: co-locatted data found, adding one bin at the origin
variog: computing omnidirectional variogram
variog: co-locatted data found, adding one bin at the origin
variog: computing omnidirectional variogram
variog: co-locatted data found, adding one bin at the origin
variog: computing omnidirectional variogram
variog: co-locatted data found, adding one bin at the origin
variog: computing omnidirectional variogram
variog: co-locatted data found, adding one bin at the origin
variog: computing omnidirectional variogram
variog: co-locatted data found, adding one bin at the origin
variog: computing omnidirectional variogram
variog: co-locatted data found, adding one bin at the origin
variog: computing omnidirectional variogram
variog: co-locatted data found, adding one bin at the origin
variog: computing omnidirectional variogram
variog: co-locatted data found, adding one bin at the origin
variog: computing omnidirectional variogram
variog: co-locatted data found, adding one bin at the origin
variog: computing omnidirectional variogram
variog: co-locatted data found, adding one bin at the origin
variog: computing omnidirectional variogram
variog: co-locatted data found, adding one bin at the origin
variog: computing omnidirectional variogram
variog: co-locatted data found, adding one bin at the origin
variog: computing omnidirectional variogram
variog: co-locatted data found, adding one bin at the origin
variog: computing omnidirectional variogram
variog: co-locatted data found, adding one bin at the origin
variog: computing omnidirectional variogram
variog: co-locatted data found, adding one bin at the origin
variog: computing omnidirectional variogram
variog: co-locatted data found, adding one bin at the origin
variog: computing omnidirectional variogram
variog: co-locatted data found, adding one bin at the origin
variog: computing omnidirectional variogram
variog: co-locatted data found, adding one bin at the origin
variog: computing omnidirectional variogram
variog: co-locatted data found, adding one bin at the origin
variog: computing omnidirectional variogram
variog: co-locatted data found, adding one bin at the origin
variog: computing omnidirectional variogram
variog: co-locatted data found, adding one bin at the origin
variog: computing omnidirectional variogram
variog: co-locatted data found, adding one bin at the origin
variog: computing omnidirectional variogram
variog: co-locatted data found, adding one bin at the origin
variog: computing omnidirectional variogram
variog: co-locatted data found, adding one bin at the origin
variog: computing omnidirectional variogram
variog: co-locatted data found, adding one bin at the origin
variog: computing omnidirectional variogram
variog: co-locatted data found, adding one bin at the origin
variog: computing omnidirectional variogram
variog: co-locatted data found, adding one bin at the origin
variog: computing omnidirectional variogram
variog: co-locatted data found, adding one bin at the origin
variog: computing omnidirectional variogram
variog: co-locatted data found, adding one bin at the origin
variog: computing omnidirectional variogram
variog: co-locatted data found, adding one bin at the origin
Using  as id variables
Warning: Using `size` aesthetic for lines was deprecated in ggplot2 3.4.0.
ℹ Please use `linewidth` instead.
`geom_smooth()` using formula = 'y ~ x'

11 Model investigation

The summary() method generates simple summaries (mean, standard deviation as well as 10, 50 and 90 percentiles).

ltmp_sub.brm3 %>% summary()
Warning: There were 3 divergent transitions after warmup. Increasing
adapt_delta above 0.99 may help. See
http://mc-stan.org/misc/warnings.html#divergent-transitions-after-warmup
 Family: binomial 
  Links: mu = logit 
Formula: n.points | trials(total.points) ~ fREPORT_YEAR + (1 | REEF_SITE) + (1 | REEF_SITE_TRANSECT) 
   Data: ltmp_sub (Number of observations: 418) 
  Draws: 3 chains, each with iter = 10000; warmup = 5000; thin = 10;
         total post-warmup draws = 1500

Multilevel Hyperparameters:
~REEF_SITE (Number of levels: 3) 
              Estimate Est.Error l-95% CI u-95% CI Rhat Bulk_ESS Tail_ESS
sd(Intercept)     0.12      0.14     0.00     0.50 1.00     1576     1584

~REEF_SITE_TRANSECT (Number of levels: 15) 
              Estimate Est.Error l-95% CI u-95% CI Rhat Bulk_ESS Tail_ESS
sd(Intercept)     0.20      0.05     0.14     0.31 1.00     1382     1419

Regression Coefficients:
                 Estimate Est.Error l-95% CI u-95% CI Rhat Bulk_ESS Tail_ESS
Intercept           -0.69      0.11    -0.87    -0.38 1.00     1196     1271
fREPORT_YEAR2021    -0.51      0.06    -0.63    -0.39 1.00     1417     1297
fREPORT_YEAR2020    -0.52      0.06    -0.63    -0.41 1.00     1337     1346
fREPORT_YEAR2019    -0.89      0.06    -1.01    -0.77 1.00     1336     1281
fREPORT_YEAR2018    -1.27      0.07    -1.40    -1.13 1.00     1327     1509
fREPORT_YEAR2017    -1.32      0.07    -1.46    -1.19 1.00     1522     1349
fREPORT_YEAR2016    -0.38      0.06    -0.49    -0.27 1.00     1248     1457
fREPORT_YEAR2015    -0.71      0.06    -0.82    -0.59 1.00     1370     1383
fREPORT_YEAR2014    -1.01      0.07    -1.15    -0.88 1.00     1232     1361
fREPORT_YEAR2013    -1.13      0.07    -1.25    -1.01 1.00     1269     1339
fREPORT_YEAR2012    -1.41      0.07    -1.55    -1.27 1.00     1343     1417
fREPORT_YEAR2011    -0.33      0.05    -0.44    -0.23 1.00     1247     1130
fREPORT_YEAR2010    -0.03      0.06    -0.14     0.08 1.00     1368     1348
fREPORT_YEAR2009    -0.10      0.06    -0.20     0.01 1.00     1342     1457
fREPORT_YEAR2008    -0.34      0.06    -0.46    -0.23 1.00     1237     1330
fREPORT_YEAR2007    -0.45      0.06    -0.56    -0.33 1.00     1394     1501
fREPORT_YEAR2006    -0.29      0.06    -0.40    -0.18 1.00     1147     1328
fREPORT_YEAR2005     0.19      0.05     0.08     0.30 1.00     1209     1251
fREPORT_YEAR2004     0.07      0.05    -0.03     0.18 1.00     1139     1280
fREPORT_YEAR2003     0.14      0.05     0.04     0.25 1.00     1153     1128
fREPORT_YEAR2002     0.24      0.05     0.14     0.35 1.00     1345     1282
fREPORT_YEAR2001     0.16      0.05     0.06     0.27 1.00     1214     1384
fREPORT_YEAR2000     0.09      0.05    -0.01     0.20 1.00     1373     1349
fREPORT_YEAR1999     0.09      0.05    -0.02     0.19 1.00     1186     1416
fREPORT_YEAR1998    -0.14      0.06    -0.25    -0.03 1.00     1153     1405
fREPORT_YEAR1997    -0.38      0.06    -0.50    -0.27 1.00     1211     1501
fREPORT_YEAR1996    -0.38      0.06    -0.48    -0.27 1.00     1355     1312
fREPORT_YEAR1995    -0.58      0.06    -0.71    -0.45 1.00     1328     1461

Draws were sampled using sampling(NUTS). For each parameter, Bulk_ESS
and Tail_ESS are effective sample size measures, and Rhat is the potential
scale reduction factor on split chains (at convergence, Rhat = 1).
Warning: There were 3 divergent transitions after warmup. Increasing
adapt_delta above 0.99 may help. See
http://mc-stan.org/misc/warnings.html#divergent-transitions-after-warmup
## Extract the posterior draws from the fitted model as a data frame
## (one row per draw, one column per parameter).
ltmp_sub.brm3 |> as_draws_df()
# A draws_df: 500 iterations, 3 chains, and 55 variables
   b_Intercept b_fREPORT_YEAR2021 b_fREPORT_YEAR2020 b_fREPORT_YEAR2019
1        -0.71              -0.58              -0.60              -0.91
2        -0.70              -0.50              -0.51              -1.00
3        -0.58              -0.59              -0.56              -0.97
4        -0.82              -0.53              -0.48              -1.03
5        -0.81              -0.50              -0.43              -0.82
6        -0.65              -0.52              -0.48              -0.94
7        -0.70              -0.60              -0.53              -0.80
8        -0.76              -0.53              -0.53              -0.96
9        -0.78              -0.55              -0.46              -0.82
10       -0.67              -0.41              -0.45              -0.93
   b_fREPORT_YEAR2018 b_fREPORT_YEAR2017 b_fREPORT_YEAR2016 b_fREPORT_YEAR2015
1                -1.4               -1.3              -0.42              -0.75
2                -1.2               -1.2              -0.34              -0.74
3                -1.3               -1.4              -0.58              -0.78
4                -1.3               -1.4              -0.42              -0.69
5                -1.2               -1.3              -0.40              -0.71
6                -1.4               -1.3              -0.36              -0.70
7                -1.2               -1.4              -0.41              -0.72
8                -1.2               -1.3              -0.46              -0.61
9                -1.2               -1.2              -0.31              -0.65
10               -1.2               -1.3              -0.43              -0.61
# ... with 1490 more draws, and 47 more variables
# ... hidden reserved variables {'.chain', '.iteration', '.draw'}
## Summarise the posterior draws of the fixed effects (back-transformed
## to the proportion scale) and the random-effect standard deviations.
ltmp_sub.brm3 |>
  as_draws_df() |>
  dplyr::select(matches("^b_.*|^sd_.*")) |>
  ## Back-transform only the fixed effects from the logit scale; the
  ## sd_ parameters remain on the link scale.
  mutate(across(matches("b_.*"), plogis)) |>
  summarise_draws(
    median,
    HDInterval::hdi,
    ## Exceedance probabilities: plogis(0) = 0.5, so comparing the
    ## back-transformed draws against 0.5 is equivalent to comparing the
    ## logit-scale effects against 0. (Comparing against 1, as before,
    ## is uninformative: plogis() maps every draw into (0, 1), so
    ## mean(.x < 1) is always exactly 1 — see the Pl/Pg columns of the
    ## original output.) For the sd_ parameters, which are not
    ## back-transformed, these columns should be ignored.
    Pl = ~ mean(.x < 0.5),
    Pg = ~ mean(.x > 0.5),
    rhat,
    ess_bulk
  )
Warning: Dropping 'draws_df' class as required metadata was removed.
# A tibble: 30 × 8
   variable           median lower upper    Pl    Pg  rhat ess_bulk
   <chr>               <dbl> <dbl> <dbl> <dbl> <dbl> <dbl>    <dbl>
 1 b_Intercept         0.330 0.286 0.382     1     0 1.00     1136.
 2 b_fREPORT_YEAR2021  0.376 0.348 0.403     1     0 1.00     1416.
 3 b_fREPORT_YEAR2020  0.374 0.349 0.401     1     0 1.00     1325.
 4 b_fREPORT_YEAR2019  0.290 0.267 0.316     1     0 1.000    1327.
 5 b_fREPORT_YEAR2018  0.220 0.198 0.243     1     0 1.000    1326.
 6 b_fREPORT_YEAR2017  0.211 0.189 0.234     1     0 0.999    1512.
 7 b_fREPORT_YEAR2016  0.406 0.381 0.434     1     0 1.00     1229.
 8 b_fREPORT_YEAR2015  0.331 0.305 0.357     1     0 1.00     1350.
 9 b_fREPORT_YEAR2014  0.267 0.241 0.292     1     0 1.00     1195.
10 b_fREPORT_YEAR2013  0.244 0.222 0.268     1     0 0.999    1263.
# ℹ 20 more rows
## Marginal Bayesian R-squared: variance explained by the fixed effects
## alone (re.form = NA excludes all random effects from the predictions).
ltmp_sub.brm3 |>
  bayes_R2(re.form = NA, summary = FALSE) |>
  median_hdci()
          y      ymin      ymax .width .point .interval
1 0.6750637 0.6364879 0.7171351   0.95 median      hdci
## Bayesian R-squared conditional on the site-level random intercepts
## only (transect-level effects still excluded).
ltmp_sub.brm3 |>
  bayes_R2(re.form = ~ (1 | REEF_SITE), summary = FALSE) |>
  median_hdci()
          y      ymin      ymax .width .point .interval
1 0.6696107 0.6418767 0.6946751   0.95 median      hdci
## Conditional Bayesian R-squared: variance explained by the fixed
## effects together with both levels of random intercepts.
ltmp_sub.brm3 |>
  bayes_R2(re.form = ~ (1 | REEF_SITE) + (1 | REEF_SITE_TRANSECT), summary = FALSE) |>
  median_hdci()
          y      ymin      ymax .width .point .interval
1 0.7688076 0.7560056 0.7790249   0.95 median      hdci

12 Further investigations

Let's start by calculating the cell means.

## Estimate the year-level marginal means on the response (proportion)
## scale and plot the coral-cover trend with its 95% HPD ribbon.
ltmp_sub.brm3 |>
  emmeans(~fREPORT_YEAR, type = "response") |>
  as.data.frame() |>
  mutate(
    ## Reverse the factor levels so years run chronologically, then
    ## recover a numeric year for a continuous x-axis.
    fREPORT_YEAR = factor(fREPORT_YEAR, levels = rev(levels(fREPORT_YEAR))),
    REPORT_YEAR = as.numeric(as.character(fREPORT_YEAR))
  ) |>
  ggplot(aes(y = prob, x = REPORT_YEAR)) +
  geom_ribbon(aes(ymin = lower.HPD, ymax = upper.HPD), fill = "orange", alpha = 0.3) +
  geom_line() +
  geom_point() +
  theme_classic() +
  scale_y_continuous("Live hard coral cover", label = scales::percent_format())
Warning: Method 'parse_bf' is deprecated. Please use 'brmsterms' instead.

## Marginal means for just 2011 and 2012, back-transformed to the
## proportion scale. `newdata` is reused for the pairwise contrast below.
newdata <- list(fREPORT_YEAR = c(2011, 2012))
ltmp_sub.brm3 |>
  emmeans(~fREPORT_YEAR,
    at = newdata,
    type = "response"
  )
Warning: Method 'parse_bf' is deprecated. Please use 'brmsterms' instead.
 fREPORT_YEAR  prob lower.HPD upper.HPD
         2011 0.261    0.2252     0.311
         2012 0.108    0.0866     0.134

Point estimate displayed: median 
Results are back-transformed from the logit scale 
HPD interval probability: 0.95 
## Pairwise comparison of 2012 vs 2011 (reverse = TRUE puts 2012 in the
## numerator); with type = "response" the result is an odds ratio.
ltmp_sub.brm3 |>
  emmeans(~fREPORT_YEAR,
    at = newdata,
    type = "response"
  ) |>
  pairs(reverse = TRUE)
Warning: Method 'parse_bf' is deprecated. Please use 'brmsterms' instead.
 contrast                            odds.ratio lower.HPD upper.HPD
 fREPORT_YEAR2012 / fREPORT_YEAR2011      0.343     0.296     0.389

Point estimate displayed: median 
Results are back-transformed from the log odds ratio scale 
HPD interval probability: 0.95 
## Planned comparison: the average of 2000-2005 vs the average of
## 2012-2013. The contrast weights (1/6 for each of the six early years,
## -1/2 for each of the two later years) sum to zero.
newdata <- list(fREPORT_YEAR = c(2000:2005, 2012:2013))
cmat <- cbind(c(rep(1 / 6, 6), rep(-1 / 2, 2)))
ltmp_sub.brm3 |>
  emmeans(~fREPORT_YEAR,
    at = newdata,
    type = "response"
  ) |>
  ## regrid() re-expresses the means on the response scale first, so the
  ## contrast is an absolute difference in proportions (not an odds ratio).
  regrid() |>
  contrast(method = list(fREPORT_YEAR = cmat))
Warning: Method 'parse_bf' is deprecated. Please use 'brmsterms' instead.
 contrast     estimate lower.HPD upper.HPD
 fREPORT_YEAR    0.242     0.219     0.272

Point estimate displayed: median 
HPD interval probability: 0.95 
## Reproduce the 2012 vs 2011 odds ratio manually from the posterior
## draws, adding exceedance probabilities relative to 1.
ltmp_sub.brm3 |>
  emmeans(~fREPORT_YEAR,
    at = list(fREPORT_YEAR = c(2011, 2012)),
    type = "link"
  ) |>
  pairs(reverse = TRUE) |>
  tidy_draws() |>
  ## NOTE(review): exp() is applied to every column of the tibble,
  ## including the reserved .chain/.iteration/.draw indices — confirm
  ## summarise_draws() still recovers the draw metadata correctly.
  exp() |>
  summarise_draws(
    median,
    HDInterval::hdi,
    Pl = ~ mean(.x < 1),
    Pg = ~ mean(.x > 1)
  )
Warning: Method 'parse_bf' is deprecated. Please use 'brmsterms' instead.
# A tibble: 1 × 6
  variable                                     median lower upper    Pl    Pg
  <chr>                                         <dbl> <dbl> <dbl> <dbl> <dbl>
1 contrast fREPORT_YEAR2012 - fREPORT_YEAR2011  0.343 0.296 0.389     1     0
## Compare 2012 against the average of 1999-2005. Without regrid() the
## contrast is formed on the link scale and back-transformed, so it is
## reported as an odds ratio.
cmat <- cbind(c(rep(-1 / 7, 7), 1))
ltmp_sub.brm3 |>
  emmeans(~fREPORT_YEAR,
    at = list(fREPORT_YEAR = c(1999:2005, 2012)),
    type = "response"
  ) |>
  contrast(method = list(fREPORT_YEAR = cmat))
Warning: Method 'parse_bf' is deprecated. Please use 'brmsterms' instead.
 contrast     odds.ratio lower.HPD upper.HPD
 fREPORT_YEAR      0.213     0.188     0.238

Point estimate displayed: median 
Results are back-transformed from the log odds ratio scale 
HPD interval probability: 0.95 
## The same contrast after regrid(): now an absolute difference in
## proportions rather than an odds ratio.
ltmp_sub.brm3 |>
  emmeans(~fREPORT_YEAR,
    at = list(fREPORT_YEAR = c(1999:2005, 2012)),
    type = "response"
  ) |>
  regrid() |>
  contrast(method = list(fREPORT_YEAR = cmat))
Warning: Method 'parse_bf' is deprecated. Please use 'brmsterms' instead.
 contrast     estimate lower.HPD upper.HPD
 fREPORT_YEAR   -0.255    -0.286    -0.229

Point estimate displayed: median 
HPD interval probability: 0.95 
## Binomial GAMM with a smooth trend over survey years plus site- and
## transect-level random intercepts.
## Bug fix: the link must be "logit", not "log". With a log link the
## implied binomial probability exp(eta) can exceed 1, which is exactly
## what the repeated "binomial_lpmf: Probability parameter ... must be
## in the interval [0, 1]" initial-value rejections during sampling are
## reporting; the intercept prior derived below is also constructed on
## the qlogis (logit) scale.
ltmp.form <- bf(
  n.points | trials(total.points) ~ s(REPORT_YEAR) +
    (1 | REEF_SITE) + (1 | REEF_SITE_TRANSECT),
  family = binomial(link = "logit")
)
## Inspect the default priors so informed replacements can be supplied.
get_prior(ltmp.form, data = ltmp_sub)
                  prior     class           coef              group resp dpar
                 (flat)         b                                            
                 (flat)         b sREPORT_YEAR_1                             
 student_t(3, 3.9, 2.5) Intercept                                            
   student_t(3, 0, 2.5)        sd                                            
   student_t(3, 0, 2.5)        sd                         REEF_SITE          
   student_t(3, 0, 2.5)        sd      Intercept          REEF_SITE          
   student_t(3, 0, 2.5)        sd                REEF_SITE_TRANSECT          
   student_t(3, 0, 2.5)        sd      Intercept REEF_SITE_TRANSECT          
   student_t(3, 0, 2.5)       sds                                            
   student_t(3, 0, 2.5)       sds s(REPORT_YEAR)                             
 nlpar lb ub       source
                  default
             (vectorized)
                  default
        0         default
        0    (vectorized)
        0    (vectorized)
        0    (vectorized)
        0    (vectorized)
        0         default
        0    (vectorized)
## Summarise the observed data on the logit scale to motivate the
## location and scale of the intercept prior used below.
## NOTE(review): qlogis() is infinite for HC of exactly 0 or 100 —
## confirm ltmp_sub contains no such values.
ltmp_sub |>
  summarise(
    Median = median(qlogis(HC / 100)),
    MAD = mad(qlogis(HC / 100)),
    N = mean(total.points)
  )
# A tibble: 1 × 3
  Median   MAD     N
   <dbl> <dbl> <dbl>
1  -1.10 0.579  198.
## Weakly informative priors: the intercept is centred on the observed
## logit-scale median computed above; the smoothing standard deviation
## (sds) gets a wider student-t to permit a flexible trend.
priors <- prior(normal(-1.1, 0.8), class = "Intercept") +
  prior(normal(0, 1), class = "b") +
  prior(student_t(3, 0, 1), class = "sd") +
  prior(student_t(3, 0, 10), class = "sds")
## Fit the GAMM. adapt_delta and max_treedepth are raised to reduce
## divergent transitions; draws are thinned by 10 and priors are also
## sampled (sample_prior = "yes") for later prior/posterior comparison.
ltmp_sub.brm4 <- brm(ltmp.form,
  data = ltmp_sub,
  prior = priors,
  sample_prior = "yes",
  iter = 5000,
  warmup = 2000,
  chains = 3, cores = 3,
  thin = 10,
  refresh = 100,
  control = list(adapt_delta = 0.99, max_treedepth = 20),
  backend = "rstan"
)
Compiling Stan program...
Start sampling
##
## Plot the modelled smooth trend (conditioned on 200 photo points per
## transect) with the raw observations overlaid.
ltmp_sub.brm4 |>
  conditional_effects(conditions = data.frame(total.points = 200)) |>
  plot(points = TRUE)

## Inspect the spline basis matrix mgcv constructs for s(REPORT_YEAR)
## (one column per basis function, one row per observation).
data.frame(mgcv::smoothCon(s(REPORT_YEAR), data = ltmp_sub)[[1]]$X) |>
  head()
        X1        X2       X3        X4       X5        X6       X7      X8 X9
1 1.613482 0.7174855 1.797952 0.9553312 -1.98971 -1.168654 2.144241 1.38704  1
2 1.613482 0.7174855 1.797952 0.9553312 -1.98971 -1.168654 2.144241 1.38704  1
3 1.613482 0.7174855 1.797952 0.9553312 -1.98971 -1.168654 2.144241 1.38704  1
4 1.613482 0.7174855 1.797952 0.9553312 -1.98971 -1.168654 2.144241 1.38704  1
5 1.613482 0.7174855 1.797952 0.9553312 -1.98971 -1.168654 2.144241 1.38704  1
6 1.613482 0.7174855 1.797952 0.9553312 -1.98971 -1.168654 2.144241 1.38704  1
        X10
1 -1.660199
2 -1.660199
3 -1.660199
4 -1.660199
5 -1.660199
6 -1.660199
Chain 3:   Error evaluating the log probability at the initial value.
Chain 3: Exception: binomial_lpmf: Probability parameter[3] is 3.34124, but must be in the interval [0, 1] (in 'anon_model', line 74, column 4 to column 44)
Chain 3: Rejecting initial value:
Chain 3:   Error evaluating the log probability at the initial value.
Chain 3: Exception: binomial_lpmf: Probability parameter[1] is 649.427, but must be in the interval [0, 1] (in 'anon_model', line 74, column 4 to column 44)
Chain 3: Rejecting initial value:
Chain 3:   Error evaluating the log probability at the initial value.
Chain 3: Exception: binomial_lpmf: Probability parameter[1] is 7.24472, but must be in the interval [0, 1] (in 'anon_model', line 74, column 4 to column 44)
Chain 3: Rejecting initial value:
Chain 3:   Error evaluating the log probability at the initial value.
Chain 3: Exception: binomial_lpmf: Probability parameter[1] is 85.5486, but must be in the interval [0, 1] (in 'anon_model', line 74, column 4 to column 44)
Chain 3: Rejecting initial value:
Chain 3:   Error evaluating the log probability at the initial value.
Chain 3: Exception: binomial_lpmf: Probability parameter[6] is 3.47279, but must be in the interval [0, 1] (in 'anon_model', line 74, column 4 to column 44)
Chain 3: Rejecting initial value:
Chain 3:   Error evaluating the log probability at the initial value.
Chain 3: Exception: binomial_lpmf: Probability parameter[2] is 1.09665, but must be in the interval [0, 1] (in 'anon_model', line 74, column 4 to column 44)
Chain 3: Rejecting initial value:
Chain 3:   Error evaluating the log probability at the initial value.
Chain 3: Exception: binomial_lpmf: Probability parameter[1] is 5.88037, but must be in the interval [0, 1] (in 'anon_model', line 74, column 4 to column 44)
Chain 3: Rejecting initial value:
Chain 3:   Error evaluating the log probability at the initial value.
Chain 3: Exception: binomial_lpmf: Probability parameter[1] is 2.79872, but must be in the interval [0, 1] (in 'anon_model', line 74, column 4 to column 44)
Chain 3: Rejecting initial value:
Chain 3:   Error evaluating the log probability at the initial value.
Chain 3: Exception: binomial_lpmf: Probability parameter[1] is 1.69481, but must be in the interval [0, 1] (in 'anon_model', line 74, column 4 to column 44)
Chain 3: Rejecting initial value:
Chain 3:   Error evaluating the log probability at the initial value.
Chain 3: Exception: binomial_lpmf: Probability parameter[6] is 1.89914, but must be in the interval [0, 1] (in 'anon_model', line 74, column 4 to column 44)
Chain 3: Rejecting initial value:
Chain 3:   Error evaluating the log probability at the initial value.
Chain 3: Exception: binomial_lpmf: Probability parameter[185] is 1.0044, but must be in the interval [0, 1] (in 'anon_model', line 74, column 4 to column 44)
Chain 3: Rejecting initial value:
Chain 3:   Error evaluating the log probability at the initial value.
Chain 3: Exception: binomial_lpmf: Probability parameter[1] is 4.91291, but must be in the interval [0, 1] (in 'anon_model', line 74, column 4 to column 44)
Chain 3: Rejecting initial value:
Chain 3:   Error evaluating the log probability at the initial value.
Chain 3: Exception: binomial_lpmf: Probability parameter[12] is 1.19554, but must be in the interval [0, 1] (in 'anon_model', line 74, column 4 to column 44)
Chain 3: Rejecting initial value:
Chain 3:   Error evaluating the log probability at the initial value.
Chain 3: Exception: binomial_lpmf: Probability parameter[1] is 844.801, but must be in the interval [0, 1] (in 'anon_model', line 74, column 4 to column 44)
Chain 3: Rejecting initial value:
Chain 3:   Error evaluating the log probability at the initial value.
Chain 3: Exception: binomial_lpmf: Probability parameter[13] is 1.34318, but must be in the interval [0, 1] (in 'anon_model', line 74, column 4 to column 44)
Chain 3: Rejecting initial value:
Chain 3:   Error evaluating the log probability at the initial value.
Chain 3: Exception: binomial_lpmf: Probability parameter[182] is 1.03449, but must be in the interval [0, 1] (in 'anon_model', line 74, column 4 to column 44)
Chain 3: 
Chain 3: Gradient evaluation took 0.000429 seconds
Chain 3: 1000 transitions using 10 leapfrog steps per transition would take 4.29 seconds.
Chain 3: Adjust your expectations accordingly!
Chain 3: 
Chain 3: 
Chain 3: Iteration:    1 / 5000 [  0%]  (Warmup)
Chain 2: Iteration:  100 / 5000 [  2%]  (Warmup)
Chain 1: Iteration:  100 / 5000 [  2%]  (Warmup)
Chain 3: Iteration:  100 / 5000 [  2%]  (Warmup)
Chain 2: Iteration:  200 / 5000 [  4%]  (Warmup)
Chain 2: Iteration:  300 / 5000 [  6%]  (Warmup)
Chain 3: Iteration:  200 / 5000 [  4%]  (Warmup)
Chain 2: Iteration:  400 / 5000 [  8%]  (Warmup)
Chain 1: Iteration:  200 / 5000 [  4%]  (Warmup)
Chain 3: Iteration:  300 / 5000 [  6%]  (Warmup)
Chain 2: Iteration:  500 / 5000 [ 10%]  (Warmup)
Chain 1: Iteration:  300 / 5000 [  6%]  (Warmup)
Chain 3: Iteration:  400 / 5000 [  8%]  (Warmup)
Chain 1: Iteration:  400 / 5000 [  8%]  (Warmup)
Chain 2: Iteration:  600 / 5000 [ 12%]  (Warmup)
Chain 3: Iteration:  500 / 5000 [ 10%]  (Warmup)
Chain 3: Iteration:  600 / 5000 [ 12%]  (Warmup)
Chain 2: Iteration:  700 / 5000 [ 14%]  (Warmup)
Chain 1: Iteration:  500 / 5000 [ 10%]  (Warmup)
Chain 3: Iteration:  700 / 5000 [ 14%]  (Warmup)
Chain 2: Iteration:  800 / 5000 [ 16%]  (Warmup)
Chain 3: Iteration:  800 / 5000 [ 16%]  (Warmup)
Chain 1: Iteration:  600 / 5000 [ 12%]  (Warmup)
Chain 3: Iteration:  900 / 5000 [ 18%]  (Warmup)
Chain 2: Iteration:  900 / 5000 [ 18%]  (Warmup)
Chain 1: Iteration:  700 / 5000 [ 14%]  (Warmup)
Chain 3: Iteration: 1000 / 5000 [ 20%]  (Warmup)
Chain 3: Iteration: 1100 / 5000 [ 22%]  (Warmup)
Chain 2: Iteration: 1000 / 5000 [ 20%]  (Warmup)
Chain 1: Iteration:  800 / 5000 [ 16%]  (Warmup)
Chain 3: Iteration: 1200 / 5000 [ 24%]  (Warmup)
Chain 2: Iteration: 1100 / 5000 [ 22%]  (Warmup)
Chain 3: Iteration: 1300 / 5000 [ 26%]  (Warmup)
Chain 1: Iteration:  900 / 5000 [ 18%]  (Warmup)
Chain 2: Iteration: 1200 / 5000 [ 24%]  (Warmup)
Chain 3: Iteration: 1400 / 5000 [ 28%]  (Warmup)
Chain 1: Iteration: 1000 / 5000 [ 20%]  (Warmup)
Chain 3: Iteration: 1500 / 5000 [ 30%]  (Warmup)
Chain 2: Iteration: 1300 / 5000 [ 26%]  (Warmup)
Chain 1: Iteration: 1100 / 5000 [ 22%]  (Warmup)
Chain 3: Iteration: 1600 / 5000 [ 32%]  (Warmup)
Chain 2: Iteration: 1400 / 5000 [ 28%]  (Warmup)
Chain 1: Iteration: 1200 / 5000 [ 24%]  (Warmup)
Chain 3: Iteration: 1700 / 5000 [ 34%]  (Warmup)
Chain 3: Iteration: 1800 / 5000 [ 36%]  (Warmup)
Chain 1: Iteration: 1300 / 5000 [ 26%]  (Warmup)
Chain 2: Iteration: 1500 / 5000 [ 30%]  (Warmup)
Chain 3: Iteration: 1900 / 5000 [ 38%]  (Warmup)
Chain 1: Iteration: 1400 / 5000 [ 28%]  (Warmup)
Chain 2: Iteration: 1600 / 5000 [ 32%]  (Warmup)
Chain 3: Iteration: 2000 / 5000 [ 40%]  (Warmup)
Chain 3: Iteration: 2001 / 5000 [ 40%]  (Sampling)
Chain 3: Iteration: 2100 / 5000 [ 42%]  (Sampling)
Chain 1: Iteration: 1500 / 5000 [ 30%]  (Warmup)
Chain 2: Iteration: 1700 / 5000 [ 34%]  (Warmup)
Chain 3: Iteration: 2200 / 5000 [ 44%]  (Sampling)
Chain 1: Iteration: 1600 / 5000 [ 32%]  (Warmup)
Chain 3: Iteration: 2300 / 5000 [ 46%]  (Sampling)
Chain 2: Iteration: 1800 / 5000 [ 36%]  (Warmup)
Chain 3: Iteration: 2400 / 5000 [ 48%]  (Sampling)
Chain 1: Iteration: 1700 / 5000 [ 34%]  (Warmup)
Chain 2: Iteration: 1900 / 5000 [ 38%]  (Warmup)
Chain 3: Iteration: 2500 / 5000 [ 50%]  (Sampling)
Chain 1: Iteration: 1800 / 5000 [ 36%]  (Warmup)
Chain 3: Iteration: 2600 / 5000 [ 52%]  (Sampling)
Chain 2: Iteration: 2000 / 5000 [ 40%]  (Warmup)
Chain 2: Iteration: 2001 / 5000 [ 40%]  (Sampling)
Chain 3: Iteration: 2700 / 5000 [ 54%]  (Sampling)
Chain 1: Iteration: 1900 / 5000 [ 38%]  (Warmup)
Chain 2: Iteration: 2100 / 5000 [ 42%]  (Sampling)
Chain 3: Iteration: 2800 / 5000 [ 56%]  (Sampling)
Chain 3: Iteration: 2900 / 5000 [ 58%]  (Sampling)
Chain 2: Iteration: 2200 / 5000 [ 44%]  (Sampling)
Chain 3: Iteration: 3000 / 5000 [ 60%]  (Sampling)
Chain 1: Iteration: 2000 / 5000 [ 40%]  (Warmup)
Chain 1: Iteration: 2001 / 5000 [ 40%]  (Sampling)
Chain 2: Iteration: 2300 / 5000 [ 46%]  (Sampling)
Chain 3: Iteration: 3100 / 5000 [ 62%]  (Sampling)
Chain 3: Iteration: 3200 / 5000 [ 64%]  (Sampling)
Chain 2: Iteration: 2400 / 5000 [ 48%]  (Sampling)
Chain 1: Iteration: 2100 / 5000 [ 42%]  (Sampling)
Chain 3: Iteration: 3300 / 5000 [ 66%]  (Sampling)
Chain 2: Iteration: 2500 / 5000 [ 50%]  (Sampling)
Chain 3: Iteration: 3400 / 5000 [ 68%]  (Sampling)
Chain 3: Iteration: 3500 / 5000 [ 70%]  (Sampling)
Chain 2: Iteration: 2600 / 5000 [ 52%]  (Sampling)
Chain 1: Iteration: 2200 / 5000 [ 44%]  (Sampling)
Chain 3: Iteration: 3600 / 5000 [ 72%]  (Sampling)
Chain 2: Iteration: 2700 / 5000 [ 54%]  (Sampling)
Chain 3: Iteration: 3700 / 5000 [ 74%]  (Sampling)
Chain 1: Iteration: 2300 / 5000 [ 46%]  (Sampling)
Chain 2: Iteration: 2800 / 5000 [ 56%]  (Sampling)
Chain 3: Iteration: 3800 / 5000 [ 76%]  (Sampling)
Chain 3: Iteration: 3900 / 5000 [ 78%]  (Sampling)
Chain 2: Iteration: 2900 / 5000 [ 58%]  (Sampling)
Chain 3: Iteration: 4000 / 5000 [ 80%]  (Sampling)
Chain 1: Iteration: 2400 / 5000 [ 48%]  (Sampling)
Chain 2: Iteration: 3000 / 5000 [ 60%]  (Sampling)
Chain 3: Iteration: 4100 / 5000 [ 82%]  (Sampling)
Chain 3: Iteration: 4200 / 5000 [ 84%]  (Sampling)
Chain 2: Iteration: 3100 / 5000 [ 62%]  (Sampling)
Chain 1: Iteration: 2500 / 5000 [ 50%]  (Sampling)
Chain 3: Iteration: 4300 / 5000 [ 86%]  (Sampling)
Chain 2: Iteration: 3200 / 5000 [ 64%]  (Sampling)
Chain 3: Iteration: 4400 / 5000 [ 88%]  (Sampling)
Chain 3: Iteration: 4500 / 5000 [ 90%]  (Sampling)
Chain 2: Iteration: 3300 / 5000 [ 66%]  (Sampling)
Chain 1: Iteration: 2600 / 5000 [ 52%]  (Sampling)
Chain 3: Iteration: 4600 / 5000 [ 92%]  (Sampling)
Chain 2: Iteration: 3400 / 5000 [ 68%]  (Sampling)
Chain 3: Iteration: 4700 / 5000 [ 94%]  (Sampling)
Chain 2: Iteration: 3500 / 5000 [ 70%]  (Sampling)
Chain 1: Iteration: 2700 / 5000 [ 54%]  (Sampling)
Chain 2: Iteration: 3600 / 5000 [ 72%]  (Sampling)
Chain 3: Iteration: 4800 / 5000 [ 96%]  (Sampling)
Chain 2: Iteration: 3700 / 5000 [ 74%]  (Sampling)
Chain 2: Iteration: 3800 / 5000 [ 76%]  (Sampling)
Chain 3: Iteration: 4900 / 5000 [ 98%]  (Sampling)
Chain 1: Iteration: 2800 / 5000 [ 56%]  (Sampling)
Chain 2: Iteration: 3900 / 5000 [ 78%]  (Sampling)
Chain 3: Iteration: 5000 / 5000 [100%]  (Sampling)
Chain 3: 
Chain 3:  Elapsed Time: 248.841 seconds (Warm-up)
Chain 3:                344.837 seconds (Sampling)
Chain 3:                593.678 seconds (Total)
Chain 3: 
Chain 2: Iteration: 4000 / 5000 [ 80%]  (Sampling)
Chain 1: Iteration: 2900 / 5000 [ 58%]  (Sampling)
Chain 2: Iteration: 4100 / 5000 [ 82%]  (Sampling)
Chain 2: Iteration: 4200 / 5000 [ 84%]  (Sampling)
Chain 1: Iteration: 3000 / 5000 [ 60%]  (Sampling)
Chain 2: Iteration: 4300 / 5000 [ 86%]  (Sampling)
Chain 2: Iteration: 4400 / 5000 [ 88%]  (Sampling)
Chain 1: Iteration: 3100 / 5000 [ 62%]  (Sampling)
Chain 2: Iteration: 4500 / 5000 [ 90%]  (Sampling)
Chain 2: Iteration: 4600 / 5000 [ 92%]  (Sampling)
Chain 1: Iteration: 3200 / 5000 [ 64%]  (Sampling)
Chain 2: Iteration: 4700 / 5000 [ 94%]  (Sampling)
Chain 2: Iteration: 4800 / 5000 [ 96%]  (Sampling)
Chain 1: Iteration: 3300 / 5000 [ 66%]  (Sampling)
Chain 2: Iteration: 4900 / 5000 [ 98%]  (Sampling)
Chain 2: Iteration: 5000 / 5000 [100%]  (Sampling)
Chain 2: 
Chain 2:  Elapsed Time: 318.853 seconds (Warm-up)
Chain 2:                373.683 seconds (Sampling)
Chain 2:                692.536 seconds (Total)
Chain 2: 
Chain 1: Iteration: 3400 / 5000 [ 68%]  (Sampling)
Chain 1: Iteration: 3500 / 5000 [ 70%]  (Sampling)
Chain 1: Iteration: 3600 / 5000 [ 72%]  (Sampling)
Chain 1: Iteration: 3700 / 5000 [ 74%]  (Sampling)
Chain 1: Iteration: 3800 / 5000 [ 76%]  (Sampling)
Chain 1: Iteration: 3900 / 5000 [ 78%]  (Sampling)
Chain 1: Iteration: 4000 / 5000 [ 80%]  (Sampling)
Chain 1: Iteration: 4100 / 5000 [ 82%]  (Sampling)
Chain 1: Iteration: 4200 / 5000 [ 84%]  (Sampling)
Chain 1: Iteration: 4300 / 5000 [ 86%]  (Sampling)
Chain 1: Iteration: 4400 / 5000 [ 88%]  (Sampling)
Chain 1: Iteration: 4500 / 5000 [ 90%]  (Sampling)
Chain 1: Iteration: 4600 / 5000 [ 92%]  (Sampling)
Chain 1: Iteration: 4700 / 5000 [ 94%]  (Sampling)
Chain 1: Iteration: 4800 / 5000 [ 96%]  (Sampling)
Chain 1: Iteration: 4900 / 5000 [ 98%]  (Sampling)
Chain 1: Iteration: 5000 / 5000 [100%]  (Sampling)
Chain 1: 
Chain 1:  Elapsed Time: 358.761 seconds (Warm-up)
Chain 1:                617.469 seconds (Sampling)
Chain 1:                976.23 seconds (Total)
Chain 1: 
## data.frame(mgcv::smoothCon(s(REPORT_YEAR, k=3),  data=ltmp_sub)[[1]]$REPORT_YEAR) |>
##   bind_cols(ltmp_sub)

## Visualise alternative spline bases for the year smooth: the default
## thin-plate basis, a cubic regression spline ("cr"), and a cubic
## regression spline restricted to k = 3 basis functions.
gratia::basis(s(REPORT_YEAR), data = ltmp_sub) |> gratia::draw()

gratia::basis(s(REPORT_YEAR, bs = "cr"), data = ltmp_sub) |> gratia::draw()

gratia::basis(s(REPORT_YEAR, bs = "cr", k = 3), data = ltmp_sub) |> gratia::draw()

## Store the GLMM (brm3) cell means so they can be overlaid on the GAMM
## plot below; the year factor is reversed and converted to numeric for
## a continuous x-axis.
ltmp_glmm <- ltmp_sub.brm3 |>
  emmeans(~fREPORT_YEAR, type = "response") |>
  as.data.frame() |>
  mutate(
    fREPORT_YEAR = factor(fREPORT_YEAR, levels = rev(levels(fREPORT_YEAR))),
    REPORT_YEAR = as.numeric(as.character(fREPORT_YEAR))
  )
Warning: Method 'parse_bf' is deprecated. Please use 'brmsterms' instead.
## Overlay the GLMM (brm3) cell means and their HPD ribbon on the first
## panel of the GAMM (brm4) conditional-effects plot.
ltmp_sub.brm4 |>
  conditional_effects() |>
  plot() |>
  _[[1]] +
  geom_line(
    data = ltmp_glmm, inherit.aes = FALSE,
    aes(y = prob, x = REPORT_YEAR), colour = "blue"
  ) +
  geom_ribbon(
    data = ltmp_glmm, inherit.aes = FALSE,
    ## Bug fix: the ribbon's lower bound must be mapped to `ymin`; the
    ## original `min` is not a ggplot2 aesthetic and is silently ignored.
    aes(
      y = prob, x = REPORT_YEAR,
      ymin = lower.HPD, ymax = upper.HPD
    ), fill = "orange", alpha = 0.3
  ) +
  theme_classic() +
  scale_y_continuous("Live hard coral cover", label = scales::percent_format())
Setting all 'trials' variables to 1 by default if not specified otherwise.