diff --git a/DESCRIPTION b/DESCRIPTION
index b282d978..295459a6 100644
--- a/DESCRIPTION
+++ b/DESCRIPTION
@@ -84,9 +84,9 @@ Encoding: UTF-8
Language: en-US
Roxygen: list(markdown = TRUE)
RoxygenNote: 7.3.2
-SystemRequirements: Python (>= 2.7.0) with header files and shared
- library; TensorFlow (v1.14; https://www.tensorflow.org/); TensorFlow
- Probability (v0.7.0; https://www.tensorflow.org/probability/)
+SystemRequirements: Python (>= 3.7.0) with header files and shared
+ library; TensorFlow (>= v2.0.0; https://www.tensorflow.org/); TensorFlow
+ Probability (v0.8.0; https://www.tensorflow.org/probability/)
Collate:
'package.R'
'utils.R'
diff --git a/codemeta.json b/codemeta.json
index 52a52b64..798479d7 100644
--- a/codemeta.json
+++ b/codemeta.json
@@ -4,16 +4,17 @@
"identifier": "greta",
"description": "Write statistical models in R and fit them by MCMC and optimisation on CPUs and GPUs, using Google 'TensorFlow'. greta lets you write your own model like in BUGS, JAGS and Stan, except that you write models right in R, it scales well to massive datasets, and it’s easy to extend and build on. See the website for more information, including tutorials, examples, package documentation, and the greta forum.",
"name": "greta: Simple and Scalable Statistical Modelling in R",
- "codeRepository": "https://github.com/njtierney/greta",
+ "relatedLink": ["https://greta-stats.org", "https://CRAN.R-project.org/package=greta"],
+ "codeRepository": "https://github.com/greta-dev/greta",
"issueTracker": "https://github.com/greta-dev/greta/issues",
"license": "https://spdx.org/licenses/Apache-2.0",
- "version": "0.4.4",
+ "version": "0.5.0",
"programmingLanguage": {
"@type": "ComputerLanguage",
"name": "R",
"url": "https://r-project.org"
},
- "runtimePlatform": "R version 4.3.2 (2023-10-31)",
+ "runtimePlatform": "R version 4.4.2 (2024-10-31)",
"provider": {
"@id": "https://cran.r-project.org",
"@type": "Organization",
@@ -156,6 +157,18 @@
},
"sameAs": "https://CRAN.R-project.org/package=DiagrammeR"
},
+ {
+ "@type": "SoftwareApplication",
+ "identifier": "dplyr",
+ "name": "dplyr",
+ "provider": {
+ "@id": "https://cran.r-project.org",
+ "@type": "Organization",
+ "name": "Comprehensive R Archive Network (CRAN)",
+ "url": "https://cran.r-project.org"
+ },
+ "sameAs": "https://CRAN.R-project.org/package=dplyr"
+ },
{
"@type": "SoftwareApplication",
"identifier": "DiagrammeRsvg",
@@ -276,6 +289,18 @@
},
"sameAs": "https://CRAN.R-project.org/package=mvtnorm"
},
+ {
+ "@type": "SoftwareApplication",
+ "identifier": "purrr",
+ "name": "purrr",
+ "provider": {
+ "@id": "https://cran.r-project.org",
+ "@type": "Organization",
+ "name": "Comprehensive R Archive Network (CRAN)",
+ "url": "https://cran.r-project.org"
+ },
+ "sameAs": "https://CRAN.R-project.org/package=purrr"
+ },
{
"@type": "SoftwareApplication",
"identifier": "rmarkdown",
@@ -339,15 +364,27 @@
},
{
"@type": "SoftwareApplication",
- "identifier": "tidyverse",
- "name": "tidyverse",
+ "identifier": "tibble",
+ "name": "tibble",
+ "provider": {
+ "@id": "https://cran.r-project.org",
+ "@type": "Organization",
+ "name": "Comprehensive R Archive Network (CRAN)",
+ "url": "https://cran.r-project.org"
+ },
+ "sameAs": "https://CRAN.R-project.org/package=tibble"
+ },
+ {
+ "@type": "SoftwareApplication",
+ "identifier": "tidyr",
+ "name": "tidyr",
"provider": {
"@id": "https://cran.r-project.org",
"@type": "Organization",
"name": "Comprehensive R Archive Network (CRAN)",
"url": "https://cran.r-project.org"
},
- "sameAs": "https://CRAN.R-project.org/package=tidyverse"
+ "sameAs": "https://CRAN.R-project.org/package=tidyr"
},
{
"@type": "SoftwareApplication",
@@ -372,6 +409,18 @@
"url": "https://cran.r-project.org"
},
"sameAs": "https://CRAN.R-project.org/package=withr"
+ },
+ {
+ "@type": "SoftwareApplication",
+ "identifier": "rstudioapi",
+ "name": "rstudioapi",
+ "provider": {
+ "@id": "https://cran.r-project.org",
+ "@type": "Organization",
+ "name": "Comprehensive R Archive Network (CRAN)",
+ "url": "https://cran.r-project.org"
+ },
+ "sameAs": "https://CRAN.R-project.org/package=rstudioapi"
}
],
"softwareRequirements": {
@@ -379,7 +428,7 @@
"@type": "SoftwareApplication",
"identifier": "R",
"name": "R",
- "version": ">= 3.1.0"
+ "version": ">= 4.1.0"
},
"2": {
"@type": "SoftwareApplication",
@@ -409,7 +458,7 @@
"@type": "SoftwareApplication",
"identifier": "cli",
"name": "cli",
- "version": ">= 3.0.0",
+ "version": ">= 3.4.1",
"provider": {
"@id": "https://cran.r-project.org",
"@type": "Organization",
@@ -513,10 +562,22 @@
"sameAs": "https://CRAN.R-project.org/package=reticulate"
},
"13": {
+ "@type": "SoftwareApplication",
+ "identifier": "rlang",
+ "name": "rlang",
+ "provider": {
+ "@id": "https://cran.r-project.org",
+ "@type": "Organization",
+ "name": "Comprehensive R Archive Network (CRAN)",
+ "url": "https://cran.r-project.org"
+ },
+ "sameAs": "https://CRAN.R-project.org/package=rlang"
+ },
+ "14": {
"@type": "SoftwareApplication",
"identifier": "tensorflow",
"name": "tensorflow",
- "version": ">= 2.7.0",
+ "version": "== 2.16.0",
"provider": {
"@id": "https://cran.r-project.org",
"@type": "Organization",
@@ -525,7 +586,29 @@
},
"sameAs": "https://CRAN.R-project.org/package=tensorflow"
},
- "14": {
+ "15": {
+ "@type": "SoftwareApplication",
+ "identifier": "tools",
+ "name": "tools"
+ },
+ "16": {
+ "@type": "SoftwareApplication",
+ "identifier": "utils",
+ "name": "utils"
+ },
+ "17": {
+ "@type": "SoftwareApplication",
+ "identifier": "whisker",
+ "name": "whisker",
+ "provider": {
+ "@id": "https://cran.r-project.org",
+ "@type": "Organization",
+ "name": "Comprehensive R Archive Network (CRAN)",
+ "url": "https://cran.r-project.org"
+ },
+ "sameAs": "https://CRAN.R-project.org/package=whisker"
+ },
+ "18": {
"@type": "SoftwareApplication",
"identifier": "yesno",
"name": "yesno",
@@ -537,9 +620,9 @@
},
"sameAs": "https://CRAN.R-project.org/package=yesno"
},
- "SystemRequirements": "Python (>= 2.7.0) with header files and shared\n library; TensorFlow (v1.14; https://www.tensorflow.org/); TensorFlow\n Probability (v0.7.0; https://www.tensorflow.org/probability/)"
+ "SystemRequirements": "Python (>= 3.7.0) with header files and shared\n library; TensorFlow (>= v2.0.0; https://www.tensorflow.org/); TensorFlow\n Probability (v0.8.0; https://www.tensorflow.org/probability/)"
},
- "fileSize": "2098.014KB",
+ "fileSize": "1625.784KB",
"citation": [
{
"@type": "ScholarlyArticle",
@@ -569,8 +652,7 @@
}
}
],
- "relatedLink": "https://greta-stats.org",
- "releaseNotes": "https://github.com/njtierney/greta/blob/master/NEWS.md",
- "readme": "https://github.com/njtierney/greta/blob/master/README.md",
- "contIntegration": ["https://app.codecov.io/gh/greta-dev/greta?branch=master", "https://github.com/greta-dev/greta/actions"]
+ "releaseNotes": "https://github.com/greta-dev/greta/blob/master/NEWS.md",
+ "readme": "https://github.com/greta-dev/greta/blob/master/README.md",
+ "contIntegration": ["https://github.com/greta-dev/greta/actions", "https://app.codecov.io/gh/greta-dev/greta?branch=master"]
}
diff --git a/cran-comments.md b/cran-comments.md
index 4066c41c..452a0cfa 100644
--- a/cran-comments.md
+++ b/cran-comments.md
@@ -1,20 +1,28 @@
## Test environments
-* local R installation, R 4.3.3
+* local R installation, R 4.4.2
* win-builder (devel)
## R CMD check results
-0 errors | 0 warnings | 0 notes
+0 errors | 0 warnings | 1 note
-* Days since last update: 38
+> Found the following (possibly) invalid URLs:
+ URL: http://www.phidot.org/software/mark/docs/book/
+ From: inst/doc/example_models.html
+ Status: 403
+ Message: Forbidden
+
+We could not find an issue with this link, nor could we find an alternative link to use instead.
+
+* Days since last update: 244
## Submission notes
-This release is due to a bug we discovered with trailing commas from the `glue` package (https://github.com/tidyverse/glue/issues/320). It is a very small change, but it is very critical. We have also
+This release is a substantial overhaul of greta's internals, migrating them from TensorFlow 1 to TensorFlow 2.
## revdepcheck results
-We checked 3 reverse dependencies, comparing R CMD check results across CRAN and dev versions of this package.
+We checked 1 reverse dependency, comparing R CMD check results across CRAN and dev versions of this package.
* We saw 0 new problems
* We failed to check 0 packages
diff --git a/inst/CITATION b/inst/CITATION
index f857195c..e80c2123 100644
--- a/inst/CITATION
+++ b/inst/CITATION
@@ -1,19 +1,11 @@
-citHeader("To cite greta in publications use:")
-
-citEntry(entry = "Article",
- title = "{greta}: simple and scalable statistical modelling in R",
- author = personList(as.person("Nick Golding")),
- journal = "Journal of Open Source Software",
- year = "2019",
- volume = "4",
- number = "40",
- pages = "1601",
- url = "http://dx.doi.org/10.21105/joss.01601",
- doi = "10.21105/joss.01601",
-
- textVersion =
- paste("Nick Golding (2019).",
- "greta: simple and scalable statistical modelling in R.",
- "Journal of Open Source Software, 4(40), 1601.",
- "URL http://dx.doi.org/10.21105/joss.01601.")
+bibentry(
+ bibtype = "Article",
+ title = "{greta}: simple and scalable statistical modelling in R",
+ author = "Nick Golding",
+ journal = "Journal of Open Source Software",
+ year = 2019,
+ volume = 4,
+ number = 40,
+ pages = 1601,
+ doi = "10.21105/joss.01601"
)
diff --git a/revdep/checks.noindex/libraries.csv b/revdep/checks.noindex/libraries.csv
index 0b02e6a8..b0fcc7d9 100644
--- a/revdep/checks.noindex/libraries.csv
+++ b/revdep/checks.noindex/libraries.csv
@@ -1,4 +1,44 @@
package,old,new,delta
-greta,0.4.3,0.4.4,*
-future,NA,1.33.1,*
-Rcpp,NA,1.0.12,*
+greta,0.4.5,0.5.0,*
+abind,1.4-8,1.4-8,
+backports,1.5.0,1.5.0,
+base64enc,0.1-3,0.1-3,
+callr,3.7.6,3.7.6,
+cli,3.6.3,3.6.3,
+coda,0.19-4.1,0.19-4.1,
+config,0.3.2,0.3.2,
+crayon,1.5.3,1.5.3,
+digest,0.6.37,0.6.37,
+future,1.34.0,1.34.0,
+globals,0.16.3,0.16.3,
+glue,1.8.0,1.8.0,
+here,1.0.1,1.0.1,
+hms,1.1.3,1.1.3,
+jsonlite,1.8.9,1.8.9,
+lifecycle,1.0.4,1.0.4,
+listenv,0.9.1,0.9.1,
+magrittr,2.0.3,2.0.3,
+parallelly,1.38.0,1.38.0,
+pkgconfig,2.0.3,2.0.3,
+png,0.1-8,0.1-8,
+prettyunits,1.2.0,1.2.0,
+processx,3.8.4,3.8.4,
+progress,1.2.3,1.2.3,
+ps,1.8.1,1.8.1,
+R6,2.5.1,2.5.1,
+rappdirs,0.3.3,0.3.3,
+Rcpp,1.0.13-1,1.0.13-1,
+RcppTOML,0.2.2,0.2.2,
+reticulate,1.39.0,1.39.0,
+rlang,1.1.4,1.1.4,
+rprojroot,2.0.4,2.0.4,
+rstudioapi,0.17.1,0.17.1,
+tensorflow,2.16.0,2.16.0,
+tfautograph,0.3.2,0.3.2,
+tfruns,1.5.3,1.5.3,
+tidyselect,1.2.1,1.2.1,
+vctrs,0.6.5,0.6.5,
+whisker,0.4.1,0.4.1,
+withr,3.0.2,3.0.2,
+yaml,2.3.10,2.3.10,
+yesno,0.1.3,0.1.3,
diff --git a/revdep/checks.noindex/runMCMCbtadjust/new/libraries.txt b/revdep/checks.noindex/runMCMCbtadjust/new/libraries.txt
index e21edfb6..862d1852 100644
--- a/revdep/checks.noindex/runMCMCbtadjust/new/libraries.txt
+++ b/revdep/checks.noindex/runMCMCbtadjust/new/libraries.txt
@@ -1,125 +1,180 @@
Library: /Users/nick/github/greta-dev/greta/revdep/library.noindex/greta/new
-future (1.33.1)
-greta (0.4.4)
-Rcpp (1.0.12)
+abind (1.4-8)
+backports (1.5.0)
+base64enc (0.1-3)
+callr (3.7.6)
+cli (3.6.3)
+coda (0.19-4.1)
+config (0.3.2)
+crayon (1.5.3)
+digest (0.6.37)
+future (1.34.0)
+globals (0.16.3)
+glue (1.8.0)
+greta (0.5.0)
+here (1.0.1)
+hms (1.1.3)
+jsonlite (1.8.9)
+lifecycle (1.0.4)
+listenv (0.9.1)
+magrittr (2.0.3)
+parallelly (1.38.0)
+pkgconfig (2.0.3)
+png (0.1-8)
+prettyunits (1.2.0)
+processx (3.8.4)
+progress (1.2.3)
+ps (1.8.1)
+R6 (2.5.1)
+rappdirs (0.3.3)
+Rcpp (1.0.13-1)
+RcppTOML (0.2.2)
+reticulate (1.39.0)
+rlang (1.1.4)
+rprojroot (2.0.4)
+rstudioapi (0.17.1)
+tensorflow (2.16.0)
+tfautograph (0.3.2)
+tfruns (1.5.3)
+tidyselect (1.2.1)
+vctrs (0.6.5)
+whisker (0.4.1)
+withr (3.0.2)
+yaml (2.3.10)
+yesno (0.1.3)
Library: /Users/nick/github/greta-dev/greta/revdep/library.noindex/runMCMCbtadjust
-abind (1.4-5)
-backports (1.4.1)
-base64enc (0.1-3)
-BH (1.84.0-0)
-bit (4.0.5)
-bit64 (4.0.5)
-brio (1.1.4)
-broom (1.0.5)
-broom.helpers (1.14.0)
-callr (3.7.3)
-checkmate (2.3.1)
-cli (3.6.2)
-clipr (0.8.0)
-coda (0.19-4)
-codetools (0.2-19)
-colorspace (2.1-0)
-commonmark (1.9.0)
-config (0.3.2)
-cpp11 (0.4.7)
-crayon (1.5.2)
-desc (1.4.3)
-diffobj (0.3.5)
-digest (0.6.34)
-dplyr (1.1.4)
-ellipsis (0.3.2)
-evaluate (0.23)
-fansi (1.0.6)
-farver (2.1.1)
-forcats (1.0.0)
-fs (1.6.3)
-future (1.33.1)
-generics (0.1.3)
-GGally (2.2.0)
-ggmcmc (1.5.1.1)
-ggplot2 (3.4.4)
-ggstats (0.5.1)
-globals (0.16.2)
-glue (1.7.0)
-gridExtra (2.3)
-gtable (0.3.4)
-haven (2.5.4)
-here (1.0.1)
-highr (0.10)
-hms (1.1.3)
-igraph (1.6.0)
-inline (0.3.19)
-isoband (0.2.7)
-jsonlite (1.8.8)
-knitr (1.45)
-labeling (0.4.3)
-labelled (2.12.0)
-lattice (0.22-5)
-lifecycle (1.0.4)
-listenv (0.9.0)
-loo (2.6.0)
-magrittr (2.0.3)
-markdown (1.12)
-MASS (7.3-60.0.1)
-Matrix (1.6-5)
-matrixStats (1.2.0)
-mgcv (1.9-1)
-munsell (0.5.0)
-nimble (1.0.1)
-nlme (3.1-164)
-numDeriv (2016.8-1.1)
-parallelly (1.36.0)
-patchwork (1.2.0)
-pillar (1.9.0)
-pkgbuild (1.4.3)
-pkgconfig (2.0.3)
-pkgload (1.3.3)
-plyr (1.8.9)
-png (0.1-8)
-pracma (2.4.4)
-praise (1.0.0)
-prettyunits (1.2.0)
-processx (3.8.3)
-progress (1.2.3)
-ps (1.7.5)
-purrr (1.0.2)
-QuickJSR (1.0.9)
-R6 (2.5.1)
-rappdirs (0.3.3)
-RColorBrewer (1.1-3)
-Rcpp (1.0.12)
-RcppEigen (0.3.3.9.4)
-RcppParallel (5.1.7)
-RcppTOML (0.2.2)
-readr (2.1.5)
-rematch2 (2.1.2)
-reticulate (1.34.0)
-rjags (4-15)
-rlang (1.1.3)
-rprojroot (2.0.4)
-rstan (2.32.5)
-rstudioapi (0.15.0)
-runjags (2.2.2-1.1)
-scales (1.3.0)
-StanHeaders (2.32.5)
-stringi (1.8.3)
-stringr (1.5.1)
-tensorflow (2.14.0)
-testthat (3.2.1)
-tfautograph (0.3.2)
-tfruns (1.5.1)
-tibble (3.2.1)
-tidyr (1.3.0)
-tidyselect (1.2.0)
-tzdb (0.4.0)
-utf8 (1.2.4)
-vctrs (0.6.5)
-viridisLite (0.4.2)
-vroom (1.6.5)
-waldo (0.5.2)
-whisker (0.4.1)
-withr (2.5.2)
-xfun (0.41)
-yaml (2.3.8)
-yesno (0.1.2)
+abind (1.4-8)
+backports (1.5.0)
+base64enc (0.1-3)
+BH (1.84.0-0)
+brio (1.1.5)
+bslib (0.8.0)
+cachem (1.1.0)
+callr (3.7.6)
+checkmate (2.3.2)
+cli (3.6.3)
+cluster (2.1.6)
+coda (0.19-4.1)
+codetools (0.2-20)
+colorspace (2.1-1)
+commonmark (1.9.2)
+config (0.3.2)
+cpp11 (0.5.0)
+crayon (1.5.3)
+data.table (1.16.2)
+desc (1.4.3)
+diffobj (0.3.5)
+digest (0.6.37)
+distributional (0.5.0)
+dplyr (1.1.4)
+evaluate (1.0.1)
+fansi (1.0.6)
+farver (2.1.2)
+fastmap (1.2.0)
+fontawesome (0.5.2)
+forcats (1.0.0)
+foreign (0.8-87)
+Formula (1.2-5)
+fs (1.6.5)
+future (1.34.0)
+generics (0.1.3)
+GGally (2.2.1)
+ggmcmc (1.5.1.1)
+ggplot2 (3.5.1)
+ggstats (0.7.0)
+globals (0.16.3)
+glue (1.8.0)
+gridExtra (2.3)
+gtable (0.3.6)
+here (1.0.1)
+highr (0.11)
+Hmisc (5.2-0)
+hms (1.1.3)
+htmlTable (2.4.3)
+htmltools (0.5.8.1)
+htmlwidgets (1.6.4)
+igraph (2.1.1)
+inline (0.3.19)
+isoband (0.2.7)
+jquerylib (0.1.4)
+jsonlite (1.8.9)
+knitr (1.48)
+labeling (0.4.3)
+lattice (0.22-6)
+lifecycle (1.0.4)
+listenv (0.9.1)
+loo (2.8.0)
+magrittr (2.0.3)
+markdown (1.13)
+MASS (7.3-61)
+Matrix (1.7-1)
+matrixStats (1.4.1)
+memoise (2.0.1)
+mgcv (1.9-1)
+mime (0.12)
+moments (0.14.1)
+munsell (0.5.1)
+nimble (1.2.1)
+nimbleAPT (1.0.6)
+nimbleHMC (0.2.2)
+nlme (3.1-166)
+nnet (7.3-19)
+numDeriv (2016.8-1.1)
+parallelly (1.38.0)
+patchwork (1.3.0)
+pillar (1.9.0)
+pkgbuild (1.4.5)
+pkgconfig (2.0.3)
+pkgload (1.4.0)
+plyr (1.8.9)
+png (0.1-8)
+posterior (1.6.0)
+pracma (2.4.4)
+praise (1.0.0)
+prettyunits (1.2.0)
+processx (3.8.4)
+progress (1.2.3)
+ps (1.8.1)
+purrr (1.0.2)
+QuickJSR (1.4.0)
+R6 (2.5.1)
+rappdirs (0.3.3)
+RColorBrewer (1.1-3)
+Rcpp (1.0.13-1)
+RcppEigen (0.3.4.0.2)
+RcppParallel (5.1.9)
+RcppTOML (0.2.2)
+reticulate (1.39.0)
+rjags (4-16)
+rlang (1.1.4)
+rmarkdown (2.29)
+rpart (4.1.23)
+rprojroot (2.0.4)
+rstan (2.32.6)
+rstudioapi (0.17.1)
+runjags (2.2.2-4)
+sass (0.4.9)
+scales (1.3.0)
+StanHeaders (2.32.10)
+stringi (1.8.4)
+stringr (1.5.1)
+tensorA (0.36.2.1)
+tensorflow (2.16.0)
+testthat (3.2.1.1)
+tfautograph (0.3.2)
+tfruns (1.5.3)
+tibble (3.2.1)
+tidyr (1.3.1)
+tidyselect (1.2.1)
+tinytex (0.54)
+utf8 (1.2.4)
+vctrs (0.6.5)
+viridis (0.6.5)
+viridisLite (0.4.2)
+waldo (0.6.0)
+whisker (0.4.1)
+withr (3.0.2)
+xfun (0.49)
+yaml (2.3.10)
+yesno (0.1.3)
diff --git a/revdep/checks.noindex/runMCMCbtadjust/new/runMCMCbtadjust.Rcheck/00check.log b/revdep/checks.noindex/runMCMCbtadjust/new/runMCMCbtadjust.Rcheck/00check.log
index bde7cad7..45a34797 100644
--- a/revdep/checks.noindex/runMCMCbtadjust/new/runMCMCbtadjust.Rcheck/00check.log
+++ b/revdep/checks.noindex/runMCMCbtadjust/new/runMCMCbtadjust.Rcheck/00check.log
@@ -1,15 +1,15 @@
* using log directory ‘/Users/nick/github/greta-dev/greta/revdep/checks.noindex/runMCMCbtadjust/new/runMCMCbtadjust.Rcheck’
-* using R version 4.3.2 (2023-10-31)
-* using platform: aarch64-apple-darwin20 (64-bit)
+* using R version 4.4.2 (2024-10-31)
+* using platform: aarch64-apple-darwin20
* R was compiled by
Apple clang version 14.0.0 (clang-1400.0.29.202)
GNU Fortran (GCC) 12.2.0
-* running under: macOS Sonoma 14.0
+* running under: macOS Sonoma 14.5
* using session charset: UTF-8
* using options ‘--no-manual --no-build-vignettes’
* checking for file ‘runMCMCbtadjust/DESCRIPTION’ ... OK
* checking extension type ... Package
-* this is package ‘runMCMCbtadjust’ version ‘1.0.5’
+* this is package ‘runMCMCbtadjust’ version ‘1.1.2’
* package encoding: UTF-8
* checking package namespace information ... OK
* checking package dependencies ... OK
@@ -23,15 +23,12 @@
* checking installed package size ... OK
* checking package directory ... OK
* checking ‘build’ directory ... OK
-* checking DESCRIPTION meta-information ... WARNING
-Non-standard license specification:
- CECILL-2.1
-Standardizable: FALSE
+* checking DESCRIPTION meta-information ... OK
* checking top-level files ... OK
* checking for left-over files ... OK
* checking index information ... OK
* checking package subdirectories ... OK
-* checking R files for non-ASCII characters ... OK
+* checking code files for non-ASCII characters ... OK
* checking R files for syntax errors ... OK
* checking whether the package can be loaded ... OK
* checking whether the package can be loaded with stated dependencies ... OK
@@ -39,7 +36,11 @@ Standardizable: FALSE
* checking whether the namespace can be loaded with stated dependencies ... OK
* checking whether the namespace can be unloaded cleanly ... OK
* checking loading without being on the library search path ... OK
-* checking dependencies in R code ... OK
+* checking dependencies in R code ... NOTE
+'library' or 'require' calls in package code:
+ ‘nimble’ ‘nimbleAPT’
+ Please use :: or requireNamespace() instead.
+ See section 'Suggested packages' in the 'Writing R Extensions' manual.
* checking S3 generic/method consistency ... OK
* checking replacement functions ... OK
* checking foreign function calls ... OK
@@ -59,9 +60,11 @@ Standardizable: FALSE
* checking tests ... OK
Running ‘testthat.R’
* checking for unstated dependencies in vignettes ... OK
-* checking package vignettes in ‘inst/doc’ ... OK
+* checking package vignettes ... OK
* checking running R code from vignettes ... NONE
- ‘runMCMCbtadjust_Presentation.Rmd’ using ‘UTF-8’... OK
+ ‘runMCMCbtadjust_Nimble_variations.Rmd’ using ‘UTF-8’... OK
+ ‘runMCMCbtadjust_Pres.Rmd’ using ‘UTF-8’... OK
+ ‘runMCMCbtadjust_extraCalculations.Rmd’ using ‘UTF-8’... OK
* checking re-building of vignette outputs ... SKIPPED
* DONE
-Status: 1 WARNING
+Status: 1 NOTE
diff --git a/revdep/checks.noindex/runMCMCbtadjust/new/runMCMCbtadjust.Rcheck/00install.out b/revdep/checks.noindex/runMCMCbtadjust/new/runMCMCbtadjust.Rcheck/00install.out
index 173f6d8a..16cb286f 100644
--- a/revdep/checks.noindex/runMCMCbtadjust/new/runMCMCbtadjust.Rcheck/00install.out
+++ b/revdep/checks.noindex/runMCMCbtadjust/new/runMCMCbtadjust.Rcheck/00install.out
@@ -6,6 +6,7 @@
** byte-compile and prepare package for lazy loading
** help
*** installing help indices
+*** copying figures
** building package indices
** installing vignettes
** testing if installed package can be loaded from temporary location
diff --git a/revdep/checks.noindex/runMCMCbtadjust/new/runMCMCbtadjust.Rcheck/runMCMCbtadjust-Ex.R b/revdep/checks.noindex/runMCMCbtadjust/new/runMCMCbtadjust.Rcheck/runMCMCbtadjust-Ex.R
index ece014bd..05b5a58c 100644
--- a/revdep/checks.noindex/runMCMCbtadjust/new/runMCMCbtadjust.Rcheck/runMCMCbtadjust-Ex.R
+++ b/revdep/checks.noindex/runMCMCbtadjust/new/runMCMCbtadjust.Rcheck/runMCMCbtadjust-Ex.R
@@ -6,72 +6,121 @@ library('runMCMCbtadjust')
base::assign(".oldSearch", base::search(), pos = 'CheckExEnv')
base::assign(".old_wd", base::getwd(), pos = 'CheckExEnv')
cleanEx()
-nameEx("runMCMC_btadjust")
-### * runMCMC_btadjust
+nameEx("findMCMC_strong_corrs")
+### * findMCMC_strong_corrs
flush(stderr()); flush(stdout())
-### Name: runMCMC_btadjust
-### Title: runMCMC_btadjust
-### Aliases: runMCMC_btadjust
+### Name: findMCMC_strong_corrs
+### Title: findMCMC_strong_corrs
+### Aliases: findMCMC_strong_corrs
### ** Examples
- #\code{
-# for examples with Nimble or Greta, see the Vignette.
-# condition variable of whether installation is OK with Jags to avoid error durong package check
-condition_jags<-TRUE
-if (nchar(system.file(package='rjags'))==0) {condition_jags<-FALSE}
-if (nchar(system.file(package='runjags'))==0) {condition_jags<-FALSE}
-if (condition_jags)
-{suppressWarnings(temp<-runjags::testjags(silent=TRUE))
- if(!(temp$JAGS.available&temp$JAGS.found&temp$JAGS.major==4)) {condition_jags<-FALSE}}
-
-if (condition_jags) {
-#generating data
-set.seed(1)
-y1000<-rnorm(n=1000,mean=600,sd=30)
-ModelData <-list(mass = y1000,nobs = length(y1000))
+## Not run:
+##D #generating data
+##D set.seed(1)
+##D y1000<-rnorm(n=1000,mean=600,sd=30)
+##D ModelData <-list(mass = y1000,nobs = length(y1000))
+##D
+##D #writing the Jags code as a character chain in R
+##D modeltotransfer<-"model {
+##D
+##D # Priors
+##D population.mean ~ dunif(0,5000)
+##D population.sd ~ dunif(0,100)
+##D
+##D # Precision = 1/variance: Normal distribution parameterized by precision in Jags
+##D population.variance <- population.sd * population.sd
+##D precision <- 1 / population.variance
+##D
+##D # Likelihood
+##D for(i in 1:nobs){
+##D mass[i] ~ dnorm(population.mean, precision)
+##D }
+##D }"
+##D
+##D #specifying the initial values
+##D ModelInits <- function()
+##D {list (population.mean = rnorm(1,600,90), population.sd = runif(1, 1, 30))}
+##D params <- c("population.mean", "population.sd", "population.variance")
+##D K<-3
+##D set.seed(1)
+##D Inits<-lapply(1:K,function(x){ModelInits()})
+##D
+##D # running runMCMC_btadjust with MCMC_language="Jags":
+##D set.seed(1)
+##D out.mcmc.Coda<-runMCMC_btadjust(MCMC_language="Jags", code=modeltotransfer,
+##D data=ModelData,
+##D Nchains=K, params=params, inits=Inits,
+##D niter.min=1000, niter.max=300000,
+##D nburnin.min=100, nburnin.max=200000,
+##D thin.min=1, thin.max=1000,
+##D neff.min=1000, conv.max=1.05,
+##D control=list(print.diagnostics=TRUE, neff.method="Coda"))
+##D
+##D findMCMC_strong_corrs(out.mcmc.Coda)
+## End(Not run)
-#writing the Jags code as a character chain in R
-modeltotransfer<-"model {
-# Priors
-population.mean ~ dunif(0,5000)
-population.sd ~ dunif(0,100)
-# Precision = 1/variance: Normal distribution parameterized by precision in Jags
-population.variance <- population.sd * population.sd
-precision <- 1 / population.variance
+cleanEx()
+nameEx("runMCMC_btadjust")
+### * runMCMC_btadjust
-# Likelihood
-for(i in 1:nobs){
- mass[i] ~ dnorm(population.mean, precision)
- }
- }"
+flush(stderr()); flush(stdout())
-#specifying the initial values
-ModelInits <- function()
-{list (population.mean = rnorm(1,600,90), population.sd = runif(1, 1, 30))}
-params <- c("population.mean", "population.sd", "population.variance")
-K<-3
-set.seed(1)
-Inits<-lapply(1:K,function(x){ModelInits()})
+### Name: runMCMC_btadjust
+### Title: runMCMC_btadjust
+### Aliases: runMCMC_btadjust
-# running runMCMC_btadjust with MCMC_language="Jags":
-set.seed(1)
-out.mcmc.Coda<-runMCMC_btadjust(MCMC_language="Jags", code=modeltotransfer,
-data=ModelData,
-Nchains=K, params=params, inits=Inits,
-niter.min=1000, niter.max=300000,
-nburnin.min=100, nburnin.max=200000,
-thin.min=1, thin.max=1000,
-neff.min=1000, conv.max=1.05,
-control=list(print.diagnostics=TRUE, neff.method="Coda"))
+### ** Examples
-summary(out.mcmc.Coda)
-}
-#}
+# for examples with Nimble or Greta, see the Presentation Vignette.
+## Not run:
+##D #generating data
+##D set.seed(1)
+##D y1000<-rnorm(n=1000,mean=600,sd=30)
+##D ModelData <-list(mass = y1000,nobs = length(y1000))
+##D
+##D #writing the Jags code as a character chain in R
+##D modeltotransfer<-"model {
+##D
+##D # Priors
+##D population.mean ~ dunif(0,5000)
+##D population.sd ~ dunif(0,100)
+##D
+##D # Precision = 1/variance: Normal distribution parameterized by precision in Jags
+##D population.variance <- population.sd * population.sd
+##D precision <- 1 / population.variance
+##D
+##D # Likelihood
+##D for(i in 1:nobs){
+##D mass[i] ~ dnorm(population.mean, precision)
+##D }
+##D }"
+##D
+##D #specifying the initial values
+##D ModelInits <- function()
+##D {list (population.mean = rnorm(1,600,90), population.sd = runif(1, 1, 30))}
+##D params <- c("population.mean", "population.sd", "population.variance")
+##D K<-3
+##D set.seed(1)
+##D Inits<-lapply(1:K,function(x){ModelInits()})
+##D
+##D # running runMCMC_btadjust with MCMC_language="Jags":
+##D set.seed(1)
+##D out.mcmc.Coda<-runMCMC_btadjust(MCMC_language="Jags", code=modeltotransfer,
+##D data=ModelData,
+##D Nchains=K, params=params, inits=Inits,
+##D niter.min=1000, niter.max=300000,
+##D nburnin.min=100, nburnin.max=200000,
+##D thin.min=1, thin.max=1000,
+##D neff.min=1000, conv.max=1.05,
+##D control=list(print.diagnostics=TRUE, neff.method="Coda"))
+##D
+##D summary(out.mcmc.Coda)
+## End(Not run)
diff --git a/revdep/checks.noindex/runMCMCbtadjust/new/runMCMCbtadjust.Rcheck/runMCMCbtadjust-Ex.Rout b/revdep/checks.noindex/runMCMCbtadjust/new/runMCMCbtadjust.Rcheck/runMCMCbtadjust-Ex.Rout
index 45b73671..24136c4e 100644
--- a/revdep/checks.noindex/runMCMCbtadjust/new/runMCMCbtadjust.Rcheck/runMCMCbtadjust-Ex.Rout
+++ b/revdep/checks.noindex/runMCMCbtadjust/new/runMCMCbtadjust.Rcheck/runMCMCbtadjust-Ex.Rout
@@ -1,7 +1,7 @@
-R version 4.3.2 (2023-10-31) -- "Eye Holes"
-Copyright (C) 2023 The R Foundation for Statistical Computing
-Platform: aarch64-apple-darwin20 (64-bit)
+R version 4.4.2 (2024-10-31) -- "Pile of Leaves"
+Copyright (C) 2024 The R Foundation for Statistical Computing
+Platform: aarch64-apple-darwin20
R is free software and comes with ABSOLUTELY NO WARRANTY.
You are welcome to redistribute it under certain conditions.
@@ -25,6 +25,65 @@ Type 'q()' to quit R.
> base::assign(".oldSearch", base::search(), pos = 'CheckExEnv')
> base::assign(".old_wd", base::getwd(), pos = 'CheckExEnv')
> cleanEx()
+> nameEx("findMCMC_strong_corrs")
+> ### * findMCMC_strong_corrs
+>
+> flush(stderr()); flush(stdout())
+>
+> ### Name: findMCMC_strong_corrs
+> ### Title: findMCMC_strong_corrs
+> ### Aliases: findMCMC_strong_corrs
+>
+> ### ** Examples
+>
+> ## Not run:
+> ##D #generating data
+> ##D set.seed(1)
+> ##D y1000<-rnorm(n=1000,mean=600,sd=30)
+> ##D ModelData <-list(mass = y1000,nobs = length(y1000))
+> ##D
+> ##D #writing the Jags code as a character chain in R
+> ##D modeltotransfer<-"model {
+> ##D
+> ##D # Priors
+> ##D population.mean ~ dunif(0,5000)
+> ##D population.sd ~ dunif(0,100)
+> ##D
+> ##D # Precision = 1/variance: Normal distribution parameterized by precision in Jags
+> ##D population.variance <- population.sd * population.sd
+> ##D precision <- 1 / population.variance
+> ##D
+> ##D # Likelihood
+> ##D for(i in 1:nobs){
+> ##D mass[i] ~ dnorm(population.mean, precision)
+> ##D }
+> ##D }"
+> ##D
+> ##D #specifying the initial values
+> ##D ModelInits <- function()
+> ##D {list (population.mean = rnorm(1,600,90), population.sd = runif(1, 1, 30))}
+> ##D params <- c("population.mean", "population.sd", "population.variance")
+> ##D K<-3
+> ##D set.seed(1)
+> ##D Inits<-lapply(1:K,function(x){ModelInits()})
+> ##D
+> ##D # running runMCMC_btadjust with MCMC_language="Jags":
+> ##D set.seed(1)
+> ##D out.mcmc.Coda<-runMCMC_btadjust(MCMC_language="Jags", code=modeltotransfer,
+> ##D data=ModelData,
+> ##D Nchains=K, params=params, inits=Inits,
+> ##D niter.min=1000, niter.max=300000,
+> ##D nburnin.min=100, nburnin.max=200000,
+> ##D thin.min=1, thin.max=1000,
+> ##D neff.min=1000, conv.max=1.05,
+> ##D control=list(print.diagnostics=TRUE, neff.method="Coda"))
+> ##D
+> ##D findMCMC_strong_corrs(out.mcmc.Coda)
+> ## End(Not run)
+>
+>
+>
+> cleanEx()
> nameEx("runMCMC_btadjust")
> ### * runMCMC_btadjust
>
@@ -36,111 +95,51 @@ Type 'q()' to quit R.
>
> ### ** Examples
>
-> #\code{
-> # for examples with Nimble or Greta, see the Vignette.
-> # condition variable of whether installation is OK with Jags to avoid error durong package check
-> condition_jags<-TRUE
-> if (nchar(system.file(package='rjags'))==0) {condition_jags<-FALSE}
-> if (nchar(system.file(package='runjags'))==0) {condition_jags<-FALSE}
-> if (condition_jags)
-+ {suppressWarnings(temp<-runjags::testjags(silent=TRUE))
-+ if(!(temp$JAGS.available&temp$JAGS.found&temp$JAGS.major==4)) {condition_jags<-FALSE}}
->
-> if (condition_jags) {
-+ #generating data
-+ set.seed(1)
-+ y1000<-rnorm(n=1000,mean=600,sd=30)
-+ ModelData <-list(mass = y1000,nobs = length(y1000))
-+
-+ #writing the Jags code as a character chain in R
-+ modeltotransfer<-"model {
-+
-+ # Priors
-+ population.mean ~ dunif(0,5000)
-+ population.sd ~ dunif(0,100)
-+
-+ # Precision = 1/variance: Normal distribution parameterized by precision in Jags
-+ population.variance <- population.sd * population.sd
-+ precision <- 1 / population.variance
-+
-+ # Likelihood
-+ for(i in 1:nobs){
-+ mass[i] ~ dnorm(population.mean, precision)
-+ }
-+ }"
-+
-+ #specifying the initial values
-+ ModelInits <- function()
-+ {list (population.mean = rnorm(1,600,90), population.sd = runif(1, 1, 30))}
-+ params <- c("population.mean", "population.sd", "population.variance")
-+ K<-3
-+ set.seed(1)
-+ Inits<-lapply(1:K,function(x){ModelInits()})
-+
-+ # running runMCMC_btadjust with MCMC_language="Jags":
-+ set.seed(1)
-+ out.mcmc.Coda<-runMCMC_btadjust(MCMC_language="Jags", code=modeltotransfer,
-+ data=ModelData,
-+ Nchains=K, params=params, inits=Inits,
-+ niter.min=1000, niter.max=300000,
-+ nburnin.min=100, nburnin.max=200000,
-+ thin.min=1, thin.max=1000,
-+ neff.min=1000, conv.max=1.05,
-+ control=list(print.diagnostics=TRUE, neff.method="Coda"))
-+
-+ summary(out.mcmc.Coda)
-+ }
-Compiling model graph
- Resolving undeclared variables
- Allocating nodes
-Graph information:
- Observed stochastic nodes: 1000
- Unobserved stochastic nodes: 2
- Total graph size: 1009
-
-Initializing model
-
-Cycle 1...
-[1] "###################################################################################"
-[1] "###################################################################################"
-[1] "Current state of diagnostics:"
- Nchains thin niter.tot Nvalues nu.burn
-MCMC parameters 3 1 1000 2700 1
-[1] "###################################################################################"
- max median mean name_max prop_ab_1p2 prop_ab_1p05
-Gelman_Upper_C_I 1 1 1 population.sd 0 0
-Gelman_Point_Est 1 1 1 population.sd 0 0
- prop_ab_1p01
-Gelman_Upper_C_I 0
-Gelman_Point_Est 0
-[1] "###################################################################################"
- min median mean name_min prop_bel_1000 prop_bel_5000
-Neff 1615 1787 1732 population.mean 0 1
- prop_bel_10000
-Neff 1
-[1] "###################################################################################"
-
-Iterations = 1:900
-Thinning interval = 1
-Number of chains = 3
-Sample size per chain = 900
-
-1. Empirical mean and standard deviation for each variable,
- plus standard error of the mean:
-
- Mean SD Naive SE Time-series SE
-population.mean 599.65 0.9868 0.01899 0.02478
-population.sd 31.08 0.6971 0.01342 0.01649
-population.variance 966.42 43.4011 0.83525 1.02801
-
-2. Quantiles for each variable:
-
- 2.5% 25% 50% 75% 97.5%
-population.mean 597.77 599.0 599.63 600.29 601.55
-population.sd 29.74 30.6 31.07 31.56 32.46
-population.variance 884.27 936.2 965.16 996.12 1053.78
-
-> #}
+> # for examples with Nimble or Greta, see the Presentation Vignette.
+> ## Not run:
+> ##D #generating data
+> ##D set.seed(1)
+> ##D y1000<-rnorm(n=1000,mean=600,sd=30)
+> ##D ModelData <-list(mass = y1000,nobs = length(y1000))
+> ##D
+> ##D #writing the Jags code as a character chain in R
+> ##D modeltotransfer<-"model {
+> ##D
+> ##D # Priors
+> ##D population.mean ~ dunif(0,5000)
+> ##D population.sd ~ dunif(0,100)
+> ##D
+> ##D # Precision = 1/variance: Normal distribution parameterized by precision in Jags
+> ##D population.variance <- population.sd * population.sd
+> ##D precision <- 1 / population.variance
+> ##D
+> ##D # Likelihood
+> ##D for(i in 1:nobs){
+> ##D mass[i] ~ dnorm(population.mean, precision)
+> ##D }
+> ##D }"
+> ##D
+> ##D #specifying the initial values
+> ##D ModelInits <- function()
+> ##D {list (population.mean = rnorm(1,600,90), population.sd = runif(1, 1, 30))}
+> ##D params <- c("population.mean", "population.sd", "population.variance")
+> ##D K<-3
+> ##D set.seed(1)
+> ##D Inits<-lapply(1:K,function(x){ModelInits()})
+> ##D
+> ##D # running runMCMC_btadjust with MCMC_language="Jags":
+> ##D set.seed(1)
+> ##D out.mcmc.Coda<-runMCMC_btadjust(MCMC_language="Jags", code=modeltotransfer,
+> ##D data=ModelData,
+> ##D Nchains=K, params=params, inits=Inits,
+> ##D niter.min=1000, niter.max=300000,
+> ##D nburnin.min=100, nburnin.max=200000,
+> ##D thin.min=1, thin.max=1000,
+> ##D neff.min=1000, conv.max=1.05,
+> ##D control=list(print.diagnostics=TRUE, neff.method="Coda"))
+> ##D
+> ##D summary(out.mcmc.Coda)
+> ## End(Not run)
>
>
>
@@ -149,7 +148,7 @@ population.variance 884.27 936.2 965.16 996.12 1053.78
> cleanEx()
> options(digits = 7L)
> base::cat("Time elapsed: ", proc.time() - base::get("ptime", pos = 'CheckExEnv'),"\n")
-Time elapsed: 1.415 0.03 1.506 0.017 0.022
+Time elapsed: 0.036 0.003 0.04 0 0
> grDevices::dev.off()
null device
1
diff --git a/revdep/checks.noindex/runMCMCbtadjust/new/runMCMCbtadjust.Rcheck/tests/testthat.Rout b/revdep/checks.noindex/runMCMCbtadjust/new/runMCMCbtadjust.Rcheck/tests/testthat.Rout
index 17dd8e1d..b45fd22a 100644
--- a/revdep/checks.noindex/runMCMCbtadjust/new/runMCMCbtadjust.Rcheck/tests/testthat.Rout
+++ b/revdep/checks.noindex/runMCMCbtadjust/new/runMCMCbtadjust.Rcheck/tests/testthat.Rout
@@ -1,7 +1,7 @@
-R version 4.3.2 (2023-10-31) -- "Eye Holes"
-Copyright (C) 2023 The R Foundation for Statistical Computing
-Platform: aarch64-apple-darwin20 (64-bit)
+R version 4.4.2 (2024-10-31) -- "Pile of Leaves"
+Copyright (C) 2024 The R Foundation for Statistical Computing
+Platform: aarch64-apple-darwin20
R is free software and comes with ABSOLUTELY NO WARRANTY.
You are welcome to redistribute it under certain conditions.
@@ -27,7 +27,7 @@ Type 'q()' to quit R.
> library(runMCMCbtadjust)
>
> test_check("runMCMCbtadjust")
-nimble version 1.0.1 is loaded.
+nimble version 1.2.1 is loaded.
For more information on NIMBLE and a User Manual,
please visit https://R-nimble.org.
@@ -42,32 +42,43 @@ The following object is masked from 'package:stats':
simulate
-Timing stopped at: 0.004 0 0.005
-Timing stopped at: 0.003 0 0.004
-Timing stopped at: 0.004 0 0.004
-Timing stopped at: 0.002 0 0.002
-Timing stopped at: 0.004 0 0.004
-Timing stopped at: 0.004 0 0.004
-Timing stopped at: 0.005 0 0.006
-Timing stopped at: 0.002 0 0.002
-Timing stopped at: 0.003 0 0.003
+The following object is masked from 'package:base':
+
+ declare
+
+[1] "control$seed is NULL. Replaced by 1"
+Timing stopped at: 0.005 0 0.005
Timing stopped at: 0.002 0 0.002
+Timing stopped at: 0.003 0 0.002
Timing stopped at: 0.002 0 0.003
Timing stopped at: 0.002 0 0.003
Timing stopped at: 0.002 0 0.002
+Timing stopped at: 0.003 0 0.003
+Timing stopped at: 0.003 0 0.003
+Timing stopped at: 0.003 0 0.002
+Timing stopped at: 0.003 0 0.002
Timing stopped at: 0.002 0 0.003
Timing stopped at: 0.003 0 0.003
-Timing stopped at: 0.003 0 0.004
+Timing stopped at: 0.002 0 0.003
+Timing stopped at: 0.003 0 0.002
+Timing stopped at: 0.002 0 0.002
+Timing stopped at: 0.003 0 0.002
+[1] "control$seed is NULL. Replaced by 1"
Timing stopped at: 0.004 0 0.005
-Timing stopped at: 0.003 0 0.004
-Timing stopped at: 0.004 0.001 0.006
-Timing stopped at: 0.005 0.001 0.005
+[1] "control$seed is NULL. Replaced by 1"
+Timing stopped at: 0.004 0.001 0.004
+[1] "control$seed is NULL. Replaced by 1"
+Timing stopped at: 0.004 0 0.004
+[1] "control$seed is NULL. Replaced by 1"
+Timing stopped at: 0.007 0.001 0.012
+[1] "control$seed is NULL. Replaced by 1"
Timing stopped at: 0.004 0.001 0.005
-Timing stopped at: 0.004 0 0.005
-[ FAIL 0 | WARN 1 | SKIP 0 | PASS 23 ]
+[1] "control$seed is NULL. Replaced by 1"
+Timing stopped at: 0.007 0.001 0.008
+[ FAIL 0 | WARN 2 | SKIP 0 | PASS 23 ]
-[ FAIL 0 | WARN 1 | SKIP 0 | PASS 23 ]
+[ FAIL 0 | WARN 2 | SKIP 0 | PASS 23 ]
>
> proc.time()
user system elapsed
- 2.619 0.085 2.729
+ 2.556 0.065 2.664
diff --git a/revdep/checks.noindex/runMCMCbtadjust/new/runMCMCbtadjust.Rcheck/tests/testthat/test-runMCMC_btadjust.R b/revdep/checks.noindex/runMCMCbtadjust/new/runMCMCbtadjust.Rcheck/tests/testthat/test-runMCMC_btadjust.R
index e12b9a73..54fd4b58 100644
--- a/revdep/checks.noindex/runMCMCbtadjust/new/runMCMCbtadjust.Rcheck/tests/testthat/test-runMCMC_btadjust.R
+++ b/revdep/checks.noindex/runMCMCbtadjust/new/runMCMCbtadjust.Rcheck/tests/testthat/test-runMCMC_btadjust.R
@@ -1,4 +1,4 @@
-context("tunMCMC_btadjust")
+context("runMCMC_btadjust")
### minimal setup for running runMCMC_btadjust: see vignette
set.seed(1)
@@ -135,7 +135,7 @@ testthat::test_that("errors", {
inits=Inits,params=params,neff.min=150,conv.max=1.05,Nchains=2,niter.max=2000,control=list(convType="Gelman",convtype.alpha=NULL))))
testthat::expect_error((runMCMC_btadjust(data=ModelData,constants=ModelConsts,code=ModelCode,MCMC_language="Nimble",
inits=Inits,params=params,neff.min=150,conv.max=1.05,Nchains=2,niter.max=2000,control=list(convType="Gelman",convtype.alpha=1.1))))
-
+
#should give an error because neff.method is not "Stan" or "Coda"
testthat::expect_error((runMCMC_btadjust(data=ModelData,constants=ModelConsts,code=ModelCode,MCMC_language="Nimble",
@@ -167,6 +167,7 @@ testthat::test_that("errors", {
inits=Inits,neff.min=150,conv.max=1.05,Nchains=2,niter.max=2000)))
+
})}
@@ -183,7 +184,7 @@ testthat::test_that("errors", {
testthat::expect_error((runMCMC_btadjust(data=ModelData.Jags,MCMC_language="Jags",
inits=Inits,params=params,neff.min=150,conv.max=1.05,Nchains=2,niter.max=2000)))
-
+
#should give an error because code is not of type character with MCMC_language="Jags"
testthat::expect_error((runMCMC_btadjust(data=ModelData.Jags,code=1,MCMC_language="Jags",
inits=Inits,params=params,neff.min=150,conv.max=1.05,Nchains=2,niter.max=2000)))
diff --git a/revdep/checks.noindex/runMCMCbtadjust/old/libraries.txt b/revdep/checks.noindex/runMCMCbtadjust/old/libraries.txt
index 57e7707d..811f0443 100644
--- a/revdep/checks.noindex/runMCMCbtadjust/old/libraries.txt
+++ b/revdep/checks.noindex/runMCMCbtadjust/old/libraries.txt
@@ -1,123 +1,180 @@
Library: /Users/nick/github/greta-dev/greta/revdep/library.noindex/greta/old
-greta (0.4.3)
+abind (1.4-8)
+backports (1.5.0)
+base64enc (0.1-3)
+callr (3.7.6)
+cli (3.6.3)
+coda (0.19-4.1)
+config (0.3.2)
+crayon (1.5.3)
+digest (0.6.37)
+future (1.34.0)
+globals (0.16.3)
+glue (1.8.0)
+greta (0.4.5)
+here (1.0.1)
+hms (1.1.3)
+jsonlite (1.8.9)
+lifecycle (1.0.4)
+listenv (0.9.1)
+magrittr (2.0.3)
+parallelly (1.38.0)
+pkgconfig (2.0.3)
+png (0.1-8)
+prettyunits (1.2.0)
+processx (3.8.4)
+progress (1.2.3)
+ps (1.8.1)
+R6 (2.5.1)
+rappdirs (0.3.3)
+Rcpp (1.0.13-1)
+RcppTOML (0.2.2)
+reticulate (1.39.0)
+rlang (1.1.4)
+rprojroot (2.0.4)
+rstudioapi (0.17.1)
+tensorflow (2.16.0)
+tfautograph (0.3.2)
+tfruns (1.5.3)
+tidyselect (1.2.1)
+vctrs (0.6.5)
+whisker (0.4.1)
+withr (3.0.2)
+yaml (2.3.10)
+yesno (0.1.3)
Library: /Users/nick/github/greta-dev/greta/revdep/library.noindex/runMCMCbtadjust
-abind (1.4-5)
-backports (1.4.1)
-base64enc (0.1-3)
-BH (1.84.0-0)
-bit (4.0.5)
-bit64 (4.0.5)
-brio (1.1.4)
-broom (1.0.5)
-broom.helpers (1.14.0)
-callr (3.7.3)
-checkmate (2.3.1)
-cli (3.6.2)
-clipr (0.8.0)
-coda (0.19-4)
-codetools (0.2-19)
-colorspace (2.1-0)
-commonmark (1.9.0)
-config (0.3.2)
-cpp11 (0.4.7)
-crayon (1.5.2)
-desc (1.4.3)
-diffobj (0.3.5)
-digest (0.6.34)
-dplyr (1.1.4)
-ellipsis (0.3.2)
-evaluate (0.23)
-fansi (1.0.6)
-farver (2.1.1)
-forcats (1.0.0)
-fs (1.6.3)
-future (1.33.1)
-generics (0.1.3)
-GGally (2.2.0)
-ggmcmc (1.5.1.1)
-ggplot2 (3.4.4)
-ggstats (0.5.1)
-globals (0.16.2)
-glue (1.7.0)
-gridExtra (2.3)
-gtable (0.3.4)
-haven (2.5.4)
-here (1.0.1)
-highr (0.10)
-hms (1.1.3)
-igraph (1.6.0)
-inline (0.3.19)
-isoband (0.2.7)
-jsonlite (1.8.8)
-knitr (1.45)
-labeling (0.4.3)
-labelled (2.12.0)
-lattice (0.22-5)
-lifecycle (1.0.4)
-listenv (0.9.0)
-loo (2.6.0)
-magrittr (2.0.3)
-markdown (1.12)
-MASS (7.3-60.0.1)
-Matrix (1.6-5)
-matrixStats (1.2.0)
-mgcv (1.9-1)
-munsell (0.5.0)
-nimble (1.0.1)
-nlme (3.1-164)
-numDeriv (2016.8-1.1)
-parallelly (1.36.0)
-patchwork (1.2.0)
-pillar (1.9.0)
-pkgbuild (1.4.3)
-pkgconfig (2.0.3)
-pkgload (1.3.3)
-plyr (1.8.9)
-png (0.1-8)
-pracma (2.4.4)
-praise (1.0.0)
-prettyunits (1.2.0)
-processx (3.8.3)
-progress (1.2.3)
-ps (1.7.5)
-purrr (1.0.2)
-QuickJSR (1.0.9)
-R6 (2.5.1)
-rappdirs (0.3.3)
-RColorBrewer (1.1-3)
-Rcpp (1.0.12)
-RcppEigen (0.3.3.9.4)
-RcppParallel (5.1.7)
-RcppTOML (0.2.2)
-readr (2.1.5)
-rematch2 (2.1.2)
-reticulate (1.34.0)
-rjags (4-15)
-rlang (1.1.3)
-rprojroot (2.0.4)
-rstan (2.32.5)
-rstudioapi (0.15.0)
-runjags (2.2.2-1.1)
-scales (1.3.0)
-StanHeaders (2.32.5)
-stringi (1.8.3)
-stringr (1.5.1)
-tensorflow (2.14.0)
-testthat (3.2.1)
-tfautograph (0.3.2)
-tfruns (1.5.1)
-tibble (3.2.1)
-tidyr (1.3.0)
-tidyselect (1.2.0)
-tzdb (0.4.0)
-utf8 (1.2.4)
-vctrs (0.6.5)
-viridisLite (0.4.2)
-vroom (1.6.5)
-waldo (0.5.2)
-whisker (0.4.1)
-withr (2.5.2)
-xfun (0.41)
-yaml (2.3.8)
-yesno (0.1.2)
+abind (1.4-8)
+backports (1.5.0)
+base64enc (0.1-3)
+BH (1.84.0-0)
+brio (1.1.5)
+bslib (0.8.0)
+cachem (1.1.0)
+callr (3.7.6)
+checkmate (2.3.2)
+cli (3.6.3)
+cluster (2.1.6)
+coda (0.19-4.1)
+codetools (0.2-20)
+colorspace (2.1-1)
+commonmark (1.9.2)
+config (0.3.2)
+cpp11 (0.5.0)
+crayon (1.5.3)
+data.table (1.16.2)
+desc (1.4.3)
+diffobj (0.3.5)
+digest (0.6.37)
+distributional (0.5.0)
+dplyr (1.1.4)
+evaluate (1.0.1)
+fansi (1.0.6)
+farver (2.1.2)
+fastmap (1.2.0)
+fontawesome (0.5.2)
+forcats (1.0.0)
+foreign (0.8-87)
+Formula (1.2-5)
+fs (1.6.5)
+future (1.34.0)
+generics (0.1.3)
+GGally (2.2.1)
+ggmcmc (1.5.1.1)
+ggplot2 (3.5.1)
+ggstats (0.7.0)
+globals (0.16.3)
+glue (1.8.0)
+gridExtra (2.3)
+gtable (0.3.6)
+here (1.0.1)
+highr (0.11)
+Hmisc (5.2-0)
+hms (1.1.3)
+htmlTable (2.4.3)
+htmltools (0.5.8.1)
+htmlwidgets (1.6.4)
+igraph (2.1.1)
+inline (0.3.19)
+isoband (0.2.7)
+jquerylib (0.1.4)
+jsonlite (1.8.9)
+knitr (1.48)
+labeling (0.4.3)
+lattice (0.22-6)
+lifecycle (1.0.4)
+listenv (0.9.1)
+loo (2.8.0)
+magrittr (2.0.3)
+markdown (1.13)
+MASS (7.3-61)
+Matrix (1.7-1)
+matrixStats (1.4.1)
+memoise (2.0.1)
+mgcv (1.9-1)
+mime (0.12)
+moments (0.14.1)
+munsell (0.5.1)
+nimble (1.2.1)
+nimbleAPT (1.0.6)
+nimbleHMC (0.2.2)
+nlme (3.1-166)
+nnet (7.3-19)
+numDeriv (2016.8-1.1)
+parallelly (1.38.0)
+patchwork (1.3.0)
+pillar (1.9.0)
+pkgbuild (1.4.5)
+pkgconfig (2.0.3)
+pkgload (1.4.0)
+plyr (1.8.9)
+png (0.1-8)
+posterior (1.6.0)
+pracma (2.4.4)
+praise (1.0.0)
+prettyunits (1.2.0)
+processx (3.8.4)
+progress (1.2.3)
+ps (1.8.1)
+purrr (1.0.2)
+QuickJSR (1.4.0)
+R6 (2.5.1)
+rappdirs (0.3.3)
+RColorBrewer (1.1-3)
+Rcpp (1.0.13-1)
+RcppEigen (0.3.4.0.2)
+RcppParallel (5.1.9)
+RcppTOML (0.2.2)
+reticulate (1.39.0)
+rjags (4-16)
+rlang (1.1.4)
+rmarkdown (2.29)
+rpart (4.1.23)
+rprojroot (2.0.4)
+rstan (2.32.6)
+rstudioapi (0.17.1)
+runjags (2.2.2-4)
+sass (0.4.9)
+scales (1.3.0)
+StanHeaders (2.32.10)
+stringi (1.8.4)
+stringr (1.5.1)
+tensorA (0.36.2.1)
+tensorflow (2.16.0)
+testthat (3.2.1.1)
+tfautograph (0.3.2)
+tfruns (1.5.3)
+tibble (3.2.1)
+tidyr (1.3.1)
+tidyselect (1.2.1)
+tinytex (0.54)
+utf8 (1.2.4)
+vctrs (0.6.5)
+viridis (0.6.5)
+viridisLite (0.4.2)
+waldo (0.6.0)
+whisker (0.4.1)
+withr (3.0.2)
+xfun (0.49)
+yaml (2.3.10)
+yesno (0.1.3)
diff --git a/revdep/checks.noindex/runMCMCbtadjust/old/runMCMCbtadjust.Rcheck/00check.log b/revdep/checks.noindex/runMCMCbtadjust/old/runMCMCbtadjust.Rcheck/00check.log
index 8bdac30b..2872b838 100644
--- a/revdep/checks.noindex/runMCMCbtadjust/old/runMCMCbtadjust.Rcheck/00check.log
+++ b/revdep/checks.noindex/runMCMCbtadjust/old/runMCMCbtadjust.Rcheck/00check.log
@@ -1,15 +1,15 @@
* using log directory ‘/Users/nick/github/greta-dev/greta/revdep/checks.noindex/runMCMCbtadjust/old/runMCMCbtadjust.Rcheck’
-* using R version 4.3.2 (2023-10-31)
-* using platform: aarch64-apple-darwin20 (64-bit)
+* using R version 4.4.2 (2024-10-31)
+* using platform: aarch64-apple-darwin20
* R was compiled by
Apple clang version 14.0.0 (clang-1400.0.29.202)
GNU Fortran (GCC) 12.2.0
-* running under: macOS Sonoma 14.0
+* running under: macOS Sonoma 14.5
* using session charset: UTF-8
* using options ‘--no-manual --no-build-vignettes’
* checking for file ‘runMCMCbtadjust/DESCRIPTION’ ... OK
* checking extension type ... Package
-* this is package ‘runMCMCbtadjust’ version ‘1.0.5’
+* this is package ‘runMCMCbtadjust’ version ‘1.1.2’
* package encoding: UTF-8
* checking package namespace information ... OK
* checking package dependencies ... OK
@@ -23,15 +23,12 @@
* checking installed package size ... OK
* checking package directory ... OK
* checking ‘build’ directory ... OK
-* checking DESCRIPTION meta-information ... WARNING
-Non-standard license specification:
- CECILL-2.1
-Standardizable: FALSE
+* checking DESCRIPTION meta-information ... OK
* checking top-level files ... OK
* checking for left-over files ... OK
* checking index information ... OK
* checking package subdirectories ... OK
-* checking R files for non-ASCII characters ... OK
+* checking code files for non-ASCII characters ... OK
* checking R files for syntax errors ... OK
* checking whether the package can be loaded ... OK
* checking whether the package can be loaded with stated dependencies ... OK
@@ -39,7 +36,11 @@ Standardizable: FALSE
* checking whether the namespace can be loaded with stated dependencies ... OK
* checking whether the namespace can be unloaded cleanly ... OK
* checking loading without being on the library search path ... OK
-* checking dependencies in R code ... OK
+* checking dependencies in R code ... NOTE
+'library' or 'require' calls in package code:
+ ‘nimble’ ‘nimbleAPT’
+ Please use :: or requireNamespace() instead.
+ See section 'Suggested packages' in the 'Writing R Extensions' manual.
* checking S3 generic/method consistency ... OK
* checking replacement functions ... OK
* checking foreign function calls ... OK
@@ -59,9 +60,11 @@ Standardizable: FALSE
* checking tests ... OK
Running ‘testthat.R’
* checking for unstated dependencies in vignettes ... OK
-* checking package vignettes in ‘inst/doc’ ... OK
+* checking package vignettes ... OK
* checking running R code from vignettes ... NONE
- ‘runMCMCbtadjust_Presentation.Rmd’ using ‘UTF-8’... OK
+ ‘runMCMCbtadjust_Nimble_variations.Rmd’ using ‘UTF-8’... OK
+ ‘runMCMCbtadjust_Pres.Rmd’ using ‘UTF-8’... OK
+ ‘runMCMCbtadjust_extraCalculations.Rmd’ using ‘UTF-8’... OK
* checking re-building of vignette outputs ... SKIPPED
* DONE
-Status: 1 WARNING
+Status: 1 NOTE
diff --git a/revdep/checks.noindex/runMCMCbtadjust/old/runMCMCbtadjust.Rcheck/00install.out b/revdep/checks.noindex/runMCMCbtadjust/old/runMCMCbtadjust.Rcheck/00install.out
index 173f6d8a..16cb286f 100644
--- a/revdep/checks.noindex/runMCMCbtadjust/old/runMCMCbtadjust.Rcheck/00install.out
+++ b/revdep/checks.noindex/runMCMCbtadjust/old/runMCMCbtadjust.Rcheck/00install.out
@@ -6,6 +6,7 @@
** byte-compile and prepare package for lazy loading
** help
*** installing help indices
+*** copying figures
** building package indices
** installing vignettes
** testing if installed package can be loaded from temporary location
diff --git a/revdep/checks.noindex/runMCMCbtadjust/old/runMCMCbtadjust.Rcheck/runMCMCbtadjust-Ex.R b/revdep/checks.noindex/runMCMCbtadjust/old/runMCMCbtadjust.Rcheck/runMCMCbtadjust-Ex.R
index ece014bd..05b5a58c 100644
--- a/revdep/checks.noindex/runMCMCbtadjust/old/runMCMCbtadjust.Rcheck/runMCMCbtadjust-Ex.R
+++ b/revdep/checks.noindex/runMCMCbtadjust/old/runMCMCbtadjust.Rcheck/runMCMCbtadjust-Ex.R
@@ -6,72 +6,121 @@ library('runMCMCbtadjust')
base::assign(".oldSearch", base::search(), pos = 'CheckExEnv')
base::assign(".old_wd", base::getwd(), pos = 'CheckExEnv')
cleanEx()
-nameEx("runMCMC_btadjust")
-### * runMCMC_btadjust
+nameEx("findMCMC_strong_corrs")
+### * findMCMC_strong_corrs
flush(stderr()); flush(stdout())
-### Name: runMCMC_btadjust
-### Title: runMCMC_btadjust
-### Aliases: runMCMC_btadjust
+### Name: findMCMC_strong_corrs
+### Title: findMCMC_strong_corrs
+### Aliases: findMCMC_strong_corrs
### ** Examples
- #\code{
-# for examples with Nimble or Greta, see the Vignette.
-# condition variable of whether installation is OK with Jags to avoid error durong package check
-condition_jags<-TRUE
-if (nchar(system.file(package='rjags'))==0) {condition_jags<-FALSE}
-if (nchar(system.file(package='runjags'))==0) {condition_jags<-FALSE}
-if (condition_jags)
-{suppressWarnings(temp<-runjags::testjags(silent=TRUE))
- if(!(temp$JAGS.available&temp$JAGS.found&temp$JAGS.major==4)) {condition_jags<-FALSE}}
-
-if (condition_jags) {
-#generating data
-set.seed(1)
-y1000<-rnorm(n=1000,mean=600,sd=30)
-ModelData <-list(mass = y1000,nobs = length(y1000))
+## Not run:
+##D #generating data
+##D set.seed(1)
+##D y1000<-rnorm(n=1000,mean=600,sd=30)
+##D ModelData <-list(mass = y1000,nobs = length(y1000))
+##D
+##D #writing the Jags code as a character chain in R
+##D modeltotransfer<-"model {
+##D
+##D # Priors
+##D population.mean ~ dunif(0,5000)
+##D population.sd ~ dunif(0,100)
+##D
+##D # Precision = 1/variance: Normal distribution parameterized by precision in Jags
+##D population.variance <- population.sd * population.sd
+##D precision <- 1 / population.variance
+##D
+##D # Likelihood
+##D for(i in 1:nobs){
+##D mass[i] ~ dnorm(population.mean, precision)
+##D }
+##D }"
+##D
+##D #specifying the initial values
+##D ModelInits <- function()
+##D {list (population.mean = rnorm(1,600,90), population.sd = runif(1, 1, 30))}
+##D params <- c("population.mean", "population.sd", "population.variance")
+##D K<-3
+##D set.seed(1)
+##D Inits<-lapply(1:K,function(x){ModelInits()})
+##D
+##D # running runMCMC_btadjust with MCMC_language="Jags":
+##D set.seed(1)
+##D out.mcmc.Coda<-runMCMC_btadjust(MCMC_language="Jags", code=modeltotransfer,
+##D data=ModelData,
+##D Nchains=K, params=params, inits=Inits,
+##D niter.min=1000, niter.max=300000,
+##D nburnin.min=100, nburnin.max=200000,
+##D thin.min=1, thin.max=1000,
+##D neff.min=1000, conv.max=1.05,
+##D control=list(print.diagnostics=TRUE, neff.method="Coda"))
+##D
+##D findMCMC_strong_corrs(out.mcmc.Coda)
+## End(Not run)
-#writing the Jags code as a character chain in R
-modeltotransfer<-"model {
-# Priors
-population.mean ~ dunif(0,5000)
-population.sd ~ dunif(0,100)
-# Precision = 1/variance: Normal distribution parameterized by precision in Jags
-population.variance <- population.sd * population.sd
-precision <- 1 / population.variance
+cleanEx()
+nameEx("runMCMC_btadjust")
+### * runMCMC_btadjust
-# Likelihood
-for(i in 1:nobs){
- mass[i] ~ dnorm(population.mean, precision)
- }
- }"
+flush(stderr()); flush(stdout())
-#specifying the initial values
-ModelInits <- function()
-{list (population.mean = rnorm(1,600,90), population.sd = runif(1, 1, 30))}
-params <- c("population.mean", "population.sd", "population.variance")
-K<-3
-set.seed(1)
-Inits<-lapply(1:K,function(x){ModelInits()})
+### Name: runMCMC_btadjust
+### Title: runMCMC_btadjust
+### Aliases: runMCMC_btadjust
-# running runMCMC_btadjust with MCMC_language="Jags":
-set.seed(1)
-out.mcmc.Coda<-runMCMC_btadjust(MCMC_language="Jags", code=modeltotransfer,
-data=ModelData,
-Nchains=K, params=params, inits=Inits,
-niter.min=1000, niter.max=300000,
-nburnin.min=100, nburnin.max=200000,
-thin.min=1, thin.max=1000,
-neff.min=1000, conv.max=1.05,
-control=list(print.diagnostics=TRUE, neff.method="Coda"))
+### ** Examples
-summary(out.mcmc.Coda)
-}
-#}
+# for examples with Nimble or Greta, see the Presentation Vignette.
+## Not run:
+##D #generating data
+##D set.seed(1)
+##D y1000<-rnorm(n=1000,mean=600,sd=30)
+##D ModelData <-list(mass = y1000,nobs = length(y1000))
+##D
+##D #writing the Jags code as a character chain in R
+##D modeltotransfer<-"model {
+##D
+##D # Priors
+##D population.mean ~ dunif(0,5000)
+##D population.sd ~ dunif(0,100)
+##D
+##D # Precision = 1/variance: Normal distribution parameterized by precision in Jags
+##D population.variance <- population.sd * population.sd
+##D precision <- 1 / population.variance
+##D
+##D # Likelihood
+##D for(i in 1:nobs){
+##D mass[i] ~ dnorm(population.mean, precision)
+##D }
+##D }"
+##D
+##D #specifying the initial values
+##D ModelInits <- function()
+##D {list (population.mean = rnorm(1,600,90), population.sd = runif(1, 1, 30))}
+##D params <- c("population.mean", "population.sd", "population.variance")
+##D K<-3
+##D set.seed(1)
+##D Inits<-lapply(1:K,function(x){ModelInits()})
+##D
+##D # running runMCMC_btadjust with MCMC_language="Jags":
+##D set.seed(1)
+##D out.mcmc.Coda<-runMCMC_btadjust(MCMC_language="Jags", code=modeltotransfer,
+##D data=ModelData,
+##D Nchains=K, params=params, inits=Inits,
+##D niter.min=1000, niter.max=300000,
+##D nburnin.min=100, nburnin.max=200000,
+##D thin.min=1, thin.max=1000,
+##D neff.min=1000, conv.max=1.05,
+##D control=list(print.diagnostics=TRUE, neff.method="Coda"))
+##D
+##D summary(out.mcmc.Coda)
+## End(Not run)
diff --git a/revdep/checks.noindex/runMCMCbtadjust/old/runMCMCbtadjust.Rcheck/runMCMCbtadjust-Ex.Rout b/revdep/checks.noindex/runMCMCbtadjust/old/runMCMCbtadjust.Rcheck/runMCMCbtadjust-Ex.Rout
index 516ebe79..df945283 100644
--- a/revdep/checks.noindex/runMCMCbtadjust/old/runMCMCbtadjust.Rcheck/runMCMCbtadjust-Ex.Rout
+++ b/revdep/checks.noindex/runMCMCbtadjust/old/runMCMCbtadjust.Rcheck/runMCMCbtadjust-Ex.Rout
@@ -1,7 +1,7 @@
-R version 4.3.2 (2023-10-31) -- "Eye Holes"
-Copyright (C) 2023 The R Foundation for Statistical Computing
-Platform: aarch64-apple-darwin20 (64-bit)
+R version 4.4.2 (2024-10-31) -- "Pile of Leaves"
+Copyright (C) 2024 The R Foundation for Statistical Computing
+Platform: aarch64-apple-darwin20
R is free software and comes with ABSOLUTELY NO WARRANTY.
You are welcome to redistribute it under certain conditions.
@@ -25,6 +25,65 @@ Type 'q()' to quit R.
> base::assign(".oldSearch", base::search(), pos = 'CheckExEnv')
> base::assign(".old_wd", base::getwd(), pos = 'CheckExEnv')
> cleanEx()
+> nameEx("findMCMC_strong_corrs")
+> ### * findMCMC_strong_corrs
+>
+> flush(stderr()); flush(stdout())
+>
+> ### Name: findMCMC_strong_corrs
+> ### Title: findMCMC_strong_corrs
+> ### Aliases: findMCMC_strong_corrs
+>
+> ### ** Examples
+>
+> ## Not run:
+> ##D #generating data
+> ##D set.seed(1)
+> ##D y1000<-rnorm(n=1000,mean=600,sd=30)
+> ##D ModelData <-list(mass = y1000,nobs = length(y1000))
+> ##D
+> ##D #writing the Jags code as a character chain in R
+> ##D modeltotransfer<-"model {
+> ##D
+> ##D # Priors
+> ##D population.mean ~ dunif(0,5000)
+> ##D population.sd ~ dunif(0,100)
+> ##D
+> ##D # Precision = 1/variance: Normal distribution parameterized by precision in Jags
+> ##D population.variance <- population.sd * population.sd
+> ##D precision <- 1 / population.variance
+> ##D
+> ##D # Likelihood
+> ##D for(i in 1:nobs){
+> ##D mass[i] ~ dnorm(population.mean, precision)
+> ##D }
+> ##D }"
+> ##D
+> ##D #specifying the initial values
+> ##D ModelInits <- function()
+> ##D {list (population.mean = rnorm(1,600,90), population.sd = runif(1, 1, 30))}
+> ##D params <- c("population.mean", "population.sd", "population.variance")
+> ##D K<-3
+> ##D set.seed(1)
+> ##D Inits<-lapply(1:K,function(x){ModelInits()})
+> ##D
+> ##D # running runMCMC_btadjust with MCMC_language="Jags":
+> ##D set.seed(1)
+> ##D out.mcmc.Coda<-runMCMC_btadjust(MCMC_language="Jags", code=modeltotransfer,
+> ##D data=ModelData,
+> ##D Nchains=K, params=params, inits=Inits,
+> ##D niter.min=1000, niter.max=300000,
+> ##D nburnin.min=100, nburnin.max=200000,
+> ##D thin.min=1, thin.max=1000,
+> ##D neff.min=1000, conv.max=1.05,
+> ##D control=list(print.diagnostics=TRUE, neff.method="Coda"))
+> ##D
+> ##D findMCMC_strong_corrs(out.mcmc.Coda)
+> ## End(Not run)
+>
+>
+>
+> cleanEx()
> nameEx("runMCMC_btadjust")
> ### * runMCMC_btadjust
>
@@ -36,111 +95,51 @@ Type 'q()' to quit R.
>
> ### ** Examples
>
-> #\code{
-> # for examples with Nimble or Greta, see the Vignette.
-> # condition variable of whether installation is OK with Jags to avoid error durong package check
-> condition_jags<-TRUE
-> if (nchar(system.file(package='rjags'))==0) {condition_jags<-FALSE}
-> if (nchar(system.file(package='runjags'))==0) {condition_jags<-FALSE}
-> if (condition_jags)
-+ {suppressWarnings(temp<-runjags::testjags(silent=TRUE))
-+ if(!(temp$JAGS.available&temp$JAGS.found&temp$JAGS.major==4)) {condition_jags<-FALSE}}
->
-> if (condition_jags) {
-+ #generating data
-+ set.seed(1)
-+ y1000<-rnorm(n=1000,mean=600,sd=30)
-+ ModelData <-list(mass = y1000,nobs = length(y1000))
-+
-+ #writing the Jags code as a character chain in R
-+ modeltotransfer<-"model {
-+
-+ # Priors
-+ population.mean ~ dunif(0,5000)
-+ population.sd ~ dunif(0,100)
-+
-+ # Precision = 1/variance: Normal distribution parameterized by precision in Jags
-+ population.variance <- population.sd * population.sd
-+ precision <- 1 / population.variance
-+
-+ # Likelihood
-+ for(i in 1:nobs){
-+ mass[i] ~ dnorm(population.mean, precision)
-+ }
-+ }"
-+
-+ #specifying the initial values
-+ ModelInits <- function()
-+ {list (population.mean = rnorm(1,600,90), population.sd = runif(1, 1, 30))}
-+ params <- c("population.mean", "population.sd", "population.variance")
-+ K<-3
-+ set.seed(1)
-+ Inits<-lapply(1:K,function(x){ModelInits()})
-+
-+ # running runMCMC_btadjust with MCMC_language="Jags":
-+ set.seed(1)
-+ out.mcmc.Coda<-runMCMC_btadjust(MCMC_language="Jags", code=modeltotransfer,
-+ data=ModelData,
-+ Nchains=K, params=params, inits=Inits,
-+ niter.min=1000, niter.max=300000,
-+ nburnin.min=100, nburnin.max=200000,
-+ thin.min=1, thin.max=1000,
-+ neff.min=1000, conv.max=1.05,
-+ control=list(print.diagnostics=TRUE, neff.method="Coda"))
-+
-+ summary(out.mcmc.Coda)
-+ }
-Compiling model graph
- Resolving undeclared variables
- Allocating nodes
-Graph information:
- Observed stochastic nodes: 1000
- Unobserved stochastic nodes: 2
- Total graph size: 1009
-
-Initializing model
-
-Cycle 1...
-[1] "###################################################################################"
-[1] "###################################################################################"
-[1] "Current state of diagnostics:"
- Nchains thin niter.tot Nvalues nu.burn
-MCMC parameters 3 1 1000 2700 1
-[1] "###################################################################################"
- max median mean name_max prop_ab_1p2 prop_ab_1p05
-Gelman_Upper_C_I 1.001 1.001 1.001 population.mean 0 0
-Gelman_Point_Est 1.000 1.000 1.000 population.mean 0 0
- prop_ab_1p01
-Gelman_Upper_C_I 0
-Gelman_Point_Est 0
-[1] "###################################################################################"
- min median mean name_min prop_bel_1000
-Neff 1773 1778 1787 population.variance 0
- prop_bel_5000 prop_bel_10000
-Neff 1 1
-[1] "###################################################################################"
-
-Iterations = 1:900
-Thinning interval = 1
-Number of chains = 3
-Sample size per chain = 900
-
-1. Empirical mean and standard deviation for each variable,
- plus standard error of the mean:
-
- Mean SD Naive SE Time-series SE
-population.mean 599.67 0.9941 0.01913 0.02344
-population.sd 31.07 0.6850 0.01318 0.01633
-population.variance 965.96 42.6060 0.81995 1.01695
-
-2. Quantiles for each variable:
-
- 2.5% 25% 50% 75% 97.5%
-population.mean 597.70 598.99 599.66 600.35 601.57
-population.sd 29.76 30.59 31.07 31.53 32.42
-population.variance 885.53 935.97 965.40 994.14 1051.32
-
-> #}
+> # for examples with Nimble or Greta, see the Presentation Vignette.
+> ## Not run:
+> ##D #generating data
+> ##D set.seed(1)
+> ##D y1000<-rnorm(n=1000,mean=600,sd=30)
+> ##D ModelData <-list(mass = y1000,nobs = length(y1000))
+> ##D
+> ##D #writing the Jags code as a character chain in R
+> ##D modeltotransfer<-"model {
+> ##D
+> ##D # Priors
+> ##D population.mean ~ dunif(0,5000)
+> ##D population.sd ~ dunif(0,100)
+> ##D
+> ##D # Precision = 1/variance: Normal distribution parameterized by precision in Jags
+> ##D population.variance <- population.sd * population.sd
+> ##D precision <- 1 / population.variance
+> ##D
+> ##D # Likelihood
+> ##D for(i in 1:nobs){
+> ##D mass[i] ~ dnorm(population.mean, precision)
+> ##D }
+> ##D }"
+> ##D
+> ##D #specifying the initial values
+> ##D ModelInits <- function()
+> ##D {list (population.mean = rnorm(1,600,90), population.sd = runif(1, 1, 30))}
+> ##D params <- c("population.mean", "population.sd", "population.variance")
+> ##D K<-3
+> ##D set.seed(1)
+> ##D Inits<-lapply(1:K,function(x){ModelInits()})
+> ##D
+> ##D # running runMCMC_btadjust with MCMC_language="Jags":
+> ##D set.seed(1)
+> ##D out.mcmc.Coda<-runMCMC_btadjust(MCMC_language="Jags", code=modeltotransfer,
+> ##D data=ModelData,
+> ##D Nchains=K, params=params, inits=Inits,
+> ##D niter.min=1000, niter.max=300000,
+> ##D nburnin.min=100, nburnin.max=200000,
+> ##D thin.min=1, thin.max=1000,
+> ##D neff.min=1000, conv.max=1.05,
+> ##D control=list(print.diagnostics=TRUE, neff.method="Coda"))
+> ##D
+> ##D summary(out.mcmc.Coda)
+> ## End(Not run)
>
>
>
@@ -149,7 +148,7 @@ population.variance 885.53 935.97 965.40 994.14 1051.32
> cleanEx()
> options(digits = 7L)
> base::cat("Time elapsed: ", proc.time() - base::get("ptime", pos = 'CheckExEnv'),"\n")
-Time elapsed: 1.481 0.042 15.239 0.021 0.035
+Time elapsed: 0.035 0.003 0.041 0 0
> grDevices::dev.off()
null device
1
diff --git a/revdep/checks.noindex/runMCMCbtadjust/old/runMCMCbtadjust.Rcheck/tests/testthat.Rout b/revdep/checks.noindex/runMCMCbtadjust/old/runMCMCbtadjust.Rcheck/tests/testthat.Rout
index bc3aaa3d..29f60329 100644
--- a/revdep/checks.noindex/runMCMCbtadjust/old/runMCMCbtadjust.Rcheck/tests/testthat.Rout
+++ b/revdep/checks.noindex/runMCMCbtadjust/old/runMCMCbtadjust.Rcheck/tests/testthat.Rout
@@ -1,7 +1,7 @@
-R version 4.3.2 (2023-10-31) -- "Eye Holes"
-Copyright (C) 2023 The R Foundation for Statistical Computing
-Platform: aarch64-apple-darwin20 (64-bit)
+R version 4.4.2 (2024-10-31) -- "Pile of Leaves"
+Copyright (C) 2024 The R Foundation for Statistical Computing
+Platform: aarch64-apple-darwin20
R is free software and comes with ABSOLUTELY NO WARRANTY.
You are welcome to redistribute it under certain conditions.
@@ -27,7 +27,7 @@ Type 'q()' to quit R.
> library(runMCMCbtadjust)
>
> test_check("runMCMCbtadjust")
-nimble version 1.0.1 is loaded.
+nimble version 1.2.1 is loaded.
For more information on NIMBLE and a User Manual,
please visit https://R-nimble.org.
@@ -42,32 +42,43 @@ The following object is masked from 'package:stats':
simulate
-Timing stopped at: 0.004 0 0.004
-Timing stopped at: 0.004 0 0.005
-Timing stopped at: 0.005 0 0.005
+The following object is masked from 'package:base':
+
+ declare
+
+[1] "control$seed is NULL. Replaced by 1"
+Timing stopped at: 0.004 0.001 0.006
+Timing stopped at: 0.003 0 0.003
+Timing stopped at: 0.003 0 0.003
Timing stopped at: 0.002 0 0.002
-Timing stopped at: 0.004 0 0.005
-Timing stopped at: 0.005 0 0.006
-Timing stopped at: 0.004 0 0.004
Timing stopped at: 0.003 0 0.002
-Timing stopped at: 0.002 0 0.002
Timing stopped at: 0.003 0 0.002
-Timing stopped at: 0.002 0 0.002
Timing stopped at: 0.003 0 0.003
+Timing stopped at: 0.003 0 0.003
+Timing stopped at: 0.003 0 0.002
Timing stopped at: 0.002 0 0.003
Timing stopped at: 0.002 0 0.003
+Timing stopped at: 0.003 0.001 0.003
+Timing stopped at: 0.004 0.001 0.01
+Timing stopped at: 0.003 0 0.002
Timing stopped at: 0.003 0 0.003
-Timing stopped at: 0.004 0 0.004
-Timing stopped at: 0.004 0 0.004
-Timing stopped at: 0.005 0.001 0.005
-Timing stopped at: 0.005 0 0.005
+Timing stopped at: 0.003 0 0.003
+[1] "control$seed is NULL. Replaced by 1"
+Timing stopped at: 0.004 0.001 0.004
+[1] "control$seed is NULL. Replaced by 1"
+Timing stopped at: 0.005 0 0.007
+[1] "control$seed is NULL. Replaced by 1"
+Timing stopped at: 0.004 0 0.005
+[1] "control$seed is NULL. Replaced by 1"
Timing stopped at: 0.004 0.001 0.004
-Timing stopped at: 0.003 0.001 0.004
+[1] "control$seed is NULL. Replaced by 1"
Timing stopped at: 0.004 0 0.004
-[ FAIL 0 | WARN 1 | SKIP 0 | PASS 23 ]
+[1] "control$seed is NULL. Replaced by 1"
+Timing stopped at: 0.006 0 0.008
+[ FAIL 0 | WARN 2 | SKIP 0 | PASS 23 ]
-[ FAIL 0 | WARN 1 | SKIP 0 | PASS 23 ]
+[ FAIL 0 | WARN 2 | SKIP 0 | PASS 23 ]
>
> proc.time()
user system elapsed
- 2.659 0.107 3.318
+ 2.624 0.088 3.383
diff --git a/revdep/checks.noindex/runMCMCbtadjust/old/runMCMCbtadjust.Rcheck/tests/testthat/test-runMCMC_btadjust.R b/revdep/checks.noindex/runMCMCbtadjust/old/runMCMCbtadjust.Rcheck/tests/testthat/test-runMCMC_btadjust.R
index e12b9a73..54fd4b58 100644
--- a/revdep/checks.noindex/runMCMCbtadjust/old/runMCMCbtadjust.Rcheck/tests/testthat/test-runMCMC_btadjust.R
+++ b/revdep/checks.noindex/runMCMCbtadjust/old/runMCMCbtadjust.Rcheck/tests/testthat/test-runMCMC_btadjust.R
@@ -1,4 +1,4 @@
-context("tunMCMC_btadjust")
+context("runMCMC_btadjust")
### minimal setup for running runMCMC_btadjust: see vignette
set.seed(1)
@@ -135,7 +135,7 @@ testthat::test_that("errors", {
inits=Inits,params=params,neff.min=150,conv.max=1.05,Nchains=2,niter.max=2000,control=list(convType="Gelman",convtype.alpha=NULL))))
testthat::expect_error((runMCMC_btadjust(data=ModelData,constants=ModelConsts,code=ModelCode,MCMC_language="Nimble",
inits=Inits,params=params,neff.min=150,conv.max=1.05,Nchains=2,niter.max=2000,control=list(convType="Gelman",convtype.alpha=1.1))))
-
+
#should give an error because neff.method is not "Stan" or "Coda"
testthat::expect_error((runMCMC_btadjust(data=ModelData,constants=ModelConsts,code=ModelCode,MCMC_language="Nimble",
@@ -167,6 +167,7 @@ testthat::test_that("errors", {
inits=Inits,neff.min=150,conv.max=1.05,Nchains=2,niter.max=2000)))
+
})}
@@ -183,7 +184,7 @@ testthat::test_that("errors", {
testthat::expect_error((runMCMC_btadjust(data=ModelData.Jags,MCMC_language="Jags",
inits=Inits,params=params,neff.min=150,conv.max=1.05,Nchains=2,niter.max=2000)))
-
+
#should give an error because code is not of type character with MCMC_language="Jags"
testthat::expect_error((runMCMC_btadjust(data=ModelData.Jags,code=1,MCMC_language="Jags",
inits=Inits,params=params,neff.min=150,conv.max=1.05,Nchains=2,niter.max=2000)))
diff --git a/revdep/library.noindex/greta/new/Rcpp/DESCRIPTION b/revdep/library.noindex/greta/new/Rcpp/DESCRIPTION
index afdd5da5..64bc27f5 100644
--- a/revdep/library.noindex/greta/new/Rcpp/DESCRIPTION
+++ b/revdep/library.noindex/greta/new/Rcpp/DESCRIPTION
@@ -1,10 +1,23 @@
Package: Rcpp
Title: Seamless R and C++ Integration
-Version: 1.0.12
-Date: 2024-01-08
-Author: Dirk Eddelbuettel, Romain Francois, JJ Allaire, Kevin Ushey, Qiang Kou,
- Nathan Russell, Inaki Ucar, Douglas Bates and John Chambers
-Maintainer: Dirk Eddelbuettel
[Remaining hunks in this span: the rest of the Rcpp diffs (DESCRIPTION maintainer field, the HTML package index title bumped from version 1.0.12 to 1.0.13-1, and loop-index cleanups in the Rcpp module headers), plus the diff to revdep/library.noindex/greta/new/future/doc/future-3-topologies.html; the same vignette changes (declaring nested workers with I()) appear in readable form in the future-3-topologies.md.rsp diff below.]
diff --git a/revdep/library.noindex/greta/new/future/doc/future-3-topologies.md.rsp b/revdep/library.noindex/greta/new/future/doc/future-3-topologies.md.rsp
index 070268dd..739e8145 100644
--- a/revdep/library.noindex/greta/new/future/doc/future-3-topologies.md.rsp
+++ b/revdep/library.noindex/greta/new/future/doc/future-3-topologies.md.rsp
@@ -102,15 +102,15 @@ Although this does not give an error, we will find that the inner layer of futur
Now, we could imagine that we process the outer layer with, say, two parallel futures, and then the inner layer with four parallel futures. In that case, we would end up running on at most eight cores (= 2 * 4). This can be achieved by forcing a fixed number of workers at each layer:
```r
-plan(list(tweak(multisession, workers = 2), tweak(multisession, workers = 4)))
+plan(list(tweak(multisession, workers = 2), tweak(multisession, workers = I(4))))
```
-When using this approach, there is a risk of setting up too many concurrent workers. To make sure the setup respects `availableCores()`, use something like:
+When using this approach, there is a risk of setting up too many concurrent workers. Because Futureverse has a built-in protection, we need to declare nested workers using the As-Is `I(.)` function, which basically tells the parallel framework "trust us, we know what we are doing". To minimize the risk of mistakes and to make sure our setup respects `availableCores()`, use something like:
```r
plan(list(
tweak(multisession, workers = availableCores() %/% 4),
- tweak(multisession, workers = 4)
+ tweak(multisession, workers = I(4))
))
```
@@ -134,7 +134,7 @@ One possible downside to the above setup is that we might not utilize all availa
nodes <- rep(c("n1", "n2", "n3"), each = 8)
plan(list(
tweak(cluster, workers = nodes),
- tweak(multisession, workers = 2)
+ tweak(multisession, workers = I(2))
))
```
@@ -229,7 +229,7 @@ tweaking the `multisession` plan by passing a function to `workers`;
halfCores <- function() { max(1, round(0.5 * availableCores()))
plan(list(
tweak(cluster, workers = nodes),
- tweak(multisession, workers = halfCores)
+ tweak(multisession, workers = I(halfCores))
))
```
@@ -250,7 +250,7 @@ customWorkers <- function() {
}
plan(list(
tweak(cluster, workers = nodes),
- tweak(multisession, workers = customWorkers)
+ tweak(multisession, workers = I(customWorkers))
))
```
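Illustrative aside (not part of the diff): a minimal end-to-end sketch of the 2 x 4 nested topology discussed in the hunks above, using only the future API shown in the vignette (`plan()`, `tweak()`, `multisession`, `I()`); `Sys.getpid()` is used here purely to show which worker evaluated each inner future.

```r
library(future)

# Outer layer: 2 multisession workers; inner layer: 4 workers each.
# I() marks the inner worker count as "trust us", bypassing the
# built-in protection that would otherwise make the inner layer sequential.
plan(list(
  tweak(multisession, workers = 2),
  tweak(multisession, workers = I(4))
))

outer <- lapply(1:2, function(i) {
  future({
    inner <- lapply(1:4, function(j) future(Sys.getpid()))
    vapply(inner, value, numeric(1))
  })
})

str(lapply(outer, value))  # up to 2 * 4 = 8 distinct worker PIDs

plan(sequential)  # restore the default plan
```

Without `I()`, the built-in protection described in the prose above kicks in for the inner layer.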
diff --git a/revdep/library.noindex/greta/new/future/doc/future-4-non-exportable-objects.html b/revdep/library.noindex/greta/new/future/doc/future-4-non-exportable-objects.html
index eb3c88f8..7907c7f5 100644
--- a/revdep/library.noindex/greta/new/future/doc/future-4-non-exportable-objects.html
+++ b/revdep/library.noindex/greta/new/future/doc/future-4-non-exportable-objects.html
@@ -172,7 +172,7 @@ A first example - file connections
The culprit here is that the connection uses a so called external pointer:
str(con)
## Classes 'file', 'connection' atomic [1:1] 3
-## ..- attr(*, "conn_id")=<externalptr>
+## ..- attr(*, "conn_id")=<externalptr>
which is bound to the main R process and makes no sense to the worker. Ideally, the R process of the worker would detect this and produce an informative error message, but as seen here, that does not always occur.
[Table of packages and the non-exportable object types they produce; the readable version of this table is in the future-4-non-exportable-objects.md.rsp diff below.]
If an object carries an external pointer, it is likely that it can only be used in the R session where it was created. If it is exported to and used in a parallel process, it will likely cause an error there. As shown above, and in below examples, setting option future.globals.onReference
to "error"
will make future to scan for external pointer:s before launching the future on a parallel worker, and throw an error if one is detected.
However, there are objects with external pointer:s that can be exported, e.g. data.table
objects of the data.table package is one such example. In other words, the existence of a external pointer is just a suggestion for an object being non-exportable - it is not a sufficient condition.
Below are some examples of packages who produce non-exportable objects with external pointer:s.
+The arrow package provides efficient in-memory storage of arrays +and tables. However, these objects cannot be transferred as-is to a +parallel worker.
+library(arrow)
+
+library(future)
+plan(multisession)
+
+data <- as_arrow_table(iris)
+f <- future(dim(data))
+v <- value(f)
+#> Error: Invalid <Table>, external pointer to null
+
+This error takes place on the parallel worker. We could set
+options(future.globals.onReference = "error")
to have future
+detect the problem before it sends the object over to the parallel
+worker.
That said, the arrow package provides low-level functions
+write_to_raw()
and read_ipc_stream()
that can be used to marshal and
+unmarshal arrow objects. For example,
library(arrow)
+
+library(future)
+plan(multisession)
+
+data <- as_arrow_table(iris)
+.data <- write_to_raw(data) ## marshal
+f <- future({
+ data <- read_ipc_stream(.data) ## unmarshal
+ dim(data)
+})
+v <- value(f)
+print(v)
+#> [1] 150 5
+
+The bigmemory package provides mechanisms for working with very +large matrices that can be updated in-place, which helps save +memory. For example,
+library(bigmemory)
+
+g <- function(x) {
+ x[1,1] <- 42L
+ x
+}
+
+x <- big.matrix(nrow = 3, ncol = 2, type = "integer")
+print(x[1,1])
+#> [1] NA
+
+void <- g(x)
+print(x[1,1])
+#> [1] 42
+
+Note how x
was updated in-place. This is achieved by big.matrix
+objects holding an external pointer to where the matrix data is stored;
str(x)
+#> Formal class 'big.matrix' [package "bigmemory"] with 1 slot
+#> ..@ address:<externalptr>
+
+If we would try to use x
in a parallel worker, then the parallel
+worker crashes due to a bug in bigmemory, e.g.
library(bigmemory)
+
+library(future)
+plan(multisession, workers = 2)
+
+x <- big.matrix(nrow = 3, ncol = 2, type = "integer")
+f <- future(dim(x), packages = "bigmemory")
+value(f)
+#> Error in unserialize(node$con) :
+#> MultisessionFuture (<none>) failed to receive message results from
+#> cluster RichSOCKnode #1 (PID 1746676 on localhost 'localhost'). The
+#> reason reported was 'error reading from connection'. Post-mortem
+#> diagnostic: No process exists with this PID, i.e. the localhost worker
+#> is no longer alive. Detected a non-exportable reference
+#> ('externalptr') in one of the globals ('x' of class 'big.matrix') used
+#> in the future expression. The total size of the 1 globals exported is
+#> 696 bytes. There is one global: 'x' (696 bytes of class 'S4')
+
+We can protect against this by setting:
+options(future.globals.onReference = "error")
+
+which gives:
+f <- future(dim(x), packages = "bigmemory")
+#> Error: Detected a non-exportable reference ('externalptr') in one
+#> of the globals ('x' of class 'big.matrix') used in the future
+#> expression
+
Another example is cpp11, which allows us to easily create R functions that are implemented in C++, e.g.
cpp11::cpp_source(code = '
@@ -339,7 +441,7 @@ Package: cpp11
x <- rnorm(10)
n %<-% my_length(x)
n
-#> Error in .Call("_code_1748ff617940b9_my_length", x, PACKAGE = "code_1748ff617940b9") :
+#> Error in .Call("_code_1748ff617940b9_my_length", x, PACKAGE = "code_1748ff617940b9") :
#> "_code_1748ff617940b9_my_length" not available for .Call() for package "code_1748ff617940b9"
This is because:
@@ -408,7 +510,7 @@This is error message is not very helpful. But, if we turn on @@ -421,9 +523,9 @@
Functions serialize_model()
and unserialize_model()
of the
keras package can be used as workaround to marshal and unmarshal
non-exportable keras objects, e.g.
.model <- serialize_model(model)
+.model <- serialize_model(model) ## marshal
f <- future({
- model <- unserialize_model(.model)
+ model <- unserialize_model(.model) ## unmarshal
stats::predict(model, test_input)
}, seed = TRUE)
rm(.model) ## not needed anymore
@@ -452,6 +554,27 @@ Package: magick
## 'magick-image') in one of the globals ('frink' of class 'magick-image')
## used in the future expression
+Package: polars
+The polars package provides objects for performant processing on
+tabular data. However, these objects are tied to the R process that
+created them. If we attempt to use them in a parallel worker, we end
+up crashing the parallel worker:
+library(future)
+plan(multisession)
+
+library(polars)
+data <- as_polars_df(data.frame(x = 1:3))
+f <- future(dim(data), packages = "polars")
+v <- value(f)
+#> Error: Execution halted with the following contexts
+#> 0: In R: in `$.RPolarsDataFrame`
+#> 0: During function call [workRSOCK()]
+#> 1: This Polars object is not valid. Execute `rm(<object>)` to remove
+#> the object or restart the R session.
+
+This is because the external pointer in the RPolarsDataFrame
object
+is erased when transferred to another process, which polars (>= 0.15.0)
+detects and gives an informative error message about.
Package: raster
The raster package provides methods for working with spatial data, which are held in ‘RasterLayer’ objects. Not all but some of these objects use an external pointer. For example,
library(future)
@@ -511,7 +634,7 @@ Package: reticulate
os <- import("os")
pwd %<-% os$getcwd()
pwd
-## Error in eval(quote(os$getcwd()), new.env()) :
+## Error in eval(quote(os$getcwd()), new.env()) :
## attempt to apply non-function
and by telling the future package to validate globals further, we get:
@@ -527,7 +650,7 @@ Package: reticulate
## [1] 2.4
y %<-% twice(1.2)
y
-## Error in unserialize(node$con) :
+## Error in unserialize(node$con) :
## Failed to retrieve the value of MultisessionFuture from cluster node #1
## (on 'localhost'). The reason reported was 'error reading from connection'
@@ -599,7 +722,7 @@ To catch this as soon as possible,
@@ -642,15 +765,18 @@Functions wrap()
and vect()
of the terra package can be used as workaround to marshal and unmarshal non-exportable terra objects, e.g.
Functions wrap()
and unwrap()
of the terra package can be used as workaround to marshal and unmarshal non-exportable terra objects, e.g.
library(future)
plan(multisession)
library(terra)
file <- system.file("ex/lux.shp", package = "terra")
v <- vect(file)
-.v <- wrap(v)
-dv %<-% { v <- vect(.v); dim(v) }
+.v <- wrap(v) ## marshal
+dv %<-% {
+ v <- unwrap(.v) ## unmarshal
+ dim(v)
+}
rm(.v) ## not needed anymore
dv
[1] 12 6
@@ -658,11 +784,14 @@ Package: terra
and
file <- system.file("ex/elev.tif", package = "terra")
r <- rast(file)
-.r <- wrap(v)
-dr %<-% { r <- vect(.r); dim(r) }
+.r <- wrap(r) ## marshal
+dr %<-% {
+ r <- unwrap(.r) ## unmarshal
+ dim(r)
+}
rm(.r) ## not needed anymore
dr
-[1] 12 6
+[1] 90 95 1
For more details, see help("wrap", package = "terra")
.
Package: udpipe
@@ -673,7 +802,7 @@ Package: udpipe
udmodel <- udpipe_load_model(file = udmodel$file_model)
x %<-% udpipe_annotate(udmodel, x = "Ik ging op reis en ik nam mee.")
x
-## Error in udp_tokenise_tag_parse(object$model, x, doc_id, tokenizer, tagger, :
+## Error in udp_tokenise_tag_parse(object$model, x, doc_id, tokenizer, tagger, :
## external pointer is not valid
To catch this as soon as possible,
@@ -694,12 +823,12 @@works just fine but if we attempt to pass on the ‘xgb.DMatrix’ object train
to an external worker, we silently get an incorrect value:
f <- future(dim(dtrain))
+f <- future(dim(train))
d <- value(f)
d
## NULL
@@ -757,7 +886,7 @@ Package: XML
2: normal R exit
3: exit R without saving workspace
4: exit R saving workspace
-Selection:
+Selection:
This is a very harsh way of telling us that we cannot export all types
of objects produced by XML. Ideally, XML would detect this
@@ -772,9 +901,12 @@
Package: XML
a <- getNodeSet(doc, "/doc//a[@status]")[[1]]
## Marshall the non-exportable XMLInternalElementNode object
-a_m <- xmlSerializeHook(a)
+.a <- xmlSerializeHook(a) ## marshal
-f <- future({ a <- xmlDeserializeHook(a_m); xmlGetAttr(a, "status") })
+f <- future({
+ a <- xmlDeserializeHook(.a) ## unmarshal
+ xmlGetAttr(a, "status")
+})
value(f)
## [1] "xyz"
@@ -813,12 +945,12 @@ One workaround when dealing with non-exportable objects is to look for ways to encode the object such that it can be exported, and the decoded on the receiving end. With xml2, we can use xml2::xml_serialize()
and xml2::xml_unserialize()
to do this. Here is how we can rewrite the above example such that we can pass xml2 object back and forth between the main R session and R workers:
## Encode the 'xml_document' object 'doc' as a 'raw' object
-doc_raw <- xml_serialize(doc, connection = NULL)
+.doc <- xml_serialize(doc, connection = NULL) ## marshal
f <- future({
## In the future, reconstruct the 'xml_document' object
## from the 'raw' object
- doc <- xml_unserialize(doc_raw)
+ doc <- xml_unserialize(.doc) ## unmarshal
## Continue as usual
children <- xml_children(doc)
@@ -909,16 +1041,16 @@ Package rstan
data {
int<lower=0> N;
real y[N];
-}
+}
parameters {
real mu;
-}
+}
model {
target += normal_lpdf(mu | 0, 10);
target += normal_lpdf(y | mu, 1);
-}
+}
"
y <- rnorm(20)
diff --git a/revdep/library.noindex/greta/new/future/doc/future-4-non-exportable-objects.md.rsp b/revdep/library.noindex/greta/new/future/doc/future-4-non-exportable-objects.md.rsp
index cf1ef544..3331a72a 100644
--- a/revdep/library.noindex/greta/new/future/doc/future-4-non-exportable-objects.md.rsp
+++ b/revdep/library.noindex/greta/new/future/doc/future-4-non-exportable-objects.md.rsp
@@ -48,7 +48,7 @@ The culprit here is that the connection uses a so called _external pointer_:
```r
str(con)
## Classes 'file', 'connection' atomic [1:1] 3
-## ..- attr(*, "conn_id")=<externalptr>
+## ..- attr(*, "conn_id")=<externalptr>
```
which is bound to the main R process and makes no sense to the worker. Ideally, the R process of the worker would detect this and produce an informative error message, but as seen here, that does not always occur.
@@ -75,7 +75,9 @@ _If you identify other cases, please consider [reporting](https://github.com/Hen
Package | Examples of non-exportable types or classes
:---------------|:-------------------------------------------
+ **arrow** | Table (`externalptr`)
**base** | connection (`externalptr`)
+ **bigmemory** | big.matrix (`externalptr`)
**cpp11** | E.g. functions created by `cpp_source()`
**DBI** | DBIConnection (`externalptr`)
**inline** | CFunc (`externalptr` of class DLLHandle)
@@ -83,6 +85,7 @@ _If you identify other cases, please consider [reporting](https://github.com/Hen
**magick** | magick-image (`externalptr`)
**ncdf4** | ncdf4 (custom reference; _non-detectable_)
**parallel** | cluster and cluster nodes (`connection`)
+ **polars** | RPolarsDataFrame (`externalptr`)
**raster** | RasterLayer (`externalptr`; _not all_)
**Rcpp** | NativeSymbol (`externalptr`)
**reticulate** | python.builtin.function (`externalptr`), python.builtin.module (`externalptr`)
@@ -144,6 +147,122 @@ However, there are objects with _external pointer_:s that can be exported, e.g.
Below are some examples of packages who produce non-exportable objects with _external pointer_:s.
+#### Package: arrow
+
+The **[arrow]** package provides efficient in-memory storage of arrays
+and tables. However, these objects cannot be transferred as-is to a
+parallel worker.
+
+```r
+library(arrow)
+
+library(future)
+plan(multisession)
+
+data <- as_arrow_table(iris)
+f <- future(dim(data))
+v <- value(f)
+#> Error: Invalid <Table>, external pointer to null
+```
+
+This error takes place on the parallel worker. We could set
+`options(future.globals.onReference = "error")` to have **future**
+detect the problem before it sends the object over to the parallel
+worker.
+
+That said, the **arrow** package provides low-level functions
+`write_to_raw()` and `read_ipc_stream()` that can be used to marshal and
+unmarshal **arrow** objects. For example,
+
+```r
+library(arrow)
+
+library(future)
+plan(multisession)
+
+data <- as_arrow_table(iris)
+.data <- write_to_raw(data) ## marshal
+f <- future({
+ data <- read_ipc_stream(.data) ## unmarshal
+ dim(data)
+})
+v <- value(f)
+print(v)
+#> [1] 150 5
+```
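Editorial aside (not part of the vignette diff): the marshal/unmarshal pattern above can be wrapped in a small helper so the caller never touches the raw IPC payload. A minimal sketch reusing only `write_to_raw()` and `read_ipc_stream()` from the hunk; the helper name `with_arrow_table()` is hypothetical.

```r
library(arrow)
library(future)
plan(multisession)

# Hypothetical helper: marshal an Arrow Table to a raw IPC stream in the
# main session, unmarshal it on the worker, then apply `fun` to it there.
with_arrow_table <- function(tbl, fun) {
  raw_ipc <- write_to_raw(tbl)       ## marshal
  future({
    x <- read_ipc_stream(raw_ipc)    ## unmarshal on the worker
    fun(x)
  })
}

f <- with_arrow_table(as_arrow_table(iris), dim)
value(f)
#> [1] 150   5
```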
+
+
+#### Package: bigmemory
+
+The **[bigmemory]** package provides mechanisms for working with very
+large matrices that can be updated in-place, which helps save
+memory. For example,
+
+```r
+library(bigmemory)
+
+g <- function(x) {
+ x[1,1] <- 42L
+ x
+}
+
+x <- big.matrix(nrow = 3, ncol = 2, type = "integer")
+print(x[1,1])
+#> [1] NA
+
+void <- g(x)
+print(x[1,1])
+#> [1] 42
+```
+
+Note how `x` was updated in-place. This is achieved by `big.matrix`
+objects holding an external pointer to where the matrix data is stored;
+
+```r
+str(x)
+#> Formal class 'big.matrix' [package "bigmemory"] with 1 slot
+#> ..@ address:<externalptr>
+```
+
+If we would try to use `x` in a parallel worker, then the parallel
+worker crashes due to a bug in **bigmemory**, e.g.
+
+```r
+library(bigmemory)
+
+library(future)
+plan(multisession, workers = 2)
+
+x <- big.matrix(nrow = 3, ncol = 2, type = "integer")
+f <- future(dim(x), packages = "bigmemory")
+value(f)
+#> Error in unserialize(node$con) :
+#> MultisessionFuture (<none>) failed to receive message results from
+#> cluster RichSOCKnode #1 (PID 1746676 on localhost 'localhost'). The
+#> reason reported was 'error reading from connection'. Post-mortem
+#> diagnostic: No process exists with this PID, i.e. the localhost worker
+#> is no longer alive. Detected a non-exportable reference
+#> ('externalptr') in one of the globals ('x' of class 'big.matrix') used
+#> in the future expression. The total size of the 1 globals exported is
+#> 696 bytes. There is one global: 'x' (696 bytes of class 'S4')
+```
+
+We can protect against this by setting:
+
+```r
+options(future.globals.onReference = "error")
+```
+
+which gives:
+
+```r
+f <- future(dim(x), packages = "bigmemory")
+#> Error: Detected a non-exportable reference ('externalptr') in one
+#> of the globals ('x' of class 'big.matrix') used in the future
+#> expression
+```
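Editorial aside (not part of the vignette diff): bigmemory itself has a documented way around this that the hunks above do not show. Instead of exporting the `big.matrix`, one can export its descriptor and re-attach on the worker. The sketch below assumes bigmemory's `describe()`/`attach.big.matrix()` descriptor interface and a shared-memory `big.matrix` on the same host; treat it as an illustration rather than part of the vignette.

```r
library(bigmemory)
library(future)
plan(multisession, workers = 2)

x <- big.matrix(nrow = 3, ncol = 2, type = "integer")
x[1, 1] <- 42L

# The descriptor is a small, serializable handle to the shared-memory
# matrix; the worker re-attaches instead of receiving the external pointer.
desc <- describe(x)
f <- future({
  y <- attach.big.matrix(desc)
  y[1, 1]
}, packages = "bigmemory")
value(f)
#> [1] 42
```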
+
+
#### Package: cpp11
Another example is **[cpp11]**, which allows us to easily create R functions that are implemented in C++, e.g.
@@ -176,7 +295,7 @@ plan(multisession)
x <- rnorm(10)
n %<-% my_length(x)
n
-#> Error in .Call("_code_1748ff617940b9_my_length", x, PACKAGE = "code_1748ff617940b9") :
+#> Error in .Call("_code_1748ff617940b9_my_length", x, PACKAGE = "code_1748ff617940b9") :
#> "_code_1748ff617940b9_my_length" not available for .Call() for package "code_1748ff617940b9"
```
@@ -220,7 +339,7 @@ library(future)
plan(cluster, workers = 1L)
f <- future(sum_1_to_n(10, 0))
v <- value(f)
-## Error in .Primitive(".C")(, n = as.integer(n), x = as.double(x)) :
+## Error in .Primitive(".C")(, n = as.integer(n), x = as.double(x)) :
## NULL value passed as symbol address
```
@@ -266,7 +385,7 @@ f <- future({
stats::predict(model, test_input)
}, seed = TRUE)
pred <- value(f)
-## Error in do.call(object$predict, args) :
+## Error in do.call(object$predict, args) :
## 'what' must be a function or character string
```
@@ -285,9 +404,9 @@ Functions `serialize_model()` and `unserialize_model()` of the
non-exportable **keras** objects, e.g.
```r
-.model <- serialize_model(model)
+.model <- serialize_model(model) ## marshal
f <- future({
- model <- unserialize_model(.model)
+ model <- unserialize_model(.model) ## unmarshal
stats::predict(model, test_input)
}, seed = TRUE)
rm(.model) ## not needed anymore
@@ -329,6 +448,33 @@ we'll see that this is caught even before attempting to run this in parallel;
```
+#### Package: polars
+
+The **[polars]** package provides objects for performant processing on
+tabular data. However, these objects are tied to the R process that
+created them. If we attempt to use them in a parallel worker, we end
+up crashing the parallel worker:
+
+```r
+library(future)
+plan(multisession)
+
+library(polars)
+data <- as_polars_df(data.frame(x = 1:3))
+f <- future(dim(data), packages = "polars")
+v <- value(f)
+#> Error: Execution halted with the following contexts
+#> 0: In R: in `$.RPolarsDataFrame`
+#> 0: During function call [workRSOCK()]
+#> 1: This Polars object is not valid. Execute `rm(<object>)` to remove
+#> the object or restart the R session.
+```
@@ -106,6 +114,8 @@ -- C --
optimisation methods
chi_squared
probability distributions
+chol.greta_array
+Compute the Cholesky Factor of a Matrix
chol2inv
Functions overloaded by greta
chol2symm
@@ -120,13 +130,21 @@ -- C --
Functions overloaded by greta
cov2cor
Functions overloaded by greta
+cpu_only
+Set GPU or CPU usage
-- D --
+destroy_greta_deps
+Remove greta dependencies and remove miniconda
diag
Functions overloaded by greta
+dim.node
+generic to grab dimensions of nodes
+dim<-.unknowns
+set dims like on a matrix/array
dirichlet
probability distributions
dirichlet_multinomial
@@ -172,26 +190,30 @@ -- G --
gamma
probability distributions
+gpu_cpu
+Set GPU or CPU usage
+gpu_only
+Set GPU or CPU usage
gradient_descent
optimisation methods
greta
greta: simple and scalable statistical modelling in R
greta_array
create data greta arrays
-greta_notes_conda_create_error
-Retrieve python installation or error details.
-greta_notes_conda_create_output
-Retrieve python installation or error details.
-greta_notes_conda_install_error
-Retrieve python installation or error details.
-greta_notes_conda_install_output
-Retrieve python installation or error details.
-greta_notes_install_miniconda_error
-Retrieve python installation or error details.
-greta_notes_install_miniconda_output
-Retrieve python installation or error details.
+greta_create_conda_env
+Create conda environment for greta
+greta_deps_receipt
+Capture greta python dependencies.
+greta_deps_spec
+Specify python dependencies for greta
+greta_deps_tf_tfp
+Suggested valid Python dependencies for greta
+greta_install_miniconda
+Installs miniconda
greta_notes_tf_num_error
-Retrieve python installation or error details.
+Retrieve python messages.
+greta_set_install_logfile
+Set logfile path when installing greta
greta_sitrep
Greta Situation Report
@@ -232,6 +254,10 @@ -- I --
probability distributions
iprobit
transformation functions for greta arrays
+is.greta_array
+Is object a greta array?
+is.greta_mcmc_list
+Is object a 'greta_mcmc_list'?
-- J --
@@ -278,6 +304,8 @@ -- M --
-- N --
+nadam
+optimisation methods
negative_binomial
probability distributions
nelder_mead
@@ -293,6 +321,8 @@ -- O --
ones
create data greta arrays
+open_greta_install_log
+Read a greta logfile
operators
arithmetic, logical and relational operators for greta arrays
opt
@@ -316,6 +346,10 @@ -- P --
probability distributions
powell
optimisation methods
+print.greta_deps_spec
+Print method for greta python deps
+print.greta_mcmc_list
+Print method for greta MCMC list
print.greta_model
greta model objects
proximal_adagrad
@@ -353,6 +387,8 @@ -- R --
Functions overloaded by greta
rowSums
Functions overloaded by greta
+run_optimiser
+Dispatch optimisation method to right class
rwmh
MCMC samplers
@@ -370,8 +406,6 @@ -- S --
MCMC samplers
slsqp
optimisation methods
-stash-notes
-Retrieve python installation or error details.
stashed_samples
Statistical inference on greta models.
structures
@@ -414,6 +448,8 @@ -- W --
probability distributions
wishart
probability distributions
+write_greta_install_log
+Write greta dependency installation log file
-- Z --
diff --git a/revdep/library.noindex/greta/old/greta/CITATION b/revdep/library.noindex/greta/old/greta/CITATION
index f857195c..45bbb6b7 100644
--- a/revdep/library.noindex/greta/old/greta/CITATION
+++ b/revdep/library.noindex/greta/old/greta/CITATION
@@ -1,8 +1,8 @@
citHeader("To cite greta in publications use:")
-citEntry(entry = "Article",
+bibentry(bibtype = "Article",
title = "{greta}: simple and scalable statistical modelling in R",
- author = personList(as.person("Nick Golding")),
+ author = c(person(given = "Nick", family = "Golding", email = "nick.golding.research@gmail.com")),
journal = "Journal of Open Source Software",
year = "2019",
volume = "4",
diff --git a/revdep/library.noindex/greta/old/greta/DESCRIPTION b/revdep/library.noindex/greta/old/greta/DESCRIPTION
index a84b135c..d9ea9245 100644
--- a/revdep/library.noindex/greta/old/greta/DESCRIPTION
+++ b/revdep/library.noindex/greta/old/greta/DESCRIPTION
@@ -1,7 +1,7 @@
Type: Package
Package: greta
Title: Simple and Scalable Statistical Modelling in R
-Version: 0.4.3
+Version: 0.4.5
Authors@R: c(
person("Nick", "Golding", , "nick.golding.research@gmail.com", role = "aut",
comment = c(ORCID = "0000-0001-8916-5570")),
@@ -35,15 +35,15 @@ Imports: abind, callr, cli (>= 3.0.0), coda, future (>= 1.22.1), glue
(>= 1.5.1), methods, parallelly (>= 1.29.0), progress (>=
1.2.0), R6, reticulate (>= 1.19.0), tensorflow (>= 2.7.0),
yesno
-Suggests: bayesplot, covr, cramer, DiagrammeR, extraDistr, fields,
- ggplot2, knitr, lattice, MASS, MCMCpack, mockery, mvtnorm,
- rmarkdown, rmutil, spelling, testthat (>= 3.1.0), tidyverse,
- truncdist, withr
+Suggests: bayesplot, covr, cramer, DiagrammeR, DiagrammeRsvg,
+ extraDistr, fields, ggplot2, knitr, lattice, MASS, MCMCpack,
+ mockery, mvtnorm, rmarkdown, rmutil, rsvg, spelling, testthat
+ (>= 3.1.0), tidyverse, truncdist, withr
VignetteBuilder: knitr
Config/testthat/edition: 3
Encoding: UTF-8
Language: en-GB
-RoxygenNote: 7.2.0
+RoxygenNote: 7.3.1
SystemRequirements: Python (>= 2.7.0) with header files and shared
library; TensorFlow (v1.14; https://www.tensorflow.org/);
TensorFlow Probability (v0.7.0;
@@ -64,7 +64,7 @@ Collate: 'package.R' 'utils.R' 'greta_mcmc_list.R' 'tf_functions.R'
'test_if_forked_cluster.R' 'testthat-helpers.R' 'zzz.R'
'internals.R'
NeedsCompilation: no
-Packaged: 2022-09-08 13:42:15 UTC; njtierney
+Packaged: 2024-03-11 06:20:23 UTC; nick
Author: Nick Golding [aut] (),
Nicholas Tierney [aut, cre] (),
Simon Dirmeier [ctb],
@@ -81,5 +81,5 @@ Author: Nick Golding [aut] (),
Jian Yen [ctb]
Maintainer: Nicholas Tierney
Repository: CRAN
-Date/Publication: 2022-09-08 14:12:56 UTC
-Built: R 4.3.0; ; 2023-07-10 04:59:54 UTC; unix
+Date/Publication: 2024-03-11 08:10:02 UTC
+Built: R 4.4.0; ; 2024-04-06 10:05:44 UTC; unix
diff --git a/revdep/library.noindex/greta/old/greta/INDEX b/revdep/library.noindex/greta/old/greta/INDEX
index 2a334c4e..d2cb2787 100644
--- a/revdep/library.noindex/greta/old/greta/INDEX
+++ b/revdep/library.noindex/greta/old/greta/INDEX
@@ -9,9 +9,9 @@ functions functions for greta arrays
greta greta: simple and scalable statistical
modelling in R
greta_notes_install_miniconda_output
- Retrieve python installation or error details
+ Retrieve python installation or error details.
greta_sitrep Greta Situation Report
-inference statistical inference on greta models
+inference Statistical inference on greta models.
install_greta_deps Install Python dependencies for greta
internals internal greta methods
joint define joint distributions
diff --git a/revdep/library.noindex/greta/old/greta/Meta/Rd.rds b/revdep/library.noindex/greta/old/greta/Meta/Rd.rds
index e0ca9585..2c343ae7 100644
Binary files a/revdep/library.noindex/greta/old/greta/Meta/Rd.rds and b/revdep/library.noindex/greta/old/greta/Meta/Rd.rds differ
diff --git a/revdep/library.noindex/greta/old/greta/Meta/features.rds b/revdep/library.noindex/greta/old/greta/Meta/features.rds
index 97f580d7..ded31532 100644
Binary files a/revdep/library.noindex/greta/old/greta/Meta/features.rds and b/revdep/library.noindex/greta/old/greta/Meta/features.rds differ
diff --git a/revdep/library.noindex/greta/old/greta/Meta/hsearch.rds b/revdep/library.noindex/greta/old/greta/Meta/hsearch.rds
index a4fa88b9..e1ef5940 100644
Binary files a/revdep/library.noindex/greta/old/greta/Meta/hsearch.rds and b/revdep/library.noindex/greta/old/greta/Meta/hsearch.rds differ
diff --git a/revdep/library.noindex/greta/old/greta/Meta/links.rds b/revdep/library.noindex/greta/old/greta/Meta/links.rds
index de5b8c39..113dee74 100644
Binary files a/revdep/library.noindex/greta/old/greta/Meta/links.rds and b/revdep/library.noindex/greta/old/greta/Meta/links.rds differ
diff --git a/revdep/library.noindex/greta/old/greta/Meta/nsInfo.rds b/revdep/library.noindex/greta/old/greta/Meta/nsInfo.rds
index cf68a186..482f2a68 100644
Binary files a/revdep/library.noindex/greta/old/greta/Meta/nsInfo.rds and b/revdep/library.noindex/greta/old/greta/Meta/nsInfo.rds differ
diff --git a/revdep/library.noindex/greta/old/greta/Meta/package.rds b/revdep/library.noindex/greta/old/greta/Meta/package.rds
index 4b4dcd4d..04edb806 100644
Binary files a/revdep/library.noindex/greta/old/greta/Meta/package.rds and b/revdep/library.noindex/greta/old/greta/Meta/package.rds differ
diff --git a/revdep/library.noindex/greta/old/greta/Meta/vignette.rds b/revdep/library.noindex/greta/old/greta/Meta/vignette.rds
index 71328768..992ef11e 100644
Binary files a/revdep/library.noindex/greta/old/greta/Meta/vignette.rds and b/revdep/library.noindex/greta/old/greta/Meta/vignette.rds differ
diff --git a/revdep/library.noindex/greta/old/greta/NAMESPACE b/revdep/library.noindex/greta/old/greta/NAMESPACE
index f0bdbda2..767031e7 100644
--- a/revdep/library.noindex/greta/old/greta/NAMESPACE
+++ b/revdep/library.noindex/greta/old/greta/NAMESPACE
@@ -20,6 +20,7 @@ S3method("[",greta_array)
S3method("[<-",greta_array)
S3method("^",greta_array)
S3method("dim<-",greta_array)
+S3method("dim<-",unknowns)
S3method("|",greta_array)
S3method(Arg,greta_array)
S3method(Conj,greta_array)
@@ -42,8 +43,12 @@ S3method(as.greta_array,logical)
S3method(as.greta_array,matrix)
S3method(as.greta_array,node)
S3method(as.greta_array,numeric)
+S3method(as.greta_model,dag_class)
S3method(as.matrix,greta_array)
S3method(as.mcmc.list,greta_mcmc_list)
+S3method(as.unknowns,array)
+S3method(as.unknowns,matrix)
+S3method(as.unknowns,unknowns)
S3method(as_data,default)
S3method(as_data,greta_array)
S3method(asin,greta_array)
@@ -77,6 +82,7 @@ S3method(diag,default)
S3method(diag,greta_array)
S3method(digamma,greta_array)
S3method(dim,greta_array)
+S3method(dim,node)
S3method(eigen,default)
S3method(eigen,greta_array)
S3method(exp,greta_array)
diff --git a/revdep/library.noindex/greta/old/greta/NEWS.md b/revdep/library.noindex/greta/old/greta/NEWS.md
index d5fc77ea..27c8543c 100644
--- a/revdep/library.noindex/greta/old/greta/NEWS.md
+++ b/revdep/library.noindex/greta/old/greta/NEWS.md
@@ -1,3 +1,15 @@
+# greta 0.4.5
+
+## Bug Fixes
+
+- Remove trailing comma bug in glue #618
+
+# greta 0.4.4
+
+## Bug fixes
+
+- Some small documentation bugs were fixed, namely the sentinel "_PACKAGE" documentation, and various small changes to correctly export S3 methods.
+
# greta 0.4.3
## Features
diff --git a/revdep/library.noindex/greta/old/greta/R/greta.rdb b/revdep/library.noindex/greta/old/greta/R/greta.rdb
index e1caf34b..3a670104 100644
Binary files a/revdep/library.noindex/greta/old/greta/R/greta.rdb and b/revdep/library.noindex/greta/old/greta/R/greta.rdb differ
diff --git a/revdep/library.noindex/greta/old/greta/R/greta.rdx b/revdep/library.noindex/greta/old/greta/R/greta.rdx
index 8dc7242e..0ba32af2 100644
Binary files a/revdep/library.noindex/greta/old/greta/R/greta.rdx and b/revdep/library.noindex/greta/old/greta/R/greta.rdx differ
diff --git a/revdep/library.noindex/greta/old/greta/WORDLIST b/revdep/library.noindex/greta/old/greta/WORDLIST
index ec50e653..c971b48e 100644
--- a/revdep/library.noindex/greta/old/greta/WORDLIST
+++ b/revdep/library.noindex/greta/old/greta/WORDLIST
@@ -15,10 +15,10 @@ Inf
Jupyter
Lmax
Lmin
-Maëlle
Miniconda
Nesterov
NumPy
+ORCID
OpenBUGS
PSAT
Poisson
@@ -31,7 +31,6 @@ TensorFlow
TensorFlow's
Tensorflow
WinBUGS
-Wujciak
XLA
bayesplot
cholesky
@@ -63,7 +62,6 @@ pkgdown
poisson
polygamma
schoolers
-snapshotting
softmax
speedup
st
diff --git a/revdep/library.noindex/greta/old/greta/doc/example_models.R b/revdep/library.noindex/greta/old/greta/doc/example_models.R
index 60c84318..32833559 100644
--- a/revdep/library.noindex/greta/old/greta/doc/example_models.R
+++ b/revdep/library.noindex/greta/old/greta/doc/example_models.R
@@ -4,580 +4,3 @@ knitr::opts_chunk$set(comment = NA,
cache = TRUE)
library (greta)
-## ----linear_greta-------------------------------------------------------------
-# variables & priors
-int <- normal(0, 10)
-coef <- normal(0, 10)
-sd <- cauchy(0, 3, truncation = c(0, Inf))
-
-# linear predictor
-mu <- int + coef * attitude$complaints
-
-# observation model
-distribution(attitude$rating) <- normal(mu, sd)
-
-## ----multiple_linear_data-----------------------------------------------------
-data(attitude)
-design <- as.matrix(attitude[, 2:7])
-
-## ----multiple_linear_greta----------------------------------------------------
-int <- normal(0, 10)
-coefs <- normal(0, 10, dim = ncol(design))
-sd <- cauchy(0, 3, truncation = c(0, Inf))
-
-# matrix multiplication is more efficient than multiplying the coefficients
-# separately
-mu <- int + design %*% coefs
-
-distribution(attitude$rating) <- normal(mu, sd)
-
-## ----multiple_linear_warpbreaks_data------------------------------------------
-data("warpbreaks")
-X <- as_data(model.matrix(breaks ~ wool + tension, warpbreaks))
-y <- as_data(warpbreaks$breaks)
-
-## ----multiple_linear_warpbreaks_greta-----------------------------------------
-int <- variable()
-coefs <- normal(0, 5, dim = ncol(X) - 1)
-beta <- c(int, coefs)
-
-eta <- X %*% beta
-
-distribution(y) <- poisson(exp(eta))
-
-## ----multiple_linear_multilogit_data------------------------------------------
-data(iris)
-X <- as_data(cbind(1, iris[, 1:4]))
-y <- model.matrix(~ Species - 1, iris)
-P <- ncol(X)
-K <- ncol(y)
-
-## ----multiple_linear_multilogit_greta-----------------------------------------
-beta <- normal(0, 5, dim = c(P, K - 1))
-eta <- X %*% beta
-prob <- imultilogit(eta)
-distribution(y) <- categorical(prob)
-
-## ----multiple_linear_lasso_data-----------------------------------------------
-data(attitude)
-design <- as.matrix(attitude[, 2:7])
-
-## ----multiple_linear_lasso_greta----------------------------------------------
-int <- normal(0, 10)
-sd <- cauchy(0, 3, truncation = c(0, Inf))
-
-tau <- exponential(0.5, dim = ncol(design))
-coefs <- normal(0, tau)
-mu <- int + design %*% coefs
-
-distribution(attitude$rating) <- normal(mu, sd)
-
-## ----hierarchical_linear_greta------------------------------------------------
-# linear model parameters
-int <- normal(0, 10)
-coef <- normal(0, 10)
-sd <- cauchy(0, 3, truncation = c(0, Inf))
-
-# hierarchical model for species effect; use the first species as the baseline
-# like in lm()
-species_sd <- lognormal(0, 1)
-species_offset <- normal(0, species_sd, dim = 2)
-species_effect <- rbind(0, species_offset)
-species_id <- as.numeric(iris$Species)
-
-# model
-mu <- int + coef * iris$Sepal.Width + species_effect[species_id]
-distribution(iris$Sepal.Length) <- normal(mu, sd)
-
-## ----hierarchical_linear_slopes_greta-----------------------------------------
-# linear model parameters
-int <- normal(0, 10)
-coef <- normal(0, 10)
-sd <- cauchy(0, 3, truncation = c(0, Inf))
-
-species_id <- as.numeric(iris$Species)
-
-# random intercepts
-species_int_sd <- lognormal(0, 1)
-species_int <- normal(0, species_int_sd, dim = 2)
-species_int_eff <- rbind(0, species_int)
-
-# random slopes
-species_slope_sd <- lognormal(0, 1)
-species_slope <- normal(0, species_slope_sd, dim = 2)
-species_slope_eff <- rbind(0, species_slope)
-
-# model
-mu <- int + coef * iris$Sepal.Width + species_int_eff[species_id] + iris$Sepal.Width * species_slope_eff[species_id]
-distribution(iris$Sepal.Length) <- normal(mu, sd)
-
-## ----hierarchical_linear_slopes_corr_greta------------------------------------
-# model matrix
-modmat <- model.matrix(~ Sepal.Width, iris)
-# index of species
-jj <- as.numeric(iris$Species)
-
-M <- ncol(modmat) # number of varying coefficients
-N <- max(jj) # number of species
-
-# prior on the standard deviation of the varying coefficient
-tau <- exponential(0.5, dim = M)
-
-# prior on the correlation between the varying coefficient
-Omega <- lkj_correlation(3, M)
-
-# optimization of the varying coefficient sampling through
-# cholesky factorization and whitening
-Omega_U <- chol(Omega)
-Sigma_U <- sweep(Omega_U, 2, tau, "*")
-z <- normal(0, 1, dim = c(N, M))
-ab <- z %*% Sigma_U # equivalent to: ab ~ multi_normal(0, Sigma_U)
-
-# the linear predictor
-mu <- rowSums(ab[jj,] * modmat)
-
-# the residual variance
-sigma_e <- cauchy(0, 3, truncation = c(0, Inf))
-
-#model
-y <- iris$Sepal.Length
-distribution(y) <- normal(mu, sigma_e)
-
-## ----linear_uninformative_greta-----------------------------------------------
-# variables & priors
-int <- variable()
-coef <- variable()
-sd <- cauchy(0, 3, truncation = c(0, Inf))
-
-# linear predictor
-mu <- int + coef * attitude$complaints
-
-# observation model
-distribution(attitude$rating) <- normal(mu, sd)
-
-## ----linear_ridge_greta-------------------------------------------------------
-# variables & priors
-int <- variable()
-sd <- cauchy(0, 3, truncation = c(0, Inf))
-
-tau <- inverse_gamma(1, 1)
-coef <- normal(0, tau)
-
-# linear predictor
-mu <- int + coef * attitude$complaints
-
-# observation model
-distribution(attitude$rating) <- normal(mu, sd)
-
-## ----linear_lasso_greta-------------------------------------------------------
-# variables & priors
-int <- variable()
-sd <- inverse_gamma(1, 1)
-
-lambda <- gamma(1, 1)
-tau <- exponential(0.5 * lambda**2)
-coef <- normal(0, tau)
-
-# linear predictor
-mu <- int + coef * attitude$complaints
-
-# observation model
-distribution(attitude$rating) <- normal(mu, sd)
-
-## ----linear_horseshoe_greta---------------------------------------------------
-horseshoe <- function (tau = 1, dim = NULL) {
- lambda <- cauchy(0, 1, truncation = c(0, Inf), dim = dim)
- sd <- tau ^ 2 * lambda ^ 2
- normal(0, sd, dim = dim)
-}
-
-# variables & priors
-int <- variable()
-sd <- inverse_gamma(1, 1)
-coef <- horseshoe()
-
-# linear predictor
-mu <- int + coef * attitude$complaints
-
-# observation model
-distribution(attitude$rating) <- normal(mu, sd)
-
-## ----linear_finnish_horseshoe_greta-------------------------------------------
-regularized_horseshoe <- function (tau = 1, c = 1, dim = NULL) {
- stopifnot(c > 0)
- lambda <- cauchy(0, 1, truncation = c(0, Inf), dim = dim)
- lambda_tilde <- (c^2 * lambda^2) / (c^2 + tau^2 * lambda^2)
- sd <- tau ^ 2 * lambda_tilde ^ 2
- normal(0, sd, dim = dim)
-}
-
-# variables & priors
-int <- variable()
-sd <- inverse_gamma(1, 1)
-coef <- regularized_horseshoe()
-
-# linear predictor
-mu <- int + coef * attitude$complaints
-
-# observation model
-distribution(attitude$rating) <- normal(mu, sd)
-
-## ----hierarchical_linear_general_greta----------------------------------------
-int <- normal(0, 10)
-coef <- normal(0, 10)
-sd <- cauchy(0, 3, truncation = c(0, Inf))
-
-n_species <- length(unique(iris$Species))
-species_id <- as.numeric(iris$Species)
-
-Z <- model.matrix(~ Species + Sepal.Length * Species - 1, data = iris)
-
-gamma_matrix <- multivariate_normal(matrix(0, 1, 2),
- diag(2),
- n_realisations = 3)
-gamma <- c(gamma_matrix)
-
-wi <- as_data(iris$Sepal.Width)
-Z <- as_data(Z)
-mu <- int + coef * wi + Z %*% gamma
-
-distribution(iris$Sepal.Length) <- normal(mu, sd)
-
-## ----hierarchical_linear_marginal_greta---------------------------------------
-int <- variable()
-coef <- normal(0, 5)
-sd <- cauchy(0, 3, truncation = c(0, Inf))
-
-n_species <- length(unique(iris$Species))
-species_id <- as.numeric(iris$Species)
-
-Z <- model.matrix(~ Species + Sepal.Length * Species - 1, data = iris)
-G <- zeros(n_species * 2, n_species * 2)
-
-for (s in unique(species_id)) {
- G[c(s, s + n_species), c(s, s + n_species)] <- diag(2)
-}
-
-mu <- int + coef * iris$Sepal.Width
-V <- zeros(nrow(iris), nrow(iris))
-diag(V) <- sd
-
-Z <- as_data(Z)
-V <- V + Z %*% G %*% t(Z)
-
-sep <- t(iris$Sepal.Width)
-distribution(sep) <- multivariate_normal(t(mu), V)
-
-## ----bayesian_neural_network_data, highlight = FALSE--------------------------
-N <- 100
-p <- 10
-
-set.seed(23)
-X <- matrix(rnorm(N * p), N)
-beta <- rnorm(10)
-y <- X %*% beta + rnorm(N, sd = 0.1)
-
-## ----bayesian_neural_network_greta--------------------------------------------
-neural_network <- function(x)
-{
- # this can be arbitrarily complex, e.g. multiple hidden layers
- x %*% weights
-}
-
-weights <- normal(0, 1, dim = c(p, 1))
-sd <- inverse_gamma(1, 1)
-
-distribution(y) <- normal(neural_network(X), sd)
-
-## ----factor_analysis_data, highlight = FALSE----------------------------------
-generate.data <- function(n = 100, p = 5, q = 2, psi = diag(rgamma(p, 1, 1)))
-{
- W <- matrix(rnorm(p * q, 1), p, q)
- Z <- matrix(rnorm(q * n, 2), q, n)
- WZ <- W %*% Z
-
- X <- matrix(0, n, p)
- for (i in seq_len(n)) {
- X[i, ] <- MASS::mvrnorm(1, WZ[, i], psi)
- }
-
- list(X = X, W = W, Z = Z, psi = psi)
-}
-
-n <- 100
-p <- 5
-q <- 2
-data <- generate.data(n = n, p = p, q = q)
-X <- data$X
-
-## ----factor_analysis----------------------------------------------------------
-W <- normal(0, 1, dim = c(p, q))
-Z <- normal(0, 1, dim = c(q, n))
-psi <- zeros(p, p)
-diag(psi) <- inverse_gamma(1, 1, dim = p)
-
-distribution(X) <- multivariate_normal(t(W %*% Z), psi)
-
-## ----air_data, highlight = FALSE----------------------------------------------
-y <- c(21, 20, 15)
-n <- c(48, 34, 21)
-Z <- c(10, 30, 50)
-alpha <- 4.48
-beta <- 0.76
-sigma2 <- 81.14
-sigma <- sqrt(sigma2)
-tau <- 1 / sigma2
-J <- 3
-
-## ----air_greta----------------------------------------------------------------
-theta <- normal(0, 32, dim = 2)
-mu <- alpha + beta * Z
-X <- normal(mu, sigma)
-p <- ilogit(theta[1] + theta[2] * X)
-distribution(y) <- binomial(n, p)
-
-## ----air_stan, echo = FALSE---------------------------------------------------
-cat(readLines('https://raw.githubusercontent.com/stan-dev/example-models/master/bugs_examples/vol2/air/air.stan'), sep = '\n')
-
-## ----beetles_data, highlight = FALSE------------------------------------------
-x <- c(1.6907, 1.7242, 1.7552, 1.7842, 1.8113, 1.8369, 1.8610, 1.8839)
-n <- c(59, 60, 62, 56, 63, 59, 62, 60)
-r <- c(6, 13, 18, 28, 52, 53, 61, 60)
-N <- 8
-
-## ----beetles_greta------------------------------------------------------------
-alpha_star <- normal(0, 32)
-beta <- normal(0, 32)
-p <- ilogit(alpha_star + beta * (x - mean(x)))
-distribution(r) <- binomial(n, p)
-
-alpha <- alpha_star - beta * mean(x)
-rhat <- p * n
-
-## ----beetles_stan, echo = FALSE-----------------------------------------------
-cat(readLines('https://raw.githubusercontent.com/stan-dev/example-models/master/bugs_examples/vol2/beetles/beetles_logit.stan'), sep = '\n')
-
-## ----lightspeed_data, highlight = FALSE---------------------------------------
-y <- c(28, 26, 33, 24, 34, -44, 27, 16, 40, -2, 29, 22, 24, 21, 25,
- 30, 23, 29, 31, 19, 24, 20, 36, 32, 36, 28, 25, 21, 28, 29,
- 37, 25, 28, 26, 30, 32, 36, 26, 30, 22, 36, 23, 27, 27, 28,
- 27, 31, 27, 26, 33, 26, 32, 32, 24, 39, 28, 24, 25, 32, 25,
- 29, 27, 28, 29, 16, 23)
-n <- length(y)
-
-## ----lightspeed_greta---------------------------------------------------------
-beta <- variable()
-sigma <- variable(lower = 0)
-
-distribution(y) <- normal(beta, sigma)
-
-## ----lightspeed_stan, echo = FALSE--------------------------------------------
-cat(readLines('https://raw.githubusercontent.com/stan-dev/example-models/master/ARM/Ch.8/lightspeed.stan'), sep = '\n')
-
-## ----schools_data, highlight = FALSE------------------------------------------
-y <- c(28, 8, -3, 7, -1, 1, 18, 12)
-sigma_y <- c(15, 10, 16, 11, 9, 11, 10, 18)
-N <- length(y)
-
-## ----schools_greta------------------------------------------------------------
-sigma_eta <- inverse_gamma(1, 1)
-eta <- normal(0, sigma_eta, dim=N)
-
-mu_theta <- normal(0, 100)
-xi <- normal(0, 5)
-theta <- mu_theta + xi * eta
-
-distribution(y) <- normal(theta, sigma_y)
-
-## ----schools_stan, echo = FALSE-----------------------------------------------
-cat(readLines('https://raw.githubusercontent.com/stan-dev/example-models/master/ARM/Ch.19/schools.stan'), sep = '\n')
-
-## ----data_logistic, highlight = FALSE-----------------------------------------
-# make fake data
-n_env <- 3
-n_sites <- 20
-
-# n_sites x n_env matrix of environmental variables
-env <- matrix(rnorm(n_sites * n_env), nrow = n_sites)
-# n_sites observations of species presence or absence
-occupancy <- rbinom(n_sites, 1, 0.5)
-
-## ----logistic_greta-----------------------------------------------------------
-alpha <- normal(0, 10)
-beta <- normal(0, 10, dim = n_env)
-
-# logit-linear model
-linear_predictor <- alpha + env %*% beta
-p <- ilogit(linear_predictor)
-
-# distribution (likelihood) over observed values
-distribution(occupancy) <- bernoulli(p)
-
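A hedged sketch (not in the original file) of how the fitted occupancy probabilities could be extracted once the model is sampled, using greta's calculate():

# sketch only: posterior draws of the site-level probabilities
m <- model(alpha, beta)
draws <- mcmc(m)
p_draws <- calculate(p, values = draws)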
-## ----data_poisson, highlight = FALSE------------------------------------------
-# make fake data
-n_env <- 3
-n_sites <- 20
-
-# n_sites x n_env matrix of environmental variables
-env <- matrix(rnorm(n_sites * n_env), nrow = n_sites)
-# n_sites observations of species abundance
-occupancy <- rpois(n_sites, 5)
-
-## ----poisson_greta------------------------------------------------------------
-alpha <- normal(0, 10)
-beta <- normal(0, 10, dim = n_env)
-linear_predictor <- alpha + env %*% beta
-lambda <- exp(linear_predictor)
-distribution(occupancy) <- poisson(lambda)
-
-## ----data_logistic_error_term_greta, highlight = FALSE------------------------
-# make fake data
-n_env <- 3
-n_sites <- 20
-n_obs <- 5
-
-# n_sites x n_env matrix of environmental variables
-env <- matrix(rnorm(n_sites * n_env), nrow = n_sites)
-# n_sites observations of species presence or absence over n_obs visits
-occupancy <- rbinom(n_sites, n_obs, 0.5)
-
-## ----logistic_error_term_greta------------------------------------------------
-alpha <- normal(0, 10)
-beta <- normal(0, 10, dim = n_env)
-error <- normal(0, 10, dim = n_sites)
-
-# logit-linear model with extra variation
-linear_predictor <- alpha + env %*% beta + error
-p <- ilogit(linear_predictor)
-
-# distribution (likelihood) over observed values
-distribution(occupancy) <- binomial(n_obs, p)
-
-## ----data_multispecies_bernoulli, highlight = FALSE---------------------------
-# make fake data
-n_species <- 5
-n_env <- 3
-n_sites <- 20
-
-env <- matrix(rnorm(n_sites * n_env), nrow = n_sites)
-occupancy <- matrix(rbinom(n_species * n_sites, 1, 0.5), nrow = n_sites)
-
-## ----multispecies_bernoulli_greta---------------------------------------------
-alpha <- normal(0, 10, dim = n_species)
-beta <- normal(0, 10, dim = c(n_env, n_species))
-
-env_effect <- env %*% beta
-
-# add intercepts for all species
-linear_predictor <- sweep(env_effect, 2, alpha, FUN = '+')
-
-# ilogit of linear predictor
-p <- ilogit(linear_predictor)
-
-# a single observation means our data are bernoulli distributed
-distribution(occupancy) <- bernoulli(p)
-
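The sweep() call above adds the species-level intercepts in alpha across the columns of env_effect. A plain-R illustration of that step (toy numbers, not greta code):

m0 <- matrix(0, nrow = 3, ncol = 2)
sweep(m0, 2, c(10, 20), FUN = '+')  # adds 10 to column 1 and 20 to column 2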
-## ----data_multispecies_partially_pool, highlight = FALSE----------------------
-# make fake data
-n_species <- 5
-n_env <- 1
-n_sites <- 50
-
-env <- matrix(rnorm(n_sites * n_env), nrow = n_sites)
-occupancy <- matrix(rbinom(n_sites * n_species, 1, 0.5), nrow = n_sites)
-
-## ----multispecies_partially_pool_greta----------------------------------------
-global_alpha <- normal(0, 10, dim = 1)
-global_alpha_sd <- uniform(0, 10, dim = 1)
-alpha <- normal(global_alpha, global_alpha_sd, dim = n_species)
-
-global_betas <- normal(0, 10, dim = n_env)
-global_betas_sd <- uniform(0, 10, dim = n_env)
-beta <- normal(global_betas, global_betas_sd, dim = c(n_env, n_species))
-
-env_effect <- env %*% beta
-
-# add intercepts for all species
-linear_predictor <- sweep(env_effect, 2, alpha, FUN = '+')
-
-# ilogit of linear predictor
-p <- ilogit(linear_predictor)
-
-distribution(occupancy) <- bernoulli(p)
-
-## ----data_multilevel, highlight = FALSE---------------------------------------
-# make fake data
-n_species <- 3
-n_env <- 1
-n_sites <- 5
-n_traits <- 1
-
-# n_sites x n_env matrix of environmental variables
-env <- matrix(rnorm(n_sites * n_env), nrow = n_sites)
-# n_species * n_traits matrix of trait variables
-traits <- matrix(rnorm(n_species * n_traits), nrow = n_species)
-# n_sites * n_species matrix of observed occupancy
-occupancy <- matrix(rbinom(n_sites * n_species, 1, 0.5), nrow = n_sites)
-
-## ----multilevel_greta---------------------------------------------------------
-# include a column of 1's for intercept estimation in the sub-model (traits) and base model
-traits <- cbind(rep(1, n_species), traits)
-env <- cbind(rep(1, n_sites), env)
-
-# redefine n_env and n_traits after adding columns for intercepts
-n_env <- ncol(env)
-n_traits <- ncol(traits)
-
-# sub-model parameters have normal prior distributions
-g <- normal(0, 10, dim = c(n_env, n_traits))
-# parameters of the base model are a function of the parameters of the sub-model
-beta <- g %*% t(traits)
-
-# use the coefficients to get the model linear predictor
-linear_predictor <- env %*% beta
-
-# use the logit link to get probabilities of occupancy
-p <- ilogit(linear_predictor)
-
-# data are bernoulli distributed
-distribution(occupancy) <- bernoulli(p)
-
-## ----cjs_data, highlight = FALSE----------------------------------------------
-n_obs <- 100
-n_time <- 20
-y <- matrix(sample(c(0, 1), size = (n_obs * n_time), replace = TRUE),
- ncol = n_time)
-
-## ----cjs_greta----------------------------------------------------------------
-# data summaries
-first_obs <- apply(y, 1, function(x) min(which(x > 0)))
-final_obs <- apply(y, 1, function(x) max(which(x > 0)))
-obs_id <- apply(y, 1, function(x) seq(min(which(x > 0)), max(which(x > 0)), by = 1)[-1])
-obs_id <- unlist(obs_id)
-capture_vec <- apply(y, 1, function(x) x[min(which(x > 0)):max(which(x > 0))][-1])
-capture_vec <- unlist(capture_vec)
-
-# priors
-phi <- beta(1, 1, dim = n_time)
-p <- beta(1, 1, dim = n_time)
-
-# derived parameter
-chi <- ones(n_time)
-for (i in seq_len(n_time - 1)) {
- tn <- n_time - i
- chi[tn] <- (1 - phi[tn]) + phi[tn] * (1 - p[tn + 1]) * chi[tn + 1]
-}
-
-# dummy variables
-alive_data <- ones(length(obs_id)) # definitely alive
-not_seen_last <- final_obs != 20 # ignore observations in last timestep
-final_observation <- ones(sum(not_seen_last)) # final observation
-
-# set likelihoods
-distribution(alive_data) <- bernoulli(phi[obs_id - 1])
-distribution(capture_vec) <- bernoulli(p[obs_id])
-distribution(final_observation) <- bernoulli(chi[final_obs[not_seen_last]])
-
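A plain-R sanity check of the chi recursion above, with illustrative constant values for survival and recapture probability (not part of the vignette):

phi_test <- rep(0.8, 4)
p_test <- rep(0.5, 4)
chi_test <- rep(1, 4)
for (i in 1:3) {
  tn <- 4 - i
  chi_test[tn] <- (1 - phi_test[tn]) + phi_test[tn] * (1 - p_test[tn + 1]) * chi_test[tn + 1]
}
chi_test  # probability of never being seen again after time tn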
-## ----cjs_stan, echo = FALSE---------------------------------------------------
-cat(readLines('https://raw.githubusercontent.com/stan-dev/example-models/master/misc/ecology/mark-recapture/cjs-K.stan'), sep = '\n')
-
diff --git a/revdep/library.noindex/greta/old/greta/doc/example_models.html b/revdep/library.noindex/greta/old/greta/doc/example_models.html
index 66c7d79e..9d2171f0 100644
--- a/revdep/library.noindex/greta/old/greta/doc/example_models.html
+++ b/revdep/library.noindex/greta/old/greta/doc/example_models.html
@@ -29,111 +29,16 @@
+code{white-space: pre-wrap;}
+span.smallcaps{font-variant: small-caps;}
+span.underline{text-decoration: underline;}
+div.column{display: inline-block; vertical-align: top; width: 50%;}
+div.hanging-indent{margin-left: 1.5em; text-indent: -1.5em;}
+ul.task-list{list-style: none;}
+
-
-
-
@@ -339,435 +244,32 @@ Common models
Below are a few examples of common statistical models implemented in
greta.
-
-Linear regression
-A simple, one-variable Bayesian linear regression model using the
-attitude data
-# variables & priors
-int <- normal(0, 10)
-coef <- normal(0, 10)
-sd <- cauchy(0, 3, truncation = c(0, Inf))
-
-# linear predictor
-mu <- int + coef * attitude$complaints
-
-# observation model
-distribution(attitude$rating) <- normal(mu, sd)
-
-
-Multiple linear regression
-A multi-variable Bayesian linear regression model using the attitude
-data
-data(attitude)
-design <- as.matrix(attitude[, 2:7])
-
-int <- normal(0, 10)
-coefs <- normal(0, 10, dim = ncol(design))
-sd <- cauchy(0, 3, truncation = c(0, Inf))
-
-# matrix multiplication is more efficient than multiplying the coefficients
-# separately
-mu <- int + design %*% coefs
-
-distribution(attitude$rating) <- normal(mu, sd)
-
-
-Multiple Poisson regression
-A multiple Bayesian Poisson regression model using the warpbreaks data.
-data("warpbreaks")
-<- as_data(model.matrix(breaks ~ wool + tension, warpbreaks))
- X <- as_data(warpbreaks$breaks) y
-<- variable()
- int <- normal(0, 5, dim = ncol(X) - 1)
- coefs <- c(int, coefs)
- beta
-<- X %*% beta
- eta
-distribution(y) <- poisson(exp(eta))
-
-
-Multiple categorical regression
-A multi-variable Bayesian categorical regression model using the iris
-data.
-data(iris)
-X <- as_data(cbind(1, iris[, 1:4]))
-y <- model.matrix(~ Species - 1, iris)
-P <- ncol(X)
-K <- ncol(y)
-
-beta <- normal(0, 5, dim = c(P, K - 1))
-eta <- X %*% beta
-prob <- imultilogit(eta)
-distribution(y) <- categorical(prob)
-
-
-Multiple linear regression with LASSO prior
-A multi-variable Bayesian linear regression model using an
-exponential-normal prior for the coefficients.
-data(attitude)
-design <- as.matrix(attitude[, 2:7])
-
-int <- normal(0, 10)
-sd <- cauchy(0, 3, truncation = c(0, Inf))
-
-tau <- exponential(0.5, dim = ncol(design))
-coefs <- normal(0, tau)
-mu <- int + design %*% coefs
-
-distribution(attitude$rating) <- normal(mu, sd)
-
-
-Hierarchical linear regression
-A hierarchical, Bayesian linear regression model using the iris data,
-with random intercepts for each of the three species.
-# linear model parameters
-int <- normal(0, 10)
-coef <- normal(0, 10)
-sd <- cauchy(0, 3, truncation = c(0, Inf))
-
-# hierarchical model for species effect; use the first species as the baseline
-# like in lm()
-species_sd <- lognormal(0, 1)
-species_offset <- normal(0, species_sd, dim = 2)
-species_effect <- rbind(0, species_offset)
-species_id <- as.numeric(iris$Species)
-
-# model
-mu <- int + coef * iris$Sepal.Width + species_effect[species_id]
-distribution(iris$Sepal.Length) <- normal(mu, sd)
-
-
-Random intercept-slope model
-A hierarchical, Bayesian linear regression model using the iris data,
-with random intercepts and slopes for each of the three species. The
-slopes and intercepts for each species are uncorrelated in this
-example.
-# linear model parameters
-int <- normal(0, 10)
-coef <- normal(0, 10)
-sd <- cauchy(0, 3, truncation = c(0, Inf))
-
-species_id <- as.numeric(iris$Species)
-
-# random intercepts
-species_int_sd <- lognormal(0, 1)
-species_int <- normal(0, species_int_sd, dim = 2)
-species_int_eff <- rbind(0, species_int)
-
-# random slopes
-species_slope_sd <- lognormal(0, 1)
-species_slope <- normal(0, species_slope_sd, dim = 2)
-species_slope_eff <- rbind(0, species_slope)
-
-# model
-mu <- int + coef * iris$Sepal.Width + species_int_eff[species_id] +
-  iris$Sepal.Width * species_slope_eff[species_id]
-distribution(iris$Sepal.Length) <- normal(mu, sd)
-
-
Common Bayesian priors
The following examples show some common Bayesian priors of which some
induce sparsity.
-
-Improper flat prior
-A simple, one-variable Bayesian linear regression model that uses
-flat priors for the coefficients. A flat prior using
-variable puts an unbounded uniform distribution on the
-parameter. With unconstrained flat priors, the posterior will be
-proportional to the likelihood and the MAP will correspond to the MLE.
-Flat priors are usually chosen when there is little knowledge about the
-parameters available.
-# variables & priors
-int <- variable()
-coef <- variable()
-sd <- cauchy(0, 3, truncation = c(0, Inf))
-
-# linear predictor
-mu <- int + coef * attitude$complaints
-
-# observation model
-distribution(attitude$rating) <- normal(mu, sd)
-
-
-Ridge prior
-Here we estimate a simple, one-variable Bayesian linear regression
-model that uses a ridge prior. The ridge prior has a
-frequentist interpretation where it is used as a penalty for regression
-coefficients. Among other effects, the penalty shrinks the coefficients
-towards zero to reduce variance without setting them to zero. The
-Bayesian version uses a normal distribution for the slopes and an inverse
-gamma prior for the strength of the penalty. Note that since the prior
-on our intercept is still improper, the joint prior is also
-improper.
-# variables & priors
-int <- variable()
-sd <- cauchy(0, 3, truncation = c(0, Inf))
-
-tau <- inverse_gamma(1, 1)
-coef <- normal(0, tau)
-
-# linear predictor
-mu <- int + coef * attitude$complaints
-
-# observation model
-distribution(attitude$rating) <- normal(mu, sd)
-
-
-Exponential-normal prior
-In this example we infer the parameters of a one-variable Bayesian
-linear regression model using an exponential-normal prior. A compound
-exponential-normal prior can be interpreted as an equivalent to the
-frequentist LASSO. The exponential-normal prior yields a posterior that
-is pooled towards zero. An exponential-normal prior, or equivalently a
-Laplace prior, is consequently often chosen when a sparse solution is
-assumed, which, for instance, is a natural scenario in many biological
-settings.
-# variables & priors
-int <- variable()
-sd <- inverse_gamma(1, 1)
-
-lambda <- gamma(1, 1)
-tau <- exponential(0.5 * lambda**2)
-coef <- normal(0, tau)
-
-# linear predictor
-mu <- int + coef * attitude$complaints
-
-# observation model
-distribution(attitude$rating) <- normal(mu, sd)
-
-
-Horseshoe prior
-A simple, one-variable Bayesian linear regression model using a
-horseshoe prior. The horseshoe, just as the LASSO, can be used when the
-slopes are assumed to be sparse. According to the original publication:
-> its flat, Cauchy-like tails allow strong signals to remain large
-> […] a posteriori. Yet its infinitely tall spike at the origin
-> provides severe shrinkage for the zero elements
-horseshoe <- function (tau = 1, dim = NULL) {
-  lambda <- cauchy(0, 1, truncation = c(0, Inf), dim = dim)
-  sd <- tau ^ 2 * lambda ^ 2
-  normal(0, sd, dim = dim)
-}
-
-# variables & priors
-int <- variable()
-sd <- inverse_gamma(1, 1)
-coef <- horseshoe()
-
-# linear predictor
-mu <- int + coef * attitude$complaints
-
-# observation model
-distribution(attitude$rating) <- normal(mu, sd)
-
-Regularized horseshoe prior
-The regularized (‘Finnish’) horseshoe (doi.org/10.1214/17-EJS1337SI)
-remedies a problem of the original horseshoe: large, unregularized
-values for the coefficients. This is especially problematic in scenarios
-where the parameters are only weakly identified by the data, as in
-logistic regression with perfectly separable data.
-regularized_horseshoe <- function (tau = 1, c = 1, dim = NULL) {
-  stopifnot(c > 0)
-  lambda <- cauchy(0, 1, truncation = c(0, Inf), dim = dim)
-  lambda_tilde <- (c^2 * lambda^2) / (c^2 + tau^2 * lambda^2)
-  sd <- tau ^ 2 * lambda_tilde ^ 2
-  normal(0, sd, dim = dim)
-}
-
-# variables & priors
-int <- variable()
-sd <- inverse_gamma(1, 1)
-coef <- regularized_horseshoe()
-
-# linear predictor
-mu <- int + coef * attitude$complaints
-
-# observation model
-distribution(attitude$rating) <- normal(mu, sd)
-
-
Advanced Bayesian models
Below are some more advanced examples implemented in greta.
-
-Hierarchical linear regression in general conditional
-formulation
-A hierarchical, Bayesian linear regression model using the iris data,
-with random intercepts and slopes for each of the three species. The
-slopes and intercepts for each species are correlated in this
-example. We allow every species to have a species specific slope for
-Sepal.Length.
-int <- normal(0, 10)
-coef <- normal(0, 10)
-sd <- cauchy(0, 3, truncation = c(0, Inf))
-
-n_species <- length(unique(iris$Species))
-species_id <- as.numeric(iris$Species)
-
-Z <- model.matrix(~ Species + Sepal.Length * Species - 1, data = iris)
-
-gamma_matrix <- multivariate_normal(matrix(0, 1, 2),
-                                    diag(2),
-                                    n_realisations = 3)
-gamma <- c(gamma_matrix)
-
-wi <- as_data(iris$Sepal.Width)
-Z <- as_data(Z)
-mu <- int + coef * wi + Z %*% gamma
-
-distribution(iris$Sepal.Length) <- normal(mu, sd)
-
-
-Hierarchical linear regression in general marginal formulation
-A hierarchical, Bayesian linear regression model using the iris data,
-with random intercepts and slopes for each of the three species. This
-time we try to set up the marginal model, i.e. the model we get when we
-integrate the random effects out of the conditional density.
-int <- variable()
-coef <- normal(0, 5)
-sd <- cauchy(0, 3, truncation = c(0, Inf))
-
-n_species <- length(unique(iris$Species))
-species_id <- as.numeric(iris$Species)
-
-Z <- model.matrix(~ Species + Sepal.Length * Species - 1, data = iris)
-G <- zeros(n_species * 2, n_species * 2)
-
-for (s in unique(species_id)) {
-  G[c(s, s + n_species), c(s, s + n_species)] <- diag(2)
-}
-
-mu <- int + coef * iris$Sepal.Width
-V <- zeros(nrow(iris), nrow(iris))
-diag(V) <- sd
-
-Z <- as_data(Z)
-V <- V + Z %*% G %*% t(Z)
-
-sep <- t(iris$Sepal.Width)
-distribution(sep) <- multivariate_normal(t(mu), V)
-
-
-Bayesian neural network
-Bayesian neural network estimates a simple neural network
-with a normal prior on the edge weights. For clarity we use an
-architecture without a hidden layer, such that the weights actually
-correspond to coefficients in a linear regression model.
-N <- 100
-p <- 10
-
-set.seed(23)
-X <- matrix(rnorm(N * p), N)
-beta <- rnorm(10)
-y <- X %*% beta + rnorm(N, sd = 0.1)
-neural_network <- function(x)
-{
-  # this can be arbitrarily complex, e.g. multiple hidden layers
-  x %*% weights
-}
-
-weights <- normal(0, 1, dim = c(p, 1))
-sd <- inverse_gamma(1, 1)
-
-distribution(y) <- normal(neural_network(X), sd)
-
-Factor analysis
-Factor analysis is a linear latent model used for finding a
-lower-dimensional probabilistic description of a data set with
-observations \(\mathbf{x}_i \in
-\mathbb{R}^p\). We assume the data are generated according to
-\[
-\mathbf{x}_i = \mathbf{W} \mathbf{z}_i + \boldsymbol \mu + \epsilon_i
-\] where the noise \(\epsilon\)
-is normally distributed with zero mean and diagonal covariance matrix
-\(\Psi = \mathrm{diag}(\psi_1, \dots,
-\psi_p)\). The goal of factor analysis is to estimate the latent
-variables \(\mathbf{z}_i \in
-\mathbb{R}^q\).
-In this example we take the mean vector \(\boldsymbol \mu\) to be zero.
-
-generate.data <- function(n = 100, p = 5, q = 2, psi = diag(rgamma(p, 1, 1)))
-{
- W <- matrix(rnorm(p * q, 1), p, q)
- Z <- matrix(rnorm(q * n, 2), q, n)
- WZ <- W %*% Z
-
- X <- matrix(0, n, p)
- for (i in seq_len(n)) {
- X[i, ] <- MASS::mvrnorm(1, WZ[, i], psi)
- }
-
- list(X = X, W = W, Z = Z, psi = psi)
-}
-
-n <- 100
-p <- 5
-q <- 2
-data <- generate.data(n = n, p = p, q = q)
-X <- data$X
-
-W <- normal(0, 1, dim = c(p, q))
-Z <- normal(0, 1, dim = c(q, n))
-psi <- zeros(p, p)
-diag(psi) <- inverse_gamma(1, 1, dim = p)
-
-distribution(X) <- multivariate_normal(t(W %*% Z), psi)
-
-
BUGS models
The BUGS project provide a number of example models written in the
@@ -775,273 +277,20 @@
BUGS models
and likely also in JAGS. The Stan
wiki provides Stan implementations of these models.
The following sections provide greta implementations of some of these
-example models, alongside the BUGS code from WinBUGS
+example models, alongside the BUGS code from WinBUGS
examples volume 2 (pdf) and Stan code and an R version of the data
from the Stan
example models wiki.
-
-Air
-Air analyses reported respiratory illness versus exposure to
-nitrogen dioxide in 103 children. The parameters alpha, beta and sigma2
-are known in advance, and the data are grouped into three categories.
-See WinBUGS
-examples volume 2 (pdf) for details.
-
-data
-y <- c(21, 20, 15)
-n <- c(48, 34, 21)
-Z <- c(10, 30, 50)
-alpha <- 4.48
-beta <- 0.76
-sigma2 <- 81.14
-sigma <- sqrt(sigma2)
-tau <- 1 / sigma2
-J <- 3
-
-
-greta code
-theta <- normal(0, 32, dim = 2)
-mu <- alpha + beta * Z
-X <- normal(mu, sigma)
-p <- ilogit(theta[1] + theta[2] * X)
-distribution(y) <- binomial(n, p)
-
-
-BUGS/JAGS code
-
-for(j in 1 : J) {
- y[j] ~ dbin(p[j], n[j])
- logit(p[j]) <- theta[1] + theta[2] * X[j]
- X[j] ~ dnorm(mu[j], tau)
- mu[j] <- alpha + beta * Z[j]
-}
-theta[1] ~ dnorm(0.0, 0.001)
-theta[2] ~ dnorm(0.0, 0.001)
-
-
-
-Stan code
-
-data {
- real alpha;
- real beta;
- real<lower=0> sigma2;
- int<lower=0> J;
- array[J] int y;
- vector[J] Z;
- array[J] int n;
-}
-transformed data {
- real<lower=0> sigma;
- sigma = sqrt(sigma2);
-}
-parameters {
- real theta1;
- real theta2;
- vector[J] X;
-}
-model {
- array[J] real p;
- theta1 ~ normal(0, 32); // 32^2 = 1024
- theta2 ~ normal(0, 32);
- X ~ normal(alpha + beta * Z, sigma);
- y ~ binomial_logit(n, theta1 + theta2 * X);
-}
-
-
-
-Beetles
-Beetles considers dose-response data from an experiment
-applying carbon disulphide to 8 groups of beetles. The original example compares
-three different link functions; the logit, probit and complementary
-log-log. Here, only the code for the logit link is shown. You can
-implement the other two link functions in greta by changing
-ilogit to iprobit or icloglog.
-See WinBUGS
-examples volume 2 (pdf) for details.
-
-data
-x <- c(1.6907, 1.7242, 1.7552, 1.7842, 1.8113, 1.8369, 1.8610, 1.8839)
-n <- c(59, 60, 62, 56, 63, 59, 62, 60)
-r <- c(6, 13, 18, 28, 52, 53, 61, 60)
-N <- 8
-
-
-greta code
-alpha_star <- normal(0, 32)
-beta <- normal(0, 32)
-p <- ilogit(alpha_star + beta * (x - mean(x)))
-distribution(r) <- binomial(n, p)
-
-alpha <- alpha_star - beta * mean(x)
-rhat <- p * n
-
-
-BUGS/JAGS code
-
-for( i in 1 : N ) {
- r[i] ~ dbin(p[i],n[i])
- logit(p[i]) <- alpha.star + beta * (x[i] - mean(x[]))
- rhat[i] <- n[i] * p[i]
- culmative.r[i] <- culmative(r[i], r[i])
-}
-alpha <- alpha.star - beta * mean(x[])
-beta ~ dnorm(0.0,0.001)
-alpha.star ~ dnorm(0.0,0.001)
-
-
-
-Stan code
-
-data {
- int<lower=0> N;
- array[N] int<lower=0> n;
- array[N] int<lower=0> r;
- vector[N] x;
-}
-transformed data {
- vector[N] centered_x;
- real mean_x;
- mean_x = mean(x);
- centered_x = x - mean_x;
-}
-parameters {
- real alpha_star;
- real beta;
-}
-transformed parameters {
- vector[N] m;
- m = alpha_star + beta * centered_x;
-}
-model {
- alpha_star ~ normal(0.0, 1.0E4);
- beta ~ normal(0.0, 1.0E4);
- r ~ binomial_logit(n, m);
-}
-generated quantities {
- real alpha;
- array[N] real p;
- array[N] real llike;
- array[N] real rhat;
- for (i in 1 : N) {
- p[i] = inv_logit(m[i]);
- llike[i] = r[i] * log(p[i]) + (n[i] - r[i]) * log(1 - p[i]);
- rhat[i] = p[i] * n[i]; // fitted values
- }
- alpha = alpha_star - beta * mean_x;
-}
-
-
-
-
Stan models
The following few code examples show how Stan code can be translated
in equivalent greta models.
-
-Lightspeed
-Lightspeed estimates a linear normal model without
-predictors. The data are 66 measurements from Simon Newcomb and
-represent the time required for light to travel roughly 7500 meters.
-See also the Stan
-examples for details.
-
-data
-y <- c(28, 26, 33, 24, 34, -44, 27, 16, 40, -2, 29, 22, 24, 21, 25,
- 30, 23, 29, 31, 19, 24, 20, 36, 32, 36, 28, 25, 21, 28, 29,
- 37, 25, 28, 26, 30, 32, 36, 26, 30, 22, 36, 23, 27, 27, 28,
- 27, 31, 27, 26, 33, 26, 32, 32, 24, 39, 28, 24, 25, 32, 25,
- 29, 27, 28, 29, 16, 23)
-n <- length(y)
-
-
-greta code
-beta <- variable()
-sigma <- variable(lower = 0)
-
-distribution(y) <- normal(beta, sigma)
-
-
-Stan code
-
-data {
- int<lower=0> N;
- vector[N] y;
-}
-parameters {
- vector[1] beta;
- real<lower=0> sigma;
-}
-model {
- y ~ normal(beta[1], sigma);
-}
-
-
-
-Eight schools
-Eight schools estimates the effect of coaching programs in
-eight schools. The data are 8 measurements of coaching effects along
-with their standard errors.
-See also the Stan
-example for details.
-
-data
-y <- c(28, 8, -3, 7, -1, 1, 18, 12)
-sigma_y <- c(15, 10, 16, 11, 9, 11, 10, 18)
-N <- length(y)
-
-
-greta code
-sigma_eta <- inverse_gamma(1, 1)
-eta <- normal(0, sigma_eta, dim = N)
-
-mu_theta <- normal(0, 100)
-xi <- normal(0, 5)
-theta <- mu_theta + xi * eta
-
-distribution(y) <- normal(theta, sigma_y)
-
-
-Stan code
-
-data {
- int<lower=0> N;
- vector[N] y;
- vector[N] sigma_y;
-}
-parameters {
- vector[N] eta;
- real mu_theta;
- real<lower=0, upper=100> sigma_eta;
- real xi;
-}
-transformed parameters {
- real<lower=0> sigma_theta;
- vector[N] theta;
-
- theta = mu_theta + xi * eta;
- sigma_theta = fabs(xi) / sigma_eta;
-}
-model {
- mu_theta ~ normal(0, 100);
- sigma_eta ~ inv_gamma(1, 1); //prior distribution can be changed to uniform
-
- eta ~ normal(0, sigma_eta);
- xi ~ normal(0, 5);
- y ~ normal(theta, sigma_y);
-}
-
-
-
-
Ecological models
Here we provide some examples of common ecological models. We begin
@@ -1052,404 +301,14 @@
Ecological models
multiple species: independently but concurrently modelled species,
partially pooled coefficients, repeated measures, and sub-models.
-
-Logistic regression
-A simple logistic regression being used to estimate the probability of
-species presence along a number of environmental gradients.
-
-data
-# make fake data
-n_env <- 3
-n_sites <- 20
-
-# n_sites x n_env matrix of environmental variables
-env <- matrix(rnorm(n_sites * n_env), nrow = n_sites)
-# n_sites observations of species presence or absence
-occupancy <- rbinom(n_sites, 1, 0.5)
-
-
-greta code
-alpha <- normal(0, 10)
-beta <- normal(0, 10, dim = n_env)
-
-# logit-linear model
-linear_predictor <- alpha + env %*% beta
-p <- ilogit(linear_predictor)
-
-# distribution (likelihood) over observed values
-distribution(occupancy) <- bernoulli(p)
-
-
-
-Poisson regression
-An example of a simple Poisson regression being used to estimate the
-abundance of a species along a number of environmental gradients.
-
-data
-# make fake data
-n_env <- 3
-n_sites <- 20
-
-# n_sites x n_env matrix of environmental variables
-env <- matrix(rnorm(n_sites * n_env), nrow = n_sites)
-# n_sites observations of species abundance
-occupancy <- rpois(n_sites, 5)
-
-
-greta code
-alpha <- normal(0, 10)
-beta <- normal(0, 10, dim = n_env)
-linear_predictor <- alpha + env %*% beta
-lambda <- exp(linear_predictor)
-distribution(occupancy) <- poisson(lambda)
-
-
-
-Logistic regression with error term
-This is an example of a simple logistic regression with an extra
-observation-level error term, to model over-dispersion or clustering in
-occupancy data from multiple visits.
-
-data
-# make fake data
-n_env <- 3
-n_sites <- 20
-n_obs <- 5
-
-# n_sites x n_env matrix of environmental variables
-env <- matrix(rnorm(n_sites * n_env), nrow = n_sites)
-# n_sites observations of species presence or absence over n_obs visits
-occupancy <- rbinom(n_sites, n_obs, 0.5)
-
-
-greta code
-alpha <- normal(0, 10)
-beta <- normal(0, 10, dim = n_env)
-error <- normal(0, 10, dim = n_sites)
-
-# logit-linear model with extra variation
-linear_predictor <- alpha + env %*% beta + error
-p <- ilogit(linear_predictor)
-
-# distribution (likelihood) over observed values
-distribution(occupancy) <- binomial(n_obs, p)
-
-
-
-Multiple species modelling independently and concurrently
-An example of a logistic regression being used to estimate the
-probability of multiple species’ presences along a number of
-environmental gradients. Although modelled concurrently, the random
-variables for each species are independent. We first simulate some data
-to model followed by the greta code.
-Where a single observation per species and location would have a
-bernoulli error distribution, multiple observations for each species and
-location have a binomial distribution.
-When modelling multiple species (or other grouping factor), we need
-an extra step in constructing the linear predictor. In order to add
-multiple greta arrays together for each species we
-can use the sweep() function.
-
-data
-# make fake data
-n_species <- 5
-n_env <- 3
-n_sites <- 20
-
-env <- matrix(rnorm(n_sites * n_env), nrow = n_sites)
-occupancy <- matrix(rbinom(n_species * n_sites, 1, 0.5), nrow = n_sites)
-
-
-greta code
-alpha <- normal(0, 10, dim = n_species)
-beta <- normal(0, 10, dim = c(n_env, n_species))
-
-env_effect <- env %*% beta
-
-# add intercepts for all species
-linear_predictor <- sweep(env_effect, 2, alpha, FUN = '+')
-
-# ilogit of linear predictor
-p <- ilogit(linear_predictor)
-
-# a single observation means our data are bernoulli distributed
-distribution(occupancy) <- bernoulli(p)
-
-
-
-Multiple species with partial pooling of regression
-coefficients
-An example of a logistic regression being used to estimate the
-probability of multiple species’ presences along a number of
-environmental gradients. Instead of assuming independence of species
-regression coefficients, we assume they are drawn from a shared
-distribution. We partially pool species responses. This gives us not only
-the regression coefficients for each species but also a global average
-coefficient and a measure of variation between species responses to
-environmental gradients.
-
-data
-# make fake data
-n_species <- 5
-n_env <- 1
-n_sites <- 50
-
-env <- matrix(rnorm(n_sites * n_env), nrow = n_sites)
-occupancy <- matrix(rbinom(n_sites * n_species, 1, 0.5), nrow = n_sites)
-
-
-greta code
-global_alpha <- normal(0, 10, dim = 1)
-global_alpha_sd <- uniform(0, 10, dim = 1)
-alpha <- normal(global_alpha, global_alpha_sd, dim = n_species)
-
-global_betas <- normal(0, 10, dim = n_env)
-global_betas_sd <- uniform(0, 10, dim = n_env)
-beta <- normal(global_betas, global_betas_sd, dim = c(n_env, n_species))
-
-env_effect <- env %*% beta
-
-# add intercepts for all species
-linear_predictor <- sweep(env_effect, 2, alpha, FUN = '+')
-
-# ilogit of linear predictor
-p <- ilogit(linear_predictor)
-
-distribution(occupancy) <- bernoulli(p)
-
-
-
-Multiple species with sub-model for regression coefficients
-An example of a logistic regression being used to estimate the
-probability of multiple species’ presences along a number of
-environmental gradients. Instead of assuming independence of species
-regression coefficients, or partial pooling in shared distributions, we
-use a sub-model to estimate species regression coefficients. In this
-case, we’re using species traits to estimate their response to different
-environmental gradients.
-Because we’re building a sub-model, it’s more efficient to simply add
-a column of ones to dataframes for the base model and sub-model. This is
-simply to prevent our code from becoming too cumbersome. If we didn’t
-want to use our sub-model to estimate the intercept, we would not need
-to include the column of ones in the environmental dataframe.
-
-data
-# make fake data
-n_species <- 3
-n_env <- 1
-n_sites <- 5
-n_traits <- 1
-
-# n_sites x n_env matrix of environmental variables
-env <- matrix(rnorm(n_sites * n_env), nrow = n_sites)
-# n_species * n_traits matrix of trait variables
-traits <- matrix(rnorm(n_species * n_traits), nrow = n_species)
-# n_sites * n_species matrix of observed occupancy
-occupancy <- matrix(rbinom(n_sites * n_species, 1, 0.5), nrow = n_sites)
-
-
-greta code
-# include a column of 1's for intercept estimation in the sub-model (traits) and base model
-traits <- cbind(rep(1, n_species), traits)
-env <- cbind(rep(1, n_sites), env)
-
-# redefine n_env and n_traits after adding columns for intercepts
-n_env <- ncol(env)
-n_traits <- ncol(traits)
-
-# sub-model parameters have normal prior distributions
-g <- normal(0, 10, dim = c(n_env, n_traits))
-# parameters of the base model are a function of the parameters of the sub-model
-beta <- g %*% t(traits)
-
-# use the coefficients to get the model linear predictor
-linear_predictor <- env %*% beta
-
-# use the logit link to get probabilities of occupancy
-p <- ilogit(linear_predictor)
-
-# data are bernoulli distributed
-distribution(occupancy) <- bernoulli(p)
-
-
-
-Cormack-Jolly-Seber model
-Cormack-Jolly-Seber (CJS) models estimate probabilities of
-survival and recapture from mark-recapture data. These models assume
-that we can only ever see individuals that have been initially marked
-and released or recaptured following release (i.e. individuals do not
-exist until first observed). The two key parameters are survival, \(\phi\), and probability of recapture, \(p\). There is an additional derived
-parameter, \(\chi\), which is the
-probability that an individual is not recaptured following its final
-capture. \(\chi\) marginalises over
-multiple scenarios in which the individual is not observed either
-because it has died or because it is alive but not detected.
-The introductory
-book to the program MARK has a lot of information on mark-recapture
-models, including CJS models (starting in Ch. 1) and the broader class
-of Jolly-Seber models (Ch. 12). There is also a section on
-mark-recapture models in the Stan language
-manual, which goes through the derivation of the parameter \(\chi\).
-
-data
-
-n_obs <- 100
-n_time <- 20
-y <- matrix(sample(c(0, 1), size = (n_obs * n_time), replace = TRUE),
- ncol = n_time)
-
-
-
-greta code
-# data summaries
-first_obs <- apply(y, 1, function(x) min(which(x > 0)))
-final_obs <- apply(y, 1, function(x) max(which(x > 0)))
-obs_id <- apply(y, 1, function(x) seq(min(which(x > 0)), max(which(x > 0)), by = 1)[-1])
-obs_id <- unlist(obs_id)
-capture_vec <- apply(y, 1, function(x) x[min(which(x > 0)):max(which(x > 0))][-1])
-capture_vec <- unlist(capture_vec)
-
-# priors
-phi <- beta(1, 1, dim = n_time)
-p <- beta(1, 1, dim = n_time)
-
-# derived parameter
-chi <- ones(n_time)
-for (i in seq_len(n_time - 1)) {
-  tn <- n_time - i
-  chi[tn] <- (1 - phi[tn]) + phi[tn] * (1 - p[tn + 1]) * chi[tn + 1]
-}
-
-# dummy variables
-alive_data <- ones(length(obs_id)) # definitely alive
-not_seen_last <- final_obs != 20 # ignore observations in last timestep
-final_observation <- ones(sum(not_seen_last)) # final observation
-
-# set likelihoods
-distribution(alive_data) <- bernoulli(phi[obs_id - 1])
-distribution(capture_vec) <- bernoulli(p[obs_id])
-distribution(final_observation) <- bernoulli(chi[final_obs[not_seen_last]])
-
-
-BUGS/JAGS code
-
-model {
- # priors
- for (t in 1:(n_time - 1)) {
- phi[t] ~ dunif(0, 1)
- p[t] ~ dunif(0, 1)
- }
- # likelihood
- for (i in 1:n_obs) {
- z[i, first_obs[i]] <- 1 # state at first capture must be 1!
- for (t in (first_obs[i] + 1):n_time) {
- mu1[i, t] <- phi[t - 1] * z[i, t - 1]
- z[i, t] ~ dbern(mu1[i, t]) # true state
- mu2[i, t] <- p[t - 1] * z[i, t]
- y[i, t] ~ dbern(mu2[i, t]) # observed state
- }
- }
-}
-
-
-
-Stan code
-
-/**
- * Cormack-Jolly-Seber Model
- *
- * following section 1.2.1 of:
- * http://www.maths.otago.ac.nz/home/resources/theses/PhD_Matthew_Schofield.pdf
- *
- */
-data {
- int<lower=2> K; // capture events
- int<lower=0> I; // number of individuals
- array[I, K] int<lower=0, upper=1> X; // X[i,k]: individual i captured at k
-}
-transformed data {
- array[I] int<lower=0, upper=K + 1> first; // first[i]: ind i first capture
- array[I] int<lower=0, upper=K + 1> last; // last[i]: ind i last capture
- array[K] int<lower=0, upper=I> n_captured; // n_capt[k]: num aptured at k
-
- first = rep_array(K + 1, I);
- last = rep_array(0, I);
- for (i in 1 : I) {
- for (k in 1 : K) {
- if (X[i, k] == 1) {
- if (k < first[i]) {
- first[i] = k;
- }
- if (k > last[i]) {
- last[i] = k;
- }
- }
- }
- }
-
- n_captured = rep_array(0, K);
- for (i in 1 : I) {
- for (k in 1 : K) {
- n_captured[k] = n_captured[k] + X[i, k];
- }
- }
-}
-parameters {
- vector<lower=0, upper=1>[K - 1] phi; // phi[k]: Pr[alive at k + 1 | alive at k]
- vector<lower=0, upper=1>[K] p; // p[k]: Pr[capture at k]
-
- // note: p[1] not used in model and hence not identified
-}
-transformed parameters {
- vector<lower=0, upper=1>[K] chi; // chi[k]: Pr[no capture > k | alive at k]
- {
- int k;
- chi[K] = 1.0;
- k = K - 1;
- while (k > 0) {
- chi[k] = (1 - phi[k]) + phi[k] * (1 - p[k + 1]) * chi[k + 1];
- k = k - 1;
- }
- }
-}
-model {
- for (i in 1 : I) {
- if (last[i] > 0) {
- for (k in (first[i] + 1) : last[i]) {
- target += log(phi[k - 1]); // i survived from k-1 to k
- if (X[i, k] == 1) {
- target += log(p[k]);
- } // i captured at k
- else {
- target += log1m(p[k]);
- } // i not captured at k
- }
- target += log(chi[last[i]]); // i not seen after last[i]
- }
- }
-}
-generated quantities {
- // phi[K-1] and p(K) not identified, but product is
- real beta;
- vector<lower=0>[K] pop_hat; // population
-
- beta = phi[K - 1] * p[K];
-
- for (k in 1 : K) {
- pop_hat[k] = n_captured[k] / p[k];
- }
-}
-
-
-
diff --git a/revdep/library.noindex/greta/old/greta/doc/faq.R b/revdep/library.noindex/greta/old/greta/doc/faq.R
index 7354da3e..6143e308 100644
--- a/revdep/library.noindex/greta/old/greta/doc/faq.R
+++ b/revdep/library.noindex/greta/old/greta/doc/faq.R
@@ -1,4 +1,4 @@
-## ---- include = FALSE---------------------------------------------------------
+## ----include = FALSE----------------------------------------------------------
knitr::opts_chunk$set(
collapse = TRUE,
comment = "#>",
diff --git a/revdep/library.noindex/greta/old/greta/doc/faq.html b/revdep/library.noindex/greta/old/greta/doc/faq.html
index 0d128a69..49e81d23 100644
--- a/revdep/library.noindex/greta/old/greta/doc/faq.html
+++ b/revdep/library.noindex/greta/old/greta/doc/faq.html
@@ -29,23 +29,23 @@
+code{white-space: pre-wrap;}
+span.smallcaps{font-variant: small-caps;}
+span.underline{text-decoration: underline;}
+div.column{display: inline-block; vertical-align: top; width: 50%;}
+div.hanging-indent{margin-left: 1.5em; text-indent: -1.5em;}
+ul.task-list{list-style: none;}
+
+code{white-space: pre-wrap;}
+span.smallcaps{font-variant: small-caps;}
+span.underline{text-decoration: underline;}
+div.column{display: inline-block; vertical-align: top; width: 50%;}
+div.hanging-indent{margin-left: 1.5em; text-indent: -1.5em;}
+ul.task-list{list-style: none;}
+