From 7b9aa21d04dfa475e64f0dfbfca3366b5f830d5f Mon Sep 17 00:00:00 2001 From: mloubout Date: Mon, 12 Dec 2022 21:01:09 -0500 Subject: [PATCH] fix links --- .github/workflows/docs.yml | 2 +- .github/workflows/runtests.yml | 2 +- Project.toml | 2 +- docs/src/api.md | 40 +++++++++++++++++---------- docs/src/index.md | 46 ++++++++++--------------------- examples/networks/network_glow.jl | 2 ++ 6 files changed, 46 insertions(+), 48 deletions(-) diff --git a/.github/workflows/docs.yml b/.github/workflows/docs.yml index 59db6c50..5fd2884d 100644 --- a/.github/workflows/docs.yml +++ b/.github/workflows/docs.yml @@ -10,7 +10,7 @@ jobs: build: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v3 - uses: julia-actions/setup-julia@latest diff --git a/.github/workflows/runtests.yml b/.github/workflows/runtests.yml index 3f45cad4..907c7c04 100644 --- a/.github/workflows/runtests.yml +++ b/.github/workflows/runtests.yml @@ -25,7 +25,7 @@ jobs: steps: - name: Checkout InvertibleNetworks.jl - uses: actions/checkout@v2 + uses: actions/checkout@v3 - name: Setup julia uses: julia-actions/setup-julia@v1 diff --git a/Project.toml b/Project.toml index 251a0981..863374a6 100644 --- a/Project.toml +++ b/Project.toml @@ -1,7 +1,7 @@ name = "InvertibleNetworks" uuid = "b7115f24-5f92-4794-81e8-23b0ddb121d3" authors = ["Philipp Witte ", "Ali Siahkoohi ", "Mathias Louboutin ", "Gabrio Rizzuti ", "Rafael Orozco ", "Felix J. 
herrmann "] -version = "2.2.1" +version = "2.2.2" [deps] CUDA = "052768ef-5323-5732-b1bb-66c8b64840ba" diff --git a/docs/src/api.md b/docs/src/api.md index b430fae6..5e7222b7 100644 --- a/docs/src/api.md +++ b/docs/src/api.md @@ -1,36 +1,48 @@ -## Invertible Layers +# Invertible Networks API reference -### Types +```@autodocs +Modules = [InvertibleNetworks] +Order = [:function] +Pages = ["neuralnet.jl", "parameter.jl"] +``` + +## Activations functions ```@autodocs Modules = [InvertibleNetworks] -Order = [:type] -Filter = t -> t<:NeuralNetLayer +Order = [:function] +Pages = ["activation_functions.jl"] ``` -## Invertible Networks +## Dimensions manipulation + +```@autodocs +Modules = [InvertibleNetworks] +Order = [:function] +Pages = ["dimensionality_operations.jl"] +``` -### Types +## Layers ```@autodocs Modules = [InvertibleNetworks] -Order = [:type] -Filter = t -> t<:InvertibleNetwork +Order = [:type] +Filter = t -> t<:NeuralNetLayer ``` -## Activations functions +## Networks ```@autodocs Modules = [InvertibleNetworks] -Order = [:function] -Pages = ["activation_functions.jl"] +Order = [:type] +Filter = t -> t<:InvertibleNetwork ``` -## Dimensions manipulation +## AD Integration ```@autodocs Modules = [InvertibleNetworks] -Order = [:function] -Pages = ["dimensionality_operations.jl"] +Order = [:function] +Pages = ["chainrules.jl"] ``` \ No newline at end of file diff --git a/docs/src/index.md b/docs/src/index.md index b7a9c5aa..36424025 100644 --- a/docs/src/index.md +++ b/docs/src/index.md @@ -17,65 +17,49 @@ This package is developped and maintained by Felix J. 
Herrmann's [SlimGroup](htt ## Installation -To install this package you can either directly install it from its url: +This package is registered in the Julia general registry and can be directly installed in the Julia REPL package manager (`]`): ```julia -] add https://github.com/slimgroup/InvertibleNetworks.jl -``` - -or if you wish to have access to all [slimgroup](https://github.com/slimgroup/)' softwares you can add our registry to have access to our packages in the standard julia way: - - -```julia -] registry add https://Github.com/slimgroup/SLIMregistryJL.git -] add InvertibleNetworks +] add/dev InvertibleNetworks ``` ## References - - Yann Dauphin, Angela Fan, Michael Auli and David Grangier, "Language modeling with gated convolutional networks", Proceedings of the 34th International Conference on Machine Learning, 2017. https://arxiv.org/pdf/1612.08083.pdf + - Yann Dauphin, Angela Fan, Michael Auli and David Grangier, "Language modeling with gated convolutional networks", Proceedings of the 34th International Conference on Machine Learning, 2017. [ArXiv](https://arxiv.org/pdf/1612.08083.pdf) - - Laurent Dinh, Jascha Sohl-Dickstein and Samy Bengio, "Density estimation using Real NVP", International Conference on Learning Representations, 2017, https://arxiv.org/abs/1605.08803 + - Laurent Dinh, Jascha Sohl-Dickstein and Samy Bengio, "Density estimation using Real NVP", International Conference on Learning Representations, 2017, [ArXiv](https://arxiv.org/abs/1605.08803) - - Diederik P. Kingma and Prafulla Dhariwal, "Glow: Generative Flow with Invertible 1x1 Convolutions", Conference on Neural Information Processing Systems, 2018. https://arxiv.org/abs/1807.03039 + - Diederik P. Kingma and Prafulla Dhariwal, "Glow: Generative Flow with Invertible 1x1 Convolutions", Conference on Neural Information Processing Systems, 2018. 
[ArXiv](https://arxiv.org/abs/1807.03039) - - Keegan Lensink, Eldad Haber and Bas Peters, "Fully Hyperbolic Convolutional Neural Networks", arXiv Computer Vision and Pattern Recognition, 2019. https://arxiv.org/abs/1905.10484 + - Keegan Lensink, Eldad Haber and Bas Peters, "Fully Hyperbolic Convolutional Neural Networks", arXiv Computer Vision and Pattern Recognition, 2019. [ArXiv](https://arxiv.org/abs/1905.10484) - - Patrick Putzky and Max Welling, "Invert to learn to invert", Advances in Neural Information Processing Systems, 2019. https://arxiv.org/abs/1911.10914 + - Patrick Putzky and Max Welling, "Invert to learn to invert", Advances in Neural Information Processing Systems, 2019. [ArXiv](https://arxiv.org/abs/1911.10914) - - Jakob Kruse, Gianluca Detommaso, Robert Scheichl and Ullrich Köthe, "HINT: Hierarchical Invertible Neural Transport for Density Estimation and Bayesian Inference", arXiv Statistics and Machine Learning, 2020. https://arxiv.org/abs/1905.10687 + - Jakob Kruse, Gianluca Detommaso, Robert Scheichl and Ullrich Köthe, "HINT: Hierarchical Invertible Neural Transport for Density Estimation and Bayesian Inference", arXiv Statistics and Machine Learning, 2020. 
[ArXiv](https://arxiv.org/abs/1905.10687) ## Related work and publications The following publications use [InvertibleNetworks.jl]: -- **[“Preconditioned training of normalizing flows for variational inference in inverse problems”]** +- **[“Preconditioned training of normalizing flows for variational inference in inverse problems”](https://slim.gatech.edu/content/preconditioned-training-normalizing-flows-variational-inference-inverse-problems)** - paper: [https://arxiv.org/abs/2101.03709](https://arxiv.org/abs/2101.03709) - [presentation](https://slim.gatech.edu/Publications/Public/Conferences/AABI/2021/siahkoohi2021AABIpto/siahkoohi2021AABIpto_pres.pdf) - - code: [FastApproximateInference.jl] + - code: [FastApproximateInference.jl](https://github.com/slimgroup/Software.siahkoohi2021AABIpto) -- **["Parameterizing uncertainty by deep invertible networks, an application to reservoir characterization"]** +- **["Parameterizing uncertainty by deep invertible networks, an application to reservoir characterization"](https://slim.gatech.edu/content/parameterizing-uncertainty-deep-invertible-networks-application-reservoir-characterization)** - paper: [https://arxiv.org/abs/2004.07871](https://arxiv.org/abs/2004.07871) - [presentation](https://slim.gatech.edu/Publications/Public/Conferences/SEG/2020/rizzuti2020SEGuqavp/rizzuti2020SEGuqavp_pres.pdf) - code: [https://github.com/slimgroup/Software.SEG2020](https://github.com/slimgroup/Software.SEG2020) -- **["Generalized Minkowski sets for the regularization of inverse problems"]** +- **["Generalized Minkowski sets for the regularization of inverse problems"](https://slim.gatech.edu/content/generalized-minkowski-sets-regularization-inverse-problems-1)** - paper: [http://arxiv.org/abs/1903.03942](http://arxiv.org/abs/1903.03942) - - code: [SetIntersectionProjection.jl] + - code: [SetIntersectionProjection.jl](https://github.com/slimgroup/SetIntersectionProjection.jl) ## Acknowledgments This package uses functions from 
[NNlib.jl](https://github.com/FluxML/NNlib.jl), [Flux.jl](https://github.com/FluxML/Flux.jl) and [Wavelets.jl](https://github.com/JuliaDSP/Wavelets.jl) -[Flux]:https://fluxml.ai -[Julia]:https://julialang.org -[Zygote]:https://github.com/FluxML/Zygote.jl -[ChainRules]:https://github.com/JuliaDiff/ChainRules.jl -[InvertibleNetworks.jl]:https://github.com/slimgroup/InvertibleNetworks.jl -[“Preconditioned training of normalizing flows for variational inference in inverse problems”]:https://slim.gatech.edu/content/preconditioned-training-normalizing-flows-variational-inference-inverse-problems -[FastApproximateInference.jl]:https://github.com/slimgroup/Software.siahkoohi2021AABIpto -["Generalized Minkowski sets for the regularization of inverse problems"]:https://slim.gatech.edu/content/generalized-minkowski-sets-regularization-inverse-problems-1 -[SetIntersectionProjection.jl]:https://github.com/slimgroup/SetIntersectionProjection.jl -["Parameterizing uncertainty by deep invertible networks, an application to reservoir characterization"]:https://slim.gatech.edu/content/parameterizing-uncertainty-deep-invertible-networks-application-reservoir-characterization + + diff --git a/examples/networks/network_glow.jl b/examples/networks/network_glow.jl index 587c3ea1..371b5b3b 100644 --- a/examples/networks/network_glow.jl +++ b/examples/networks/network_glow.jl @@ -6,6 +6,7 @@ using InvertibleNetworks, LinearAlgebra, Flux import Flux.Optimise.update! device = InvertibleNetworks.CUDA.functional() ? gpu : cpu + # Define network nx = 64 # must be multiple of 2 ny = 64 @@ -31,6 +32,7 @@ end # Evaluate loss f = loss(X) +@time loss(X) # Update weights opt = Flux.ADAM()