diff --git a/.github/workflows/format_check.yml b/.github/workflows/format_check.yml index 4e46a44..eb8e7a3 100644 --- a/.github/workflows/format_check.yml +++ b/.github/workflows/format_check.yml @@ -3,8 +3,14 @@ name: Format Check on: push: branches: ["master", "dev", "format"] + paths: + - '.github/workflows/format_check.yml' + - '**/*.jl' pull_request: branches: ["master", "dev"] + paths: + - '.github/workflows/format_check.yml' + - '**/*.jl' jobs: check: runs-on: ${{ matrix.os }} diff --git a/.github/workflows/make_docs.yml b/.github/workflows/make_docs.yml new file mode 100644 index 0000000..81d4374 --- /dev/null +++ b/.github/workflows/make_docs.yml @@ -0,0 +1,44 @@ +name: Make Docs +on: + pull_request: + branches: ["master", "dev"] + paths: + - '.github/workflows/make_docs.yml' + - 'src/**' + - 'docs/**' + push: + branches: + - master + - dev + - docs + paths: + - '.github/workflows/make_docs.yml' + - 'src/**' + - 'docs/**' + tags: '*' + workflow_dispatch: + +jobs: + make_docs: + permissions: + actions: write + contents: write + statuses: write + name: Documentation + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + - uses: julia-actions/setup-julia@latest + - uses: julia-actions/cache@v1 + - name: Extract branch name + shell: bash + run: echo "branch=${GITHUB_BASE_REF:-${GITHUB_REF#refs/heads/}}" >> $GITHUB_OUTPUT + id: extract_branch + - name: Install dependencies + run: | + julia --project=docs/ -e 'using Pkg; Pkg.add(; url="https://github.com/ProjectTorreyPines/IMASDD.jl.git"); Pkg.add(; url="https://github.com/ProjectTorreyPines/GGDUtils.jl.git", rev="${{ steps.extract_branch.outputs.branch }}"); Pkg.add(; url="https://github.com/ProjectTorreyPines/SOLPS2IMAS.jl.git", rev="${{ steps.extract_branch.outputs.branch }}"); Pkg.develop(PackageSpec(path=pwd())); Pkg.instantiate()' + - name: Build and deploy + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + DOCUMENTER_KEY: ${{ secrets.DOCUMENTER_KEY }} + run: julia --project=docs/ docs/make.jl diff 
--git a/.github/workflows/test.yml b/.github/workflows/test.yml new file mode 100644 index 0000000..b47058c --- /dev/null +++ b/.github/workflows/test.yml @@ -0,0 +1,55 @@ +name: Test + +on: + push: + branches: ["master", "dev", "autotest"] + paths: + - '.github/workflows/test.yml' + - 'src/**' + - 'test/**' + pull_request: + branches: ["master", "dev"] + paths: + - '.github/workflows/test.yml' + - 'src/**' + - 'test/**' +jobs: + test: + runs-on: ${{ matrix.os }} + strategy: + matrix: + julia-version: [1.9.3] + julia-arch: [x86] + os: [ubuntu-latest] + steps: + - uses: julia-actions/setup-julia@latest + with: + version: ${{ matrix.julia-version }} + - uses: actions/checkout@v4 + - uses: webfactory/ssh-agent@v0.9.0 + with: + ssh-private-key: | + ${{ secrets.SOLPSTESTSAMPLES_SSH_KEY }} + ${{ secrets.DVC_SSH_KEY }} + - name: Configure ssh + run: | + echo "${{ secrets.DVC_KNOWN_HOSTS }}" >> ~/.ssh/known_hosts + echo "${{ secrets.DVC_SSH_CONFIG }}" >> ~/.ssh/config + - uses: iterative/setup-dvc@v1 + - name: DVC Pull + run: | + dvc pull + - uses: julia-actions/cache@v1 + - name: Extract branch name + shell: bash + run: echo "branch=${GITHUB_BASE_REF:-${GITHUB_REF#refs/heads/}}" >> $GITHUB_OUTPUT + id: extract_branch + - name: Install dependencies + run: | + julia --project=. 
-e 'using Pkg; Pkg.rm(["IMASDD", "GGDUtils", "SOLPS2IMAS", "EFIT"]); Pkg.add(; url="https://github.com/ProjectTorreyPines/IMASDD.jl.git"); Pkg.add(; url="https://github.com/ProjectTorreyPines/GGDUtils.jl.git", rev="${{ steps.extract_branch.outputs.branch }}"); Pkg.add(; url="https://github.com/ProjectTorreyPines/SOLPS2IMAS.jl.git", rev="${{ steps.extract_branch.outputs.branch }}"); Pkg.add(; url="https://github.com/JuliaFusion/EFIT.jl.git")' + - uses: julia-actions/julia-runtest@v1 + # Not set up yet + # - uses: julia-actions/julia-processcoverage@v1 + # - uses: codecov/codecov-action@v4 + # with: + # files: lcov.info \ No newline at end of file diff --git a/.gitignore b/.gitignore index 73c794c..d5946fc 100644 --- a/.gitignore +++ b/.gitignore @@ -4,3 +4,5 @@ # environment. Manifest.toml sd_input_data.json +example/*.toml +docs/build diff --git a/LICENSE b/LICENSE index 28143ce..171d76c 100644 --- a/LICENSE +++ b/LICENSE @@ -1,21 +1,201 @@ -MIT License - -Copyright (c) 2023 Project Torrey Pines - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). 
+ + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. 
Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative 
Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2024 General Atomics + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/NOTICE.md b/NOTICE.md new file mode 100644 index 0000000..01d03d0 --- /dev/null +++ b/NOTICE.md @@ -0,0 +1,15 @@ +SD4SOLPS.jl +========= + +The purpose of this NOTICE file is to provide legal notices and acknowledgments that must be displayed to users in any derivative works or distributions. This file does not alter the terms of the Apache 2.0 license that governs the use and distribution of the SD4SOLPS.jl package. + +SD4SOLPS.jl was originally developed under the ProjectTorreyPines by the Magnetic Fusion Energy group at General Atomics. + +If this software contributes to an academic publication, please cite it as follows: +A. Gupta, D. Eldon, H. Anand, A. Dautt-Silva, S. De Pascuale, J. Lore, O. Meneghini, and J.S. 
Park, "Plasma boundary control development using a time-dependent scrape-off layer model in closed-loop simulations", Nucl. Mater. Energy, manuscript in preparation for PSI conference (2024). + +The names "General Atomics", and any associated logos or images, are trademarks of General Atomics. Use of these trademarks without prior written consent from General Atomics is strictly prohibited. Users cannot imply endorsement by General Atomics or contributors to the project simply because the project is part of their work. + +Copyright (c) 2024 General Atomics + +Version: v1.0 \ No newline at end of file diff --git a/Project.toml b/Project.toml index a3e1890..ba9502f 100644 --- a/Project.toml +++ b/Project.toml @@ -1,21 +1,22 @@ name = "SD4SOLPS" uuid = "d8db6f1b-e564-4c04-bba3-ef399f78c070" authors = ["David Eldon "] -version = "0.2.0" +version = "1.0.0" [deps] ArgParse = "c7e460c6-2fb9-53a9-8c5b-16f535851c63" Contour = "d38c429a-6771-53c6-b99e-75d170b6e991" EFIT = "cda752c5-6b03-55a3-9e33-132a441b0c17" GGDUtils = "b7b5e640-9b39-4803-84eb-376048795def" +IMASDD = "06b86afa-9f21-11ec-2ef8-e51b8960cfc5" Interpolations = "a98d9a8b-a2ab-59e6-89dd-64a1c18fca59" JSON = "682c06a0-de6a-54ab-a142-c8b1cf79cde6" -OMAS = "91cfaa06-6526-4804-8666-b540b3feef2f" PhysicalConstants = "5ad8b20f-a522-5ce9-bfc9-ddf1d5bda6ab" PlotUtils = "995b91a9-d308-5afd-9ec6-746e21dbc043" Plots = "91a5bcdd-55d7-5caf-9e0b-520d859cae80" PolygonOps = "647866c9-e3ac-4575-94e7-e3d426903924" SOLPS2IMAS = "09becab6-0636-4c23-a92a-2b3723265c31" Statistics = "10745b16-79ce-11e8-11f9-7d13ad32a3b2" +Test = "8dfed614-e22c-5e08-85e1-65c5234f0b40" Unitful = "1986cc42-f94f-5a68-af5c-568840ba703d" YAML = "ddb6d928-2868-570f-bddf-ab3f9cf99eb6" diff --git a/README.md b/README.md index ed84d70..72ed1cb 100644 --- a/README.md +++ b/README.md @@ -1,4 +1,9 @@ # SD4SOLPS + +![Format Check](https://github.com/ProjectTorreyPines/SD4SOLPS.jl/actions/workflows/format_check.yml/badge.svg) 
+![Docs](https://github.com/ProjectTorreyPines/SD4SOLPS.jl/actions/workflows/make_docs.yml/badge.svg) +![Tests](https://github.com/ProjectTorreyPines/SD4SOLPS.jl/actions/workflows/test.yml/badge.svg) + Synthetic diagnostic workflow manager for use with SOLPS models This repository is the top level layer for managing a workflow for calculating @@ -10,11 +15,45 @@ Steps: 3) Make assumptions to extend profiles into the core and far SOL, if needed 4) Run synthetic diagnostic models and record output +For installation and usage instructions, see the [online documentation](https://projecttorreypines.github.io/SD4SOLPS.jl/stable). For documentation on the under-development branch, see [dev online documentation](https://projecttorreypines.github.io/SD4SOLPS.jl/dev). + +## Examples + +Refer to the instructions on this [wiki page](https://github.com/ProjectTorreyPines/SD4SOLPS.jl/wiki/Demo) to see how to run `examples/demo.ipynb`. + +## Building julia environment for SD4SOLPS for development -## Installation +### Cloning using ssh + +It is recommended to setup github access for your account using a ssh key. Please follow +github instructions for [connecting to github with ssh](https://docs.github.com/en/authentication/connecting-to-github-with-ssh/adding-a-new-ssh-key-to-your-github-account). The make file below and other steps use ssh clone urls and would +fail unless you have access to github setup using ssh key. If you prefer to use password for login, correct clone urls accordingly. 
+ +After cloning this repo, check the make menu: +``` +SD4SOLPS.jl % make help +Help Menu + +make env_with_cloned_repo (or make r): Creates a Julia environment with the cloned repositories +make env_with_git_url (or make u): Creates a Julia environment with the git urls without creating local clones +make clean: Deletes Project.toml and Manifest.toml for a fresh start +``` + +### make r +This option creates local copies of required private repositories at the same level as current repository and uses them in develop mode to create a Manifest.toml + +### make u +This option uses url of required private repositories to create a static Manifest.toml attached to current master branches of these repositories. + +### make clean +Deletes Manifest.toml so that environment can be recreated, to update or change the last used method. + +## Example environment with all repositories ### Using make file +This option only works on and has been tested on macOS and unix. If you have windows, please use the [manual instructions](https://github.com/ProjectTorreyPines/SD4SOLPS.jl/wiki) instead. + #### Option 1: Download the [example directory](https://github.com/ProjectTorreyPines/SD4SOLPS.jl/tree/master/example) in this repo: ```bash @@ -54,10 +93,6 @@ make Clean: Deletes Project.toml, Manifest.toml, and any cloned repositories for Further options are same as above except for the difference that in case of cloning local copies of repos, they will be kept on same level as where you cloned SD4SOLPS.jl repo. -### Manual Install +### Manual Install for development Refer to the instructions on this [wiki page](https://github.com/ProjectTorreyPines/SD4SOLPS.jl/wiki). - -## Use - -Refer to the instructions on this [wiki page](https://github.com/ProjectTorreyPines/SD4SOLPS.jl/wiki/Demo). 
diff --git a/docs/Project.toml b/docs/Project.toml new file mode 100644 index 0000000..9311ade --- /dev/null +++ b/docs/Project.toml @@ -0,0 +1,2 @@ +[deps] +Documenter = "e30172f5-a6a5-5a46-863b-614d45cd2de4" \ No newline at end of file diff --git a/docs/make.jl b/docs/make.jl new file mode 100644 index 0000000..4740ba7 --- /dev/null +++ b/docs/make.jl @@ -0,0 +1,17 @@ +using Documenter +using SD4SOLPS + +makedocs(; + modules=[SD4SOLPS], + format=Documenter.HTML(), + sitename="SD4SOLPS", + checkdocs=:none, +) + +deploydocs(; + repo="github.com/ProjectTorreyPines/SD4SOLPS.jl.git", + target="build", + branch="gh-pages", + devbranch="dev", + versions=["stable" => "v^", "v#.#"], +) diff --git a/docs/src/index.md b/docs/src/index.md new file mode 100644 index 0000000..1ca0106 --- /dev/null +++ b/docs/src/index.md @@ -0,0 +1,100 @@ + +# SD4SOLPS.jl + +```@contents +Pages = ["index.md"] +Depth = 3 +``` + +This repository serves as the top most workflow manager with helpful utilities to use other repositories in this project. 
+ +## Documentation of other repositories in this project + +### [GGDUtils.jl](https://projecttorreypines.github.io/GGDUtils.jl/stable) + +### [SOLPS2IMAS.jl](https://projecttorreypines.github.io/SOLPS2IMAS.jl/stable) + +### [SynthDiag.jl](https://projecttorreypines.github.io/SynthDiag.jl/stable) + +## Installation + +### Using make: +After cloning this repo, check the make menu: +``` +SD4SOLPS.jl % make help +Help Menu + +make env_with_cloned_repo (or make r): Creates a Julia environment with the cloned repositories +make env_with_git_url (or make u): Creates a Julia environment with the git urls without creating local clones +make clean: Deletes Project.toml and Manifest.toml for a fresh start +``` + +#### make r +This option creates local copies of required private repositories at the same level as current repository and uses them in develop mode to create a Manifest.toml + +#### make u +This option uses url of required private repositories to create a static Manifest.toml attached to current master branches of these repositories. + +#### make clean +Deletes Manifest.toml so that environment can be recreated, to update or change the last used method. + +### Using Julia REPL and installing using Github url + +```julia +julia> using Pkg; +julia> Pkg.add(; url="https://github.com/ProjectTorreyPines/IMASDD.jl.git"); +julia> Pkg.add(; url="https://github.com/ProjectTorreyPines/GGDUtils.jl.git"); +julia> Pkg.add(; url="https://github.com/ProjectTorreyPines/SOLPS2IMAS.jl.git"); +julia> Pkg.add(; url="https://github.com/JuliaFusion/EFIT.jl.git"); +julia> Pkg.add(; url="https://github.com/ProjectTorreyPines/SD4SOLPS.jl.git"); +julia> Pkg.instantiate() +``` + +You might have to use ssh url instead of https. In that case, replace `https://github.com` with `git@github.com:`. + +## Top file handling functions + +```@docs +find_files_in_allowed_folders +geqdsk_to_imas! 
+preparation +``` + +## Repairing/filling out partial equilibrium files + +Tools for repairing/filling out partial equilibrium files. + +Some of the added fields may not be totally accurate, so it is recommended to +use this tool mainly for test cases, as a utility. For a real equilibrium, +problems should be fixed properly. + +```@docs +add_rho_to_equilibrium! +check_rho_1d +``` + +## Extrapolations + +Utilities for extrapolating profiles + +### Core profile extrapolations + +```@docs +extrapolate_core +fill_in_extrapolated_core_profile! +``` + +### Edge profiles extrapolations + +These functions have not been fully tested and/or supported yet. + +```@docs +mesh_psi_spacing +cached_mesh_extension! +``` + +## Unit conversion utilities + +```@docs +gas_unit_converter +``` \ No newline at end of file diff --git a/example/create_env_with_clone.sh b/example/create_env_with_clone.sh deleted file mode 100644 index f1c4225..0000000 --- a/example/create_env_with_clone.sh +++ /dev/null @@ -1,38 +0,0 @@ -#!/bin/bash - -is_repo=$(git rev-parse --is-inside-work-tree 2>/dev/null) -if [[ ${is_repo} == "true" ]] -then - SD4SOLPS_path="$(git rev-parse --show-toplevel)" - project_path="$(dirname ${SD4SOLPS_path})" -else - SD4SOLPS_path="" - project_path="$(dirname $(pwd))" -fi -echo "Cloning git repositories to "$project_path -orig_path=$(pwd) -cd $project_path -if [[ -z $SD4SOLPS_path ]] -then - git clone git@github.com:ProjectTorreyPines/SD4SOLPS.jl.git - SD4SOLPS_path=$project_path"/SD4SOLPS.jl" -fi -# in a for loop clone SynthDiag.jl SOLPS2IMAS.jl OMAS.jl GGDUtils.jl -for repo in SynthDiag.jl SOLPS2IMAS.jl OMAS.jl GGDUtils.jl -do - if [[ ! -d $repo ]] - then - git clone "git@github.com:ProjectTorreyPines/"$repo".git" - fi -done -cd $orig_path - -echo "Generating Project.toml and Manifest.toml" -OMAS_path=$project_path"/OMAS.jl" -EFIT_url="git@github.com:JuliaFusion/EFIT.jl.git" -julia --project=. 
-e 'using Pkg; Pkg.develop(path="'$OMAS_path'"); Pkg.instantiate()' -julia --project=. -e 'using Pkg; Pkg.add(url="'$EFIT_url'", rev="master"); Pkg.instantiate()' -for repo in GGDUtils SOLPS2IMAS SynthDiag SD4SOLPS; do - repo_path=$project_path"/"$repo".jl" - julia --project=. -e 'using Pkg; Pkg.develop(path="'${repo_path}'"); Pkg.instantiate()' -done \ No newline at end of file diff --git a/example/create_env_with_git.sh b/example/create_env_with_git.sh deleted file mode 100644 index 38188f9..0000000 --- a/example/create_env_with_git.sh +++ /dev/null @@ -1,11 +0,0 @@ -#!/bin/bash - -echo "Generating Project.toml and Manifest.toml" -OMAS_url="git@github.com:ProjectTorreyPines/OMAS.jl.git" -EFIT_url="git@github.com:JuliaFusion/EFIT.jl.git" -julia --project=. -e 'using Pkg; Pkg.add(url="'$OMAS_url'", rev="master"); Pkg.instantiate()' -julia --project=. -e 'using Pkg; Pkg.add(url="'$EFIT_url'", rev="master"); Pkg.instantiate()' -for repo in GGDUtils SOLPS2IMAS SynthDiag SD4SOLPS; do - repo_url="git@github.com:ProjectTorreyPines/"$repo".jl.git" - julia --project=. 
-e 'using Pkg; Pkg.add(url="'$repo_url'", rev="master"); Pkg.instantiate()' -done \ No newline at end of file diff --git a/example/demo.ipynb b/example/demo.ipynb index 6841ccf..45420d1 100644 --- a/example/demo.ipynb +++ b/example/demo.ipynb @@ -8,6 +8,15 @@ "source": [ "using Pkg;\n", "Pkg.activate(\"./\")\n", + "\n", + "# Can comment these after the first run\n", + "Pkg.add(; url=\"git@github.com:ProjectTorreyPines/IMASDD.jl.git\");\n", + "Pkg.add(; url=\"git@github.com:ProjectTorreyPines/GGDUtils.jl.git\");\n", + "Pkg.add(; url=\"git@github.com:ProjectTorreyPines/SOLPS2IMAS.jl.git\");\n", + "Pkg.add(; url=\"git@github.com:ProjectTorreyPines/SynthDiag.jl.git\");\n", + "Pkg.add(; url=\"git@github.com:JuliaFusion/EFIT.jl.git\");\n", + "Pkg.add(; url=\"git@github.com:ProjectTorreyPines/SD4SOLPS.jl.git\");\n", + "\n", "Pkg.instantiate()" ] }, @@ -33,10 +42,14 @@ "metadata": {}, "outputs": [], "source": [ - "b2gmtry = \"samples/b2fgmtry\"\n", - "b2output = \"samples/b2time.nc\"\n", - "gsdesc = \"samples/gridspacedesc.yml\"\n", - "b2mn = \"samples/b2mn.dat\"" + "b2gmtry = \"../sample/ITER_Lore_2296_00000/baserun/b2fgmtry\"\n", + "b2output = \"../sample/ITER_Lore_2296_00000/run_time_dep_EIRENE_jdl_to_ss_cont_sine2_2d_output/b2time.nc\"\n", + "b2mn = \"../sample/ITER_Lore_2296_00000/run_time_dep_EIRENE_jdl_to_ss_cont_sine2_2d_output/b2mn.dat\"\n", + "fort = (\n", + " \"../sample/ITER_Lore_2296_00000/baserun/fort.33\",\n", + " \"../sample/ITER_Lore_2296_00000/baserun/fort.34\",\n", + " \"../sample/ITER_Lore_2296_00000/baserun/fort.35\",\n", + " )" ] }, { @@ -45,7 +58,7 @@ "metadata": {}, "outputs": [], "source": [ - "ids = SOLPS2IMAS.solps2imas(b2gmtry, b2output, gsdesc, b2mn)" + "ids = SOLPS2IMAS.solps2imas(b2gmtry, b2output; b2mn=b2mn, fort=fort);" ] }, { @@ -78,8 +91,8 @@ "metadata": {}, "outputs": [], "source": [ - "grid_ggd = ids.edge_profiles.grid_ggd[1] # First grid_ggd time slice. 
It is allowed to vary in time\n", - "space = grid_ggd.space[1] # First space in this grid_ggd" + "grid_ggd = ids.edge_profiles.grid_ggd[1]; # First grid_ggd time slice. It is allowed to vary in time\n", + "space = grid_ggd.space[1]; # First space in this grid_ggd" ] }, { @@ -102,25 +115,25 @@ "plot(space) # Simply plot the grid described in space, all common arguments to plot can be given here\n", "\n", "# You can overlay any subset by giving a second argument\n", - "# Labels \n", - "plot!(space, GGDUtils.get_grid_subset_with_index(grid_ggd, 6), markercolor=:chocolate1)\n", - "plot!(space, GGDUtils.get_grid_subset_with_index(grid_ggd, 7), linecolor=:red, linewidth=2)\n", - "plot!(space, GGDUtils.get_grid_subset_with_index(grid_ggd, 8), linecolor=:darkred, linewidth=2)\n", - "plot!(space, GGDUtils.get_grid_subset_with_index(grid_ggd, 9), linecolor=:limegreen, linewidth=2)\n", - "plot!(space, GGDUtils.get_grid_subset_with_index(grid_ggd, 10), linecolor=:darkgreen, linewidth=2)\n", - "plot!(space, GGDUtils.get_grid_subset_with_index(grid_ggd, 11), linecolor=:cyan, linewidth=2)\n", - "# plot!(space, GGDUtils.get_grid_subset_with_index(grid_ggd, 12), linecolor=:teal, linewidth=1)\n", - "plot!(space, GGDUtils.get_grid_subset_with_index(grid_ggd, 13), linecolor=:royalblue1, linewidth=2)\n", - "plot!(space, GGDUtils.get_grid_subset_with_index(grid_ggd, 14), linecolor=:navyblue, linewidth=2)\n", - "plot!(space, GGDUtils.get_grid_subset_with_index(grid_ggd, 15), linecolor=:fuchsia, linewidth=2, linestyle=:dash)\n", - "plot!(space, GGDUtils.get_grid_subset_with_index(grid_ggd, 16), linecolor=:purple4, linewidth=2, linestyle=:dash)\n", - "plot!(space, GGDUtils.get_grid_subset_with_index(grid_ggd, 101), markershape=:rect, markercolor=:royalblue1)\n", - "# plot!(space, GGDUtils.get_grid_subset_with_index(grid_ggd, 102), markershape=:rect, markercolor=:maroon)\n", - "plot!(space, GGDUtils.get_grid_subset_with_index(grid_ggd, 103), markershape=:diamond, markercolor=:fuchsia)\n", - 
"plot!(space, GGDUtils.get_grid_subset_with_index(grid_ggd, 104), markershape=:diamond, markercolor=:purple4)\n", + "# Labels\n", + "plot!(space, GGDUtils.get_grid_subset(grid_ggd, \"x_points\"), markercolor=:chocolate1, label=\"X-point\")\n", + "plot!(space, GGDUtils.get_grid_subset(grid_ggd, \"core_cut\"), linecolor=:red, linewidth=2, label=\"Core Cut\")\n", + "plot!(space, GGDUtils.get_grid_subset(grid_ggd, \"PFR_cut\"), linecolor=:darkred, linewidth=2, label=\"PFR Cut\")\n", + "plot!(space, GGDUtils.get_grid_subset(grid_ggd, \"outer_throat\"), linecolor=:limegreen, linewidth=2, label=\"Outer Throat\")\n", + "plot!(space, GGDUtils.get_grid_subset(grid_ggd, \"inner_throat\"), linecolor=:darkgreen, linewidth=2, label=\"Inner Throat\")\n", + "plot!(space, GGDUtils.get_grid_subset(grid_ggd, \"outer_midplane\"), linecolor=:cyan, linewidth=2, label=\"Outer midplane\")\n", + "# plot!(space, GGDUtils.get_grid_subset(grid_ggd, \"inner_midplane\"), linecolor=:teal, linewidth=2, label=\"Inner midplane\")\n", + "plot!(space, GGDUtils.get_grid_subset(grid_ggd, \"outer_target\"), linecolor=:royalblue1, linewidth=2, label=\"Outer target\")\n", + "plot!(space, GGDUtils.get_grid_subset(grid_ggd, \"inner_target\"), linecolor=:navyblue, linewidth=2, label=\"Inner target\")\n", + "plot!(space, GGDUtils.get_grid_subset(grid_ggd, \"core_boundary\"), linecolor=:fuchsia, linewidth=2, linestyle=:dash, label=\"Core boundary\")\n", + "plot!(space, GGDUtils.get_grid_subset(grid_ggd, \"separatrix\"), linecolor=:purple4, linewidth=2, linestyle=:dash, label=\"Separatrix\")\n", + "plot!(space, GGDUtils.get_grid_subset(grid_ggd, 101), markershape=:rect, markercolor=:royalblue1)\n", + "# plot!(space, GGDUtils.get_grid_subset(grid_ggd, 102), markershape=:rect, markercolor=:maroon)\n", + "plot!(space, GGDUtils.get_grid_subset(grid_ggd, 103), markershape=:diamond, markercolor=:fuchsia)\n", + "plot!(space, GGDUtils.get_grid_subset(grid_ggd, 104), markershape=:diamond, markercolor=:purple4)\n", "\n", 
"# Legend is supressed unless asked for specifically\n", - "plot!(legend=true)\n", + "plot!(legend=true, left_margin=10Plots.pt)\n", "# Default labels are subset.identifier.name but can be changed by providing a label argument\n" ] }, @@ -141,8 +154,9 @@ "gr() # Fast and can save pdf\n", "# plotlyjs() # Use for interactive plot, can only save png\n", "\n", - "n_e = GGDUtils.get_prop_with_grid_subset_index(ids.edge_profiles.ggd[1].electrons.density, 5)\n", - "plot(ids.edge_profiles.grid_ggd, n_e, colorbar_title=\"Electrons density / m^(-3)\")" + "n_e = GGDUtils.get_prop_with_grid_subset_index(ids.edge_profiles.ggd[1].electrons.density, -5)\n", + "plot(ids.edge_profiles.grid_ggd, n_e, colorbar_title=\"Electrons density / m^(-3)\",\n", + " left_margin=10Plots.pt)" ] }, { @@ -164,7 +178,8 @@ "# plotlyjs() # Use for interactive plot, can only save png\n", "\n", "plot(ids.edge_profiles.grid_ggd, n_e) # Note default label in colorbar\n", - "plot!(space, GGDUtils.get_grid_subset_with_index(grid_ggd, 16), linecolor=:black, linewidth=2, linestyle=:solid, label=\"Separatix\", legend=true)" + "plot!(space, GGDUtils.get_grid_subset(grid_ggd, 16), linecolor=:black, linewidth=2,\n", + " linestyle=:solid, label=\"Separatix\", legend=true, left_margin=10Plots.pt)" ] }, { @@ -189,8 +204,8 @@ "metadata": {}, "outputs": [], "source": [ - "eqdsk = \"samples/g002296.00200\"\n", - "SD4SOLPS.geqdsk_to_imas!(eqdsk, ids)" + "eqdsk = \"../sample/ITER_Lore_2296_00000/EQDSK/g002296.00200\"\n", + "SD4SOLPS.geqdsk_to_imas!(eqdsk, ids; set_time=0.2)" ] }, { @@ -206,28 +221,11 @@ "metadata": {}, "outputs": [], "source": [ - "SD4SOLPS.fill_in_extrapolated_core_profile!(ids, \"electrons.density\"; method=\"simple\")\n", - "SD4SOLPS.fill_in_extrapolated_core_profile!(ids, \"electrons.temperature\"; method=\"simple\")\n", + "SD4SOLPS.fill_in_extrapolated_core_profile!(ids, \"electrons.density\"; method=\"simple\", cell_subset_idx=-5)\n", + "SD4SOLPS.fill_in_extrapolated_core_profile!(ids, 
\"electrons.temperature\"; method=\"simple\", cell_subset_idx=-5)\n", "# ... more profiles here as they become available in b2time" ] }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### Extend mesh outside SOLPS mesh to the device wall" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "\n", - "SD4SOLPS.cached_mesh_extension!(ids, eqdsk, b2gmtry)" - ] - }, { "cell_type": "markdown", "metadata": {}, @@ -257,7 +255,7 @@ "metadata": {}, "outputs": [], "source": [ - "SynthDiag.add_interferometer!(\"samples/default_interferometer.json\", ids)" + "SynthDiag.add_interferometer!(SynthDiag.default_ifo, ids; n_e_gsi=-5);" ] }, { @@ -279,7 +277,7 @@ "\n", "plot(space)\n", "plot!(ids.interferometer) # Default plot_type is :los \n", - "plot!(legend=true)" + "plot!(legend=true, left_margin=10Plots.pt)" ] }, { @@ -301,7 +299,7 @@ "\n", "plot(space)\n", "plot!(ids.interferometer, mirror_length=0.7, linewidth=4, mirror_thickness=0.2)\n", - "plot!(legend=true)" + "plot!(legend=true, left_margin = 10Plots.pt)" ] }, { @@ -323,7 +321,7 @@ "\n", "plot(space)\n", "plot!(ids.interferometer, mirror=false)\n", - "plot!(legend=true)" + "plot!(legend=true, left_margin=10Plots.pt)" ] }, { @@ -345,7 +343,7 @@ "\n", "plot(space)\n", "plot!(ids.interferometer.channel[1], label=\"Channel 1\")\n", - "plot!(legend=true)" + "plot!(legend=true, left_margin=10Plots.pt)" ] }, { @@ -368,7 +366,7 @@ "gr() # Fast and can save pdf\n", "# plotlyjs() # Use for interactive plot, can only save png\n", "\n", - "plot(ids.interferometer, plot_type=:n_e)" + "plot(ids.interferometer, plot_type=:n_e, left_margin=10Plots.pt)" ] }, { @@ -381,7 +379,7 @@ "gr() # Fast and can save pdf\n", "# plotlyjs() # Use for interactive plot, can only save png\n", "\n", - "plot(ids.interferometer, plot_type=:n_e_average)" + "plot(ids.interferometer, plot_type=:n_e_average, left_margin=10Plots.pt)" ] }, { @@ -401,7 +399,7 @@ "gr() # Fast and can save 
pdf\n", "# plotlyjs() # Use for interactive plot, can only save png\n", "\n", - "plot(ids.interferometer.channel[1], plot_type=:n_e_average)" + "plot(ids.interferometer.channel[1], plot_type=:n_e_average, left_margin=10Plots.pt)" ] }, { @@ -419,7 +417,7 @@ "metadata": {}, "outputs": [], "source": [ - "SynthDiag.add_langmuir_probes!(\"samples/default_langmuir_probes.json\", ids)" + "SynthDiag.add_langmuir_probes!(SynthDiag.default_lp, ids; n_e_gsi=-5);" ] }, { @@ -439,7 +437,7 @@ "# plotlyjs() # Use for interactive plot, can only save png\n", "\n", "plot(ids.langmuir_probes.embedded[1].time, ids.langmuir_probes.embedded[1].n_e.data, label=ids.langmuir_probes.embedded[1].name)\n", - "plot!(ylabel=\"Electron density / m^(-3)\", xlabel=\"Time / s\", legend=true)" + "plot!(ylabel=\"Electron density / m^(-3)\", xlabel=\"Time / s\", legend=true, left_margin=10Plots.pt)" ] } ], diff --git a/example/makefile b/example/makefile deleted file mode 100644 index 8d6c7a2..0000000 --- a/example/makefile +++ /dev/null @@ -1,25 +0,0 @@ -SHELL := /bin/zsh -help: - @echo "Help Menu" - @echo - @echo "make env_with_cloned_repo: Creates a Julia environment with the cloned repositories" - @echo "make env_with_git_url: Creates a Julia environment with the git urls without creating local clones" - @echo "make clean: Deletes Project.toml and Manifest.toml for a fresh start" - @echo "make Clean: Deletes Project.toml, Manifest.toml, and any cloned repositories for a fresh start" - @echo - -env_with_cloned_repo: - chmod +x create_env_with_clone.sh - ./create_env_with_clone.sh - -env_with_git_url: - chmod +x create_env_with_git.sh - ./create_env_with_git.sh - -clean: - @echo "Deleting Project.toml and Manifest.toml" - rm Project.toml Manifest.toml - -Clean: clean - chmod +x rm_cloned_repo.sh - ./rm_cloned_repo.sh \ No newline at end of file diff --git a/example/rm_cloned_repo.sh b/example/rm_cloned_repo.sh deleted file mode 100644 index 351e0cc..0000000 --- a/example/rm_cloned_repo.sh +++ 
/dev/null @@ -1,32 +0,0 @@ -#!/bin/bash - -echo "Deleting any cloned repo" -is_repo=$(git rev-parse --is-inside-work-tree 2>/dev/null) -if [[ ${is_repo} == "true" ]] -then - SD4SOLPS_path="$(git rev-parse --show-toplevel)" - project_path="$(dirname ${SD4SOLPS_path})" -else - SD4SOLPS_path="" - project_path="$(dirname $(pwd))" -fi - -echo "Removing extra cloned git repositories from "$project_path - -if [[ -z $SD4SOLPS_path ]] -then - # Check if $project_path"/SD4SOLPS.jl" exists - if [[ -d $project_path"/SD4SOLPS.jl" ]] - then - echo "Removing "$project_path"/SD4SOLPS.jl" - rm -rf $project_path"/SD4SOLPS.jl" - fi -fi -for repo in SynthDiag.jl SOLPS2IMAS.jl OMAS.jl GGDUtils.jl -do - if [[ -d $project_path"/"$repo ]] - then - echo "Removing "$project_path"/"$repo - rm -rf $project_path"/"$repo - fi -done \ No newline at end of file diff --git a/example/simple_demo.jl b/example/simple_demo.jl new file mode 100644 index 0000000..9f28b99 --- /dev/null +++ b/example/simple_demo.jl @@ -0,0 +1,39 @@ +# A simple demo that can be run without jupyter notebooks. +# First, activate the project (preferably after loading Revise), then include this file: +# > using Revise +# ] activate . +# > include("example/simple_demo.jl") +using SOLPS2IMAS: SOLPS2IMAS +using GGDUtils: GGDUtils +using Plots +using SD4SOLPS + +sample_path = "$(@__DIR__)/../sample/ITER_Lore_2296_00000/" +solps2imas_samples = splitdir(pathof(SOLPS2IMAS))[1] * "/../samples" + +dd = SD4SOLPS.preparation( + "Baseline2008-li0.70.x4.mod2.eqdsk", + [sample_path, solps2imas_samples]...; + eqdsk_set_time=0.0, +) + +grid_ggd = dd.edge_profiles.grid_ggd[1] # First grid_ggd time slice. 
It is allowed to vary in time +space = grid_ggd.space[1] # First space in this grid_ggd + +# Choose backend +gr() # Fast and can save pdf +# plotlyjs() # Use for interactive plot, can only save png +n_e = GGDUtils.get_prop_with_grid_subset_index( + dd.edge_profiles.ggd[1].electrons.density, + 5, +) +plot(dd.edge_profiles.grid_ggd, n_e; colorbar_title="Electrons density / m^(-3)") +plot!( + space, + GGDUtils.get_grid_subset(grid_ggd, 16); + linecolor=:black, + linewidth=2, + linestyle=:solid, + label="Separatix", + legend=true, +) diff --git a/makefile b/makefile new file mode 100644 index 0000000..e124ea4 --- /dev/null +++ b/makefile @@ -0,0 +1,29 @@ +SHELL := /bin/zsh +help: + @echo "Help Menu" + @echo + @echo "make env_with_cloned_repo (or make r): Creates a Julia environment with the cloned repositories" + @echo "make env_with_git_url (or make u): Creates a Julia environment with the git urls without creating local clones" + @echo "make clean: Deletes Project.toml and Manifest.toml for a fresh start" + @echo + +env_with_cloned_repo r: + @echo "Pulling sample files using dvc" + -dvc pull + @echo "Creating Julia environment by creating local clones of dependent repositories" + @echo "Cloning the repositories and generating Manifest.toml" + -git clone "git@github.com:ProjectTorreyPines/IMASDD.jl.git" ../IMASDD; \ + git clone "git@github.com:ProjectTorreyPines/GGDUtils.jl.git" ../GGDUtils; \ + git clone "git@github.com:ProjectTorreyPines/SOLPS2IMAS.jl.git" ../SOLPS2IMAS; \ + julia --project=. 
-e 'using Pkg; Pkg.rm(["IMASDD", "GGDUtils", "SOLPS2IMAS", "EFIT"]); Pkg.develop(path="../IMASDD"); Pkg.develop(path="../GGDUtils"); Pkg.develop(path="../SOLPS2IMAS"); Pkg.add(url="git@github.com:JuliaFusion/EFIT.jl.git", rev="master"); Pkg.instantiate()' + +env_with_git_url u: + @echo "Pulling sample files using dvc" + -dvc pull + @echo "Creating Julia environment with the git urls without creating local clones" + @echo "Generating Project.toml and Manifest.toml" + julia --project=. -e 'using Pkg; Pkg.rm(["IMASDD", "GGDUtils", "SOLPS2IMAS", "EFIT"]); Pkg.add(url="git@github.com:ProjectTorreyPines/IMASDD.jl.git", rev="master"); Pkg.add(url="git@github.com:ProjectTorreyPines/GGDUtils.jl.git", rev="master"); Pkg.add(url="git@github.com:ProjectTorreyPines/SOLPS2IMAS.jl.git", rev="master"); Pkg.add(url="git@github.com:JuliaFusion/EFIT.jl.git", rev="master"); Pkg.instantiate()' + +clean: + @echo "Deleting Manifest.toml" + - rm Manifest.toml diff --git a/sample/geqdsk_iter_small_sample b/sample/geqdsk_iter_small_sample index e51be73..a230a97 100644 --- a/sample/geqdsk_iter_small_sample +++ b/sample/geqdsk_iter_small_sample @@ -1,4 +1,4 @@ - EFITD 00/00/2008 #002296 0200ms Convert 0 65 65 + EFITD 00/00/2008 #002296 0200 0 65 65 0.624390220E+01 0.124878049E+02 0.619999981E+01 0.307804870E+01 0.200000050E+00 0.635000000E+01 0.580000000E+00 0.118000000E+02 0.000000000E+00 0.530000019E+01 0.150229038E+08 0.118000000E+02 0.000000000E+00 0.635000000E+01 0.000000000E+00 diff --git a/src/SD4SOLPS.jl b/src/SD4SOLPS.jl index 0f87fe4..fea8b06 100644 --- a/src/SD4SOLPS.jl +++ b/src/SD4SOLPS.jl @@ -1,41 +1,43 @@ module SD4SOLPS -using OMAS: OMAS +using IMASDD: IMASDD using SOLPS2IMAS: SOLPS2IMAS using EFIT: EFIT using Interpolations: Interpolations -#import GGDUtils -export find_files_in_allowed_folders -export geqdsk_to_imas! 
+export find_files_in_allowed_folders, geqdsk_to_imas!, preparation include("$(@__DIR__)/supersize_profile.jl") include("$(@__DIR__)/repair_eq.jl") -include("$(@__DIR__)/actuator_model.jl") - -greet() = print("Hello World!") +include("$(@__DIR__)/unit_utils.jl") """ - find_files_in_allowed_folders() + find_files_in_allowed_folders( + input_dirs::String...; + eqdsk_file::String, + recursive::Bool=true, + ) Searches a list of allowed folders for a set of filenames that will provide information about the SOLPS case. Returns a list of filenames with complete paths. Example: + +```julia SD4SOLPS.find_files_in_allowed_folders( -"/D3D_Ma_184833_03600", -eqdsk_file="g184833.03600", + "/D3D_Ma_184833_03600"; + eqdsk_file="g184833.03600", ) +``` """ function find_files_in_allowed_folders( - input_dirs...; - eqdsk_file, - recursive=true, - allow_reduced_versions=false, -) - files = ["b2fgmtry", "b2time.nc", "b2mn.dat", "gridspacedesc.yml", eqdsk_file] + input_dirs::String...; + eqdsk_file::String, + recursive::Bool=true, +)::Vector{String} + files = ["b2fgmtry", "b2time.nc", "b2mn.dat", eqdsk_file] reduced_files = - ["b2fgmtry_red", "b2time_red.nc", "b2mn.dat", "gridspacedesc.yml", eqdsk_file] + ["b2fgmtry_red", "b2time_red.nc", "b2mn.dat", eqdsk_file] output_files = fill("", length(files)) if recursive dirs = [] @@ -46,7 +48,7 @@ function find_files_in_allowed_folders( else dirs = input_dirs end - for i ∈ 1:length(files) + for i ∈ eachindex(files) for dir ∈ dirs file = dir * "/" * files[i] reduced_file = dir * "/" * reduced_files[i] @@ -63,18 +65,35 @@ function find_files_in_allowed_folders( end """ - geqdsk_to_imas!() - -Transfers the equilibrium reconstruction in an EFIT-style gEQDSK file into + geqdsk_to_imas!( + eqdsk_file::String, + dd::IMASDD.dd; + set_time::Union{Nothing, Float64}=nothing, + time_index::Int=1, + ) + +Transfers the equilibrium reconstruction from an EFIT-style gEQDSK file into the IMAS DD structure. 
""" -function geqdsk_to_imas!(eqdsk_file, dd; time_index=1) +function geqdsk_to_imas!( + eqdsk_file::String, + dd::IMASDD.dd; + set_time::Union{Nothing, Float64}=nothing, + time_index::Int=1, +) # https://github.com/JuliaFusion/EFIT.jl/blob/master/src/io.jl - g = EFIT.readg(eqdsk_file) + g = EFIT.readg(eqdsk_file; set_time=set_time) # Copying ideas from OMFIT: omfit/omfit_classes/omfit_eqdsk.py / to_omas() eq = dd.equilibrium - resize!(eq.time_slice, 1) + if IMASDD.ismissing(eq, :time) + eq.time = Array{Float64}(undef, time_index) + end + eq.time[time_index] = g.time + if length(eq.time_slice) < time_index + resize!(eq.time_slice, time_index) + end eqt = eq.time_slice[time_index] + eqt.time = g.time # 0D gq = eqt.global_quantities @@ -127,7 +146,7 @@ function geqdsk_to_imas!(eqdsk_file, dd; time_index=1) if length(xrs) > 0 bx = eqt.boundary.x_point resize!(bx, length(xrs)) - for i ∈ length(xrs) + for i ∈ eachindex(xrs) bx[i].r = xrs[i] bx[i].z = xzs[i] end @@ -172,107 +191,40 @@ function geqdsk_to_imas!(eqdsk_file, dd; time_index=1) end """ - core_profile_2d(dd, prof_time_idx, eq_time_idx, quantity) - -Reads a 1D core profile and flux map and returns a quantity at requested R,Z points -dd: a data dictionary instance with equilibrium and core profile data loaded -quantity: a string specifying the quantity to fetch - -Returns a callable function that takes (r, z) as arguments and returns the quantity -""" -function core_profile_2d(dd, prof_time_idx, eq_time_idx, quantity) - if !check_rho_1d(dd; time_slice=eq_time_idx) - throw(ArgumentError("Equilibrium rho profile in input DD was missing.")) - end - prof = dd.core_profiles.profiles_1d[prof_time_idx] - rho_prof = prof.grid.rho_tor_norm - quantity_fields = split(quantity, ".") - p = prof - for field ∈ quantity_fields - p = getproperty(p, Symbol(field)) - end - eqt = dd.equilibrium.time_slice[eq_time_idx] - p1 = eqt.profiles_1d - p2 = eqt.profiles_2d[1] - gq = eqt.global_quantities - psi_a = gq.psi_axis - psi_b = 
gq.psi_boundary - rhon_eq = p1.rho_tor_norm - psi_eq = p1.psi - psin_eq = (psi_eq .- psi_a) ./ (psi_b - psi_a) - psirz = p2.psi - psinrz = (psirz .- psi_a) ./ (psi_b - psi_a) - r_eq = p2.grid.dim1 - z_eq = p2.grid.dim2 - extension = [1.0001, 1.1, 5] - # rho_N isn't defined on open flux surfaces, so it is extended by copying psi_N - psin_eq_ext = copy(psin_eq) - append!(psin_eq_ext, extension) - rhon_eq_ext = copy(rhon_eq) - append!(rhon_eq_ext, extension) - neg_extension = [-5, -0.0001] # I guess this would be at a PF coil or something? - prepend!(psin_eq_ext, neg_extension) - prepend!(rhon_eq_ext, neg_extension) - - rho_prof_ext = vcat(rho_prof, extension) - p_ext = vcat(p, zeros(size(extension))) - rho_prof_ext = prepend!(rho_prof_ext, neg_extension) - p_ext = prepend!(p_ext, zeros(size(neg_extension))) - - # Data are set up and ready to process - - # EDGE PROFILES (the input data): - # rho_prof_ext: rho_N values associated with the profile of some quantity - # p_ext : profile of some quantity vs. 
rho_prof - - # EQUILBRIUM (the connection from rho to R,Z via psi): - # psin_eq_ext : 1D array of psi_N values that corresponds to rhon_eq_ext - # rhon_eq_ext : 1D array of rho_N values that corresponds to psin_eq_ext - # --> connects rho to everything else via psi - # psinrz : 2D array of psi_N values that corresponds to r_eq and z_eq - # r_eq and z_eq : 1D coordinate arrays that correspond to psinrz - - # OUTPUT INSTRUCTIONS: - # r and z : coordinates of output points where values of p are desired - - # psi_at_requested_points = - # Interpolations.linear_interpolation((r_eq, z_eq), psinrz).(r, z) - # rhonpsi = Interpolations.linear_interpolation(psin_eq_ext, rhon_eq_ext) - # rho_at_requested_points = rhonpsi.(psi_at_requested_points) - # itp = Interpolations.linear_interpolation(rho_prof_ext, p_ext) - # p_at_requested_points = itp.(rho_at_requested_points) - # return p_at_requested_points - rz2psin = Interpolations.linear_interpolation((r_eq, z_eq), psinrz) - psin2rhon = Interpolations.linear_interpolation(psin_eq_ext, rhon_eq_ext) - rhon2prof = Interpolations.linear_interpolation(rho_prof_ext, p_ext) - return (r, z) -> rhon2prof(psin2rhon(rz2psin(r, z))) -end - -""" - preparation() + preparation( + eqdsk_file::String, + dirs::String...; + core_method::String="simple", + filename::String="sd_input_data", + output_format::String="json", + eqdsk_set_time::Union{Nothing, Float64}=nothing, + eq_time_index::Int64=1, + )::IMASDD.dd Gathers SOLPS and EFIT files and loads them into IMAS structure. Extrapolates profiles as needed to get a complete picture. 
""" function preparation( - eqdsk_file, - dirs...; + eqdsk_file::String, + dirs::Vector{String}; core_method::String="simple", - edge_method::String="simple", filename::String="sd_input_data", output_format::String="json", -) - b2fgmtry, b2time, b2mn, gridspec, eqdsk = + eqdsk_set_time::Union{Nothing, Float64}=nothing, + eq_time_index::Int64=1, +)::IMASDD.dd + b2fgmtry, b2time, b2mn, eqdsk = find_files_in_allowed_folders(dirs...; eqdsk_file=eqdsk_file) println("Found source files:") println(" b2fgmtry = ", b2fgmtry) println(" b2time = ", b2time) println(" b2mn.dat = ", b2mn) - println(" gridspec = ", gridspec) println(" eqdsk = ", eqdsk) - dd = SOLPS2IMAS.solps2imas(b2fgmtry, b2time, gridspec, b2mn) - geqdsk_to_imas!(eqdsk, dd) + dd = SOLPS2IMAS.solps2imas(b2fgmtry, b2time; b2mn=b2mn) + geqdsk_to_imas!(eqdsk, dd; set_time=eqdsk_set_time, time_index=eq_time_index) + # Repairs + add_rho_to_equilibrium!(dd) # Doesn't do anything if rho is valid println("Loaded input data into IMAS DD") fill_in_extrapolated_core_profile!(dd, "electrons.density"; method=core_method) @@ -286,7 +238,7 @@ function preparation( println("Extrapolated edge profiles (but not really (placeholder only))") if output_format == "json" - OMAS.imas2json(dd, filename * ".json") + IMASDD.imas2json(dd, filename * ".json") else throw(ArgumentError(string("Unrecognized output format: ", output_format))) end diff --git a/src/repair_eq.jl b/src/repair_eq.jl index 24b53bb..3d9bae7 100644 --- a/src/repair_eq.jl +++ b/src/repair_eq.jl @@ -13,11 +13,19 @@ export add_rho_to_equilibrium! 
export check_rho_1d """ - function check_rho_1d() + check_rho_1d( + dd::IMASDD.dd; + time_slice::Int64=1, + throw_on_fail::Bool=false, + )::Bool Checks to see if rho exists and is valid in the equilibrium 1d profiles """ -function check_rho_1d(dd::OMAS.dd; time_slice::Int64=1) +function check_rho_1d( + dd::IMASDD.dd; + time_slice::Int64=1, + throw_on_fail::Bool=false, +)::Bool rho = dd.equilibrium.time_slice[time_slice].profiles_1d.rho_tor_norm if length(rho) < 1 rho_okay = false @@ -26,15 +34,33 @@ function check_rho_1d(dd::OMAS.dd; time_slice::Int64=1) else rho_okay = true end + if (throw_on_fail) * (!rho_okay) + if length(rho) < 1 + reason = "rho is missing" + else + reason = "rho is all zeros" + end + throw( + ArgumentError( + string( + "Equilibrium rho profile data at time index ", time_slice, + " is invalid. This is usually because rho is missing or all", + " zeros in the source file. In this case, ", reason, ". You", + " can try using add_rho_to_equilibrium!() to calculate rho ", + "and add it.", + ), + ), + ) + end return rho_okay end """ - function add_rho_to_equilibrium(dd:OMAS.dd) + function add_rho_to_equilibrium(dd:IMASDD.dd) Adds equilibrium rho profile to the DD """ -function add_rho_to_equilibrium!(dd::OMAS.dd) +function add_rho_to_equilibrium!(dd::IMASDD.dd) nt = length(dd.equilibrium.time_slice) if nt < 1 println("No equilibrium time slices to work with; can't add rho") @@ -53,8 +79,14 @@ function add_rho_to_equilibrium!(dd::OMAS.dd) continue end end - if length(eqt.profiles_1d.phi) == 0 - resize!(eqt.profiles_1d.phi, n) + if ( + if IMASDD.ismissing(eqt.profiles_1d, :phi) + true + else + IMASDD.isempty(eqt.profiles_1d.phi) + end + ) + eqt.profiles_1d.phi = Array{Float64}(undef, n) psi2 = eqt.profiles_2d[1].psi req = collect(eqt.profiles_2d[1].grid.dim1) zeq = collect(eqt.profiles_2d[1].grid.dim2) diff --git a/src/supersize_profile.jl b/src/supersize_profile.jl index cb75739..6522481 100644 --- a/src/supersize_profile.jl +++ 
b/src/supersize_profile.jl @@ -2,22 +2,22 @@ Utilities for extrapolating profiles """ -# import CalculusWithJulia -using OMAS: OMAS +using IMASDD: IMASDD using Interpolations: Interpolations using GGDUtils: - GGDUtils, get_grid_subset_with_index, add_subset_element!, get_subset_boundary, - project_prop_on_subset!, get_subset_centers + GGDUtils, get_grid_subset, add_subset_element!, get_subset_boundary, + project_prop_on_subset!, get_subset_centers, get_TPS_mats using PolygonOps: PolygonOps using JSON: JSON -export extrapolate_core -export fill_in_extrapolated_core_profile -export mesh_psi_spacing -export find_x_points! +export extrapolate_core, + fill_in_extrapolated_core_profile!, mesh_psi_spacing, cached_mesh_extension! """ - cumul_integrate(x::AbstractVector, y::AbstractVector) + cumul_integrate( + x::AbstractVector, + y::AbstractVector, + )::AbstractVector Computes cumulative integral of y(x) using trapezoidal rule. Source code from obsolete NumericalIntegration.jl package. @@ -25,7 +25,10 @@ https://github.com/dextorious/NumericalIntegration.jl/blob/540b6c4bee089dfef7b9a Modified the code to remove use of @inbounds, @fastmath macros that Julia documentation recommends to use with caution. """ -function cumul_integrate(x::AbstractVector, y::AbstractVector) +function cumul_integrate( + x::AbstractVector, + y::AbstractVector, +)::AbstractVector init = (x[2] - x[1]) * (y[1] + y[2]) n = length(x) retarr = Vector{typeof(init)}(undef, n) @@ -38,7 +41,11 @@ function cumul_integrate(x::AbstractVector, y::AbstractVector) end """ - extrapolate_core(edge_rho, edge_quantity, rho_output) + extrapolate_core( + edge_rho::Vector{Float64}, + edge_quantity::Vector{Float64}, + rho_output::Vector{Float64}, + )::Vector{Float64} Function for assuming a core profile when given edge profile data. @@ -54,15 +61,19 @@ Concept: 5. 
after making up a very simple gradient profile out of a few line segments, integrate it to get the profile of the quantity in question """ -function extrapolate_core(edge_rho, edge_quantity, rho_output) - grad = OMAS.gradient(edge_rho, edge_quantity) +function extrapolate_core( + edge_rho::Vector{Float64}, + edge_quantity::Vector{Float64}, + rho_output::Vector{Float64}, +)::Vector{Float64} + grad = IMASDD.gradient(edge_rho, edge_quantity) gf = grad[1] rf = edge_rho[1] gmid = -abs(gf) / 4.0 rmid = rf / 2.0 rped_enforce = rf - 0.08 - gped_enforce = rf + (gmid - gf) / (rmid - rf) * rped_enforce - gped_max = maximum(grad) / 10.0 + gped_enforce = gf + (gmid - gf) / (rmid - rf) * rped_enforce + gped_max = maximum(grad) / 2.0 gped_enforce = minimum([abs(gped_enforce), abs(gped_max)]) * sign(gf) gg = [0, gmid, gped_enforce, gf] @@ -89,57 +100,63 @@ function extrapolate_core(edge_rho, edge_quantity, rho_output) ).(rho_output[rho_output.>=rf]) return output_profile end -#!format off + """ fill_in_extrapolated_core_profile!( - dd::OMAS.dd, - quantity_name::String; - method::String="simple", - eq_time_idx::Int64=1, - eq_profiles_2d_idx::Int64=1, - grid_ggd_idx::Int64=1, - space_idx::Int64=1, -) - + dd::IMASDD.dd, + quantity_name::String; + method::String="simple", + eq_time_idx::Int64=1, + eq_profiles_2d_idx::Int64=1, + grid_ggd_idx::Int64=1, + space_idx::Int64=1, + ) -This function accepts a DD that should be populated with equilibrium and edge_profiles -as well as a request for a quantity to extrapolate into the core. It then maps -edge_profiles data to rho, calls the function that performs the extrapolation (which is +This function accepts a DD that should be populated with `equilibrium` and +`edge_profiles` as well as a request for a quantity to extrapolate into the core. 
It +then maps `edge_profiles` data to rho, calls the function that performs the extrapolation (which is not a simple linear extrapolation but has some trickery to attempt to make a somewhat convincing profile shape), and writes the result to core_profiles. This involves a bunch of interpolations and stuff. -dd: an IMAS/OMAS data dictionary -quantity_name: the name of a quantity in edge_profiles.profiles_2d and - core_profiles.profiles_1d, such as "electrons.density" -method: Extrapolation method. -eq_time_idx: index of the equilibrium time slice to use. For a typical SOLPS run, - the SOLPS mesh will be based on the equilibrium reconstruction at a single - time, so the DD associated with the SOLPS run only needs one equilibrium - time slice to be loaded. However, one could combine the complete - equilibrium time series with the SOLPS run and then have to specify which - slice of the equilibrium corresponds to the SOLPS mesh. -grid_ggd_idx: index of the grid_ggd to use. For a typical SOLPS run, the SOLPS grid is - fixed, so this index defaults to 1. But in future, if a time varying grid - is used, then this index will need to be specified. -space_idx: index of the space to use. For a typical SOLPS run, there will be only one - space so this index will mostly remain at 1. + +Input arguments: + + - `dd`: an IMAS data dictionary + - `quantity_name`: the name of a quantity in `edge_profiles.profiles_2d` and + `core_profiles.profiles_1d`, such as "electrons.density" + - `method`: Extrapolation method. + - `eq_time_id`x: index of the equilibrium time slice to use. For a typical SOLPS run, + the SOLPS mesh will be based on the equilibrium reconstruction at a single + time, so the DD associated with the SOLPS run only needs one equilibrium + time slice to be loaded. However, one could combine the complete + equilibrium time series with the SOLPS run and then have to specify which + slice of the equilibrium corresponds to the SOLPS mesh. 
+ - `eq_profiles_2d_idx`: index of the `profiles_2D` in equilibrium `time_slice`. + - `grid_ggd_idx`: index of the `grid_ggd` to use. For a typical SOLPS run, the SOLPS + grid is fixed, so this index defaults to 1. But in future, if a time varying grid is + used, then this index will need to be specified. + - `space_id`x: index of the space to use. For a typical SOLPS run, there will be only + one space so this index will mostly remain at 1. + - `cell_subset_idx`: index of the subset of cells to use for the extrapolation. The + default is 5, which is the subset of all cells. If `edge_profiles` data is instead + present for a different subset, for instance, -5, which are b2.5 cells only, then + this index should be set to -5. """ -#!format on function fill_in_extrapolated_core_profile!( - dd::OMAS.dd, + dd::IMASDD.dd, quantity_name::String; method::String="simple", eq_time_idx::Int64=1, eq_profiles_2d_idx::Int64=1, grid_ggd_idx::Int64=1, space_idx::Int64=1, + cell_subset_idx::Int64=5, ) + check_rho_1d(dd; time_slice=eq_time_idx, throw_on_fail=true) grid_ggd = dd.edge_profiles.grid_ggd[grid_ggd_idx] space = grid_ggd.space[space_idx] - cell_subset = - get_grid_subset_with_index(grid_ggd, 5) - midplane_subset = - get_grid_subset_with_index(grid_ggd, 11) + cell_subset = get_grid_subset(grid_ggd, cell_subset_idx) + midplane_subset = get_grid_subset(grid_ggd, 11) if length(midplane_subset.element) < 1 throw( @@ -166,6 +183,7 @@ function fill_in_extrapolated_core_profile!( if length(dd.core_profiles.profiles_1d) < nt resize!(dd.core_profiles.profiles_1d, nt) end + TPS_mats = get_TPS_mats(grid_ggd, cell_subset_idx) for it ∈ 1:nt tags = split(quantity_name, ".") quantity_str = dd.edge_profiles.ggd[it] @@ -178,7 +196,7 @@ function fill_in_extrapolated_core_profile!( cell_subset, midplane_subset, space; - interp_method=:KDTree, + TPS_mats=TPS_mats, ) # Now quantity is at the outboard midplane @@ -225,10 +243,9 @@ function fill_in_extrapolated_core_profile!( 
).(psi_for_quantity[in_bounds]) # Make sure the output 1D rho grid exists; create it if needed - if length(dd.core_profiles.profiles_1d[it].grid.rho_tor_norm) == 0 - resize!(dd.core_profiles.profiles_1d[it].grid.rho_tor_norm, 201) - # If you don't like this default, then you should write grid.rho_tor_norm before - # calling this function. + if IMASDD.ismissing(dd.core_profiles.profiles_1d[it].grid, :rho_tor_norm) + # If you don't like this default, then you should write grid.rho_tor_norm + # before calling this function. dd.core_profiles.profiles_1d[it].grid.rho_tor_norm = collect(LinRange(0, 1, 201)) end @@ -252,38 +269,43 @@ function fill_in_extrapolated_core_profile!( end """ - function extrapolate_edge_exp( + extrapolate_edge_exp( quantity_edge::Vector{Float64}, dqdpsi::Vector{Float64}, psin_out::Vector{Float64}, - ) + )::Matrix{Float64} Exponential decay version of edge profile extrapolation. Should work well for many quantities, including Te and ne. If the exponential profile has no background offset, then its amplitude and scale length can be completely defined by matching the quantity value and first derivative at the edge of the mesh. -quantity_edge: values of some physics quantity in cells along the outer edge of the mesh -dqdpsi: Gradient of the quantity vs. psi, aligned perpendicular to the row of cells -being used. -psin_out: Normalized psi values along a vector orthogonal to the row of cells along the -edge. These psi_N values should be outside of the mesh (because the quantity is -already known in the mesh). +Input Arguments: + + - `quantity_edge`: values of some physics quantity in cells along the outer edge of + the mesh. + + - `dqdpsi`: Gradient of the quantity vs. psi, aligned perpendicular to the row of + cells being used. + - `psin_out`: Normalized psi values along a vector orthogonal to the row of cells + along the edge. These `psi_N` values should be outside of the mesh (because the + quantity is already known in the mesh). 
+ The output will be a matrix. """ function extrapolate_edge_exp( quantity_edge::Vector{Float64}, dqdpsi::Vector{Float64}, psin_out::Vector{Float64}, -) +)::Matrix{Float64} x = psin_out - 1.0 lambda = -quantity_edge / dqdpsi q0 = quantity_edge ./ exp(-x' ./ lambda) - return y0 * exp(-x ./ lambda) + return q0 * exp(-x ./ lambda) end """ - prep_flux_map() + prep_flux_map(dd::IMASDD.dd; eq_time_idx::Int64=1, eq_profiles_2d_idx::Int64=1) Reads equilibrium data and extracts/derives some useful quantities. This is very basic, but it was being repeated and that's a no-no. @@ -294,7 +316,7 @@ Returns: - normalized poloidal flux on the equilibrium grid - a linear interpolation of norm pol flux vs. R and Z, ready to be evaluated """ -function prep_flux_map(dd::OMAS.dd; eq_time_idx::Int64=1, eq_profiles_2d_idx::Int64=1) +function prep_flux_map(dd::IMASDD.dd; eq_time_idx::Int64=1, eq_profiles_2d_idx::Int64=1) eqt = dd.equilibrium.time_slice[eq_time_idx] p2 = eqt.profiles_2d[eq_profiles_2d_idx] r_eq = p2.grid.dim1 @@ -303,44 +325,46 @@ function prep_flux_map(dd::OMAS.dd; eq_time_idx::Int64=1, eq_profiles_2d_idx::In psia = eqt.global_quantities.psi_axis psib = eqt.global_quantities.psi_boundary psin_eq = (psi .- psia) ./ (psib - psia) - rzpi = Interpolations.LinearInterpolation((r_eq, z_eq), psin_eq) + rzpi = Interpolations.linear_interpolation((r_eq, z_eq), psin_eq) return r_eq, z_eq, psin_eq, rzpi end -#! format off """ mesh_psi_spacing( - dd::OMAS.dd; - eq_time_idx::Int64=1, - eq_profiles_2d_idx::Int64=1, - grid_ggd_idx::Int64=1, - space_idx::Int64=1, - -) + dd::IMASDD.dd; + eq_time_idx::Int64=1, + eq_profiles_2d_idx::Int64=1, + grid_ggd_idx::Int64=1, + space_idx::Int64=1, + avoid_guard_cell::Bool=true, + spacing_rule="mean", + ) Inspects the mesh to see how far apart faces are in psi_N. Requires that GGD and equilibrium are populated. -dd: a data dictionary instance with required data loaded into it -eq_time_idx: index of the equilibrium time slice to use. 
For a typical SOLPS run, - the SOLPS mesh will be based on the equilibrium reconstruction at a single - time, so the DD associated with the SOLPS run only needs one equilibrium - time slice to be loaded. However, one could combine the complete - equilibrium time series with the SOLPS run and then have to specify which - slice of the equilibrium corresponds to the SOLPS mesh. -grid_ggd_idx: index of the grid_ggd to use. For a typical SOLPS run, the SOLPS grid is - fixed, so this index defaults to 1. But in future, if a time varying grid - is used, then this index will need to be specified. -space_idx: index of the space to use. For a typical SOLPS run, there will be only one - space so this index will mostly remain at 1. -avoid_guard_cell: assume that the last cell is a guard cell so take end-2 and end-1 - instead of end and end-1 -spacing_rule: "edge" or "mean" to make spacing of new cells (in psi_N) be the same - as the spacing at the edge of the mesh, or the same as the average spacing +Input Arguments: + + - `dd`: a data dictionary instance with required data loaded into it + - `eq_time_idx`: index of the equilibrium time slice to use. For a typical SOLPS run, + the SOLPS mesh will be based on the equilibrium reconstruction at a single + time, so the DD associated with the SOLPS run only needs one equilibrium + time slice to be loaded. However, one could combine the complete + equilibrium time series with the SOLPS run and then have to specify which + slice of the equilibrium corresponds to the SOLPS mesh. + - `eq_profiles_2d_idx`: index of the `profiles_2D` in equilibrium `time_slice`. + - `grid_ggd_idx`: index of the `grid_ggd` to use. For a typical SOLPS run, the SOLPS + grid is fixed, so this index defaults to 1. But in future, if a time varying grid + is used, then this index will need to be specified. + - `space_idx`: index of the space to use. For a typical SOLPS run, there will be only + one space so this index will mostly remain at 1. 
+ - `avoid_guard_cell`: assume that the last cell is a guard cell so take `end-2` and + `end-1` instead of `end` and `end-1` + - `spacing_rule`: "edge" or "mean" to make spacing of new cells (in `psi_N`) be the + same as the spacing at the edge of the mesh, or the same as the average spacing """ -#! format on function mesh_psi_spacing( - dd::OMAS.dd; + dd::IMASDD.dd; eq_time_idx::Int64=1, eq_profiles_2d_idx::Int64=1, grid_ggd_idx::Int64=1, @@ -376,8 +400,7 @@ function mesh_psi_spacing( # weird. So use the outboard midplane. That's always a solid choice. grid_ggd = dd.edge_profiles.grid_ggd[grid_ggd_idx] space = grid_ggd.space[space_idx] - midplane_subset = - get_grid_subset_with_index(grid_ggd, 11) + midplane_subset = get_grid_subset(grid_ggd, 11) midplane_cell_centers = get_subset_centers(space, midplane_subset) r_mesh = [midplane_cell_centers[i][1] for i ∈ eachindex(midplane_cell_centers)] z_mesh = [midplane_cell_centers[i][2] for i ∈ eachindex(midplane_cell_centers)] @@ -403,20 +426,26 @@ function mesh_psi_spacing( end """ - pick_extension_psi_range() - -Defines the psi_N levels for an extended mesh. The range of psi_N levels starts -at the outer edge of the existing edge_profiles mesh at the midplane and goes + pick_extension_psi_range( + dd::IMASDD.dd; + eq_time_idx::Int64=1, + eq_profiles_2d_idx::Int64=1, + grid_ggd_idx::Int64=1, + space_idx::Int64=1, + )::Vector{Float64} + +Defines the `psi_N` levels for an extended mesh. The range of `psi_N` levels starts +at the outer edge of the existing `edge_profiles` mesh at the midplane and goes out to the most distant (in flux space) point on the limiting surface. -Returns a vector of psi_N levels. +Returns a vector of `psi_N` levels. 
""" function pick_extension_psi_range( - dd::OMAS.dd; + dd::IMASDD.dd; eq_time_idx::Int64=1, eq_profiles_2d_idx::Int64=1, grid_ggd_idx::Int64=1, space_idx::Int64=1, -) +)::Vector{Float64} r_eq, z_eq, psin_eq, rzpi = prep_flux_map(dd; eq_time_idx, eq_profiles_2d_idx) # Use wall to find maximum extent of contouring project @@ -428,7 +457,7 @@ function pick_extension_psi_range( # Use ggd mesh to find inner limit of contouring project grid_ggd = dd.edge_profiles.grid_ggd[grid_ggd_idx] space = grid_ggd.space[space_idx] - midplane_subset = get_grid_subset_with_index(grid_ggd, 11) + midplane_subset = get_grid_subset(grid_ggd, 11) midplane_cell_centers = get_subset_centers(space, midplane_subset) psin_midplane = rzpi.(midplane_cell_centers[end][1], midplane_cell_centers[end][2]) @@ -459,24 +488,27 @@ function pick_extension_psi_range( end """ - pick_mesh_ext_starting_points(dd; grid_ggd_idx, space_idx) + pick_mesh_ext_starting_points( + grid_ggd::IMASDD.edge_profiles__grid_ggd, + space::IMASDD.edge_profiles__grid_ggd___space, + )::Tuple{Vector{Float64}, Vector{Float64}} Picks starting points for the radial lines of the mesh extension. The strategy is to start from the outer edge of the existing mesh and follow the steepest gradient (of psi_N) to extend these gridlines outward. -dd: a data dictionary instance with edge_profiles ggd and equilibrium loaded -grid_ggd_idx: index within ggd -space_idx: space number / index of the space to work with within edge_profiles Returns a tuple with vectors of R and Z starting points. 
""" -function pick_mesh_ext_starting_points(grid_ggd, space) +function pick_mesh_ext_starting_points( + grid_ggd::IMASDD.edge_profiles__grid_ggd, + space::IMASDD.edge_profiles__grid_ggd___space, +)::Tuple{Vector{Float64}, Vector{Float64}} # Choose starting points for the orthogonal (to the contour) gridlines # Use the existing cells of the standard mesh - all_cell_subset = get_grid_subset_with_index(grid_ggd, 5) + all_cell_subset = get_grid_subset(grid_ggd, 5) all_border_edges = get_subset_boundary(space, all_cell_subset) - core_edges = get_grid_subset_with_index(grid_ggd, 15) - outer_target = get_grid_subset_with_index(grid_ggd, 13) - inner_target = get_grid_subset_with_index(grid_ggd, 14) + core_edges = get_grid_subset(grid_ggd, 15) + outer_target = get_grid_subset(grid_ggd, 13) + inner_target = get_grid_subset(grid_ggd, 14) ci = [ele.object[1].index for ele ∈ core_edges.element] oi = [ele.object[1].index for ele ∈ outer_target.element] ii = [ele.object[1].index for ele ∈ inner_target.element] @@ -502,28 +534,39 @@ function pick_mesh_ext_starting_points(grid_ggd, space) return r, z end -#!format off """ - mesh_ext_follow_grad() + mesh_ext_follow_grad( + r_eq::Vector{Float64}, + z_eq::Vector{Float64}, + psin_eq::Matrix, + rstart::Vector{Float64}, + zstart::Vector{Float64}, + nlvl::Int64, + dpsin::Float64, + rzpi=nothing, + )::Tuple{Matrix{Float64}, Matrix{Float64}} Follows the steepest gradient from a set of starting points, dropping nodes at approximately regular intervals in psi_N. Due to the numerical techniques used, the node spacing may be imperfect (especially prone to error in regions where curvature -of psi_N is large compared to its gradient). -r_eq: Equilibrium reconstruction's grid, R coordinates -z_eq: Equilibrium reconstruction's grid, Z coordinates -psin_eq: Normalized poloidal flux in the equilibrium reconstruction as a function of R and Z -rstart: R coordinates of starting points for the gradient following. 
-zstart: Z coordinates of starting points -nlvl: number of nodes to drop while following the gradient -dpsin: node spacing in delta psi_N -rzpi: linear interpolation of psin_eq() as a function of r_eq and z_eq - This was probably already computed and I think time would be saved by reusing it. - If you don't already have it, you can pass in nothing and let this function calculate it. +of `psi_N` is large compared to its gradient). + +Input Arguments: + + - `r_eq`: Equilibrium reconstruction's grid, R coordinates + - `z_eq`: Equilibrium reconstruction's grid, Z coordinates + - `psin_eq`: Normalized poloidal flux in the equilibrium reconstruction as a function + of R and Z + - `rstart`: R coordinates of starting points for the gradient following. + - `zstart`: Z coordinates of starting points + - `nlvl`: number of nodes to drop while following the gradient + - `dpsin`: node spacing in delta psi_N + - `rzpi`: linear interpolation of psin_eq() as a function of r_eq and z_eq. This was + probably already computed and I think time would be saved by reusing it. If you + don't already have it, you can pass in nothing and let this function calculate it. Returns two matrices with R and Z coordinates of the mesh extension """ -#!format on function mesh_ext_follow_grad( r_eq::Vector{Float64}, z_eq::Vector{Float64}, @@ -533,7 +576,7 @@ function mesh_ext_follow_grad( nlvl::Int64, dpsin::Float64, rzpi=nothing, -) +)::Tuple{Matrix{Float64}, Matrix{Float64}} npol = length(rstart) mesh_r = zeros((npol, nlvl)) mesh_z = zeros((npol, nlvl)) @@ -543,11 +586,11 @@ end if rzpi === nothing - rzpi = Interpolations.LinearInterpolation((r_eq, z_eq), psin_eq) + rzpi = Interpolations.linear_interpolation((r_eq, z_eq), psin_eq) end # Step along the paths of steepest descent to populate the mesh. 
- dpsindr, dpsindz = OMAS.gradient(r_eq, z_eq, psin_eq) + dpsindr, dpsindz = IMASDD.gradient(r_eq, z_eq, psin_eq) dpdr = Interpolations.linear_interpolation((r_eq, z_eq), dpsindr) dpdz = Interpolations.linear_interpolation((r_eq, z_eq), dpsindz) rlim = (minimum(r_eq), maximum(r_eq)) @@ -581,16 +624,23 @@ function mesh_ext_follow_grad( end """ - modify_mesh_ext_near_x! + modify_mesh_ext_near_x!( + eqt::IMASDD.equilibrium__time_slice, + mesh_r::Matrix{Float64}, + mesh_z::Matrix{Float64}, + ) Modifies an extended mesh near a secondary X-point to compensate for the tendency of the mesh to go nuts near the X-point. -eqt: equilibrium.time_slice information -mesh_r: matrix of R values for the extended mesh -mesh_z: matrix of Z values for the extended mesh + +Input Arguments: + + - `eqt`: equilibrium.time_slice information + - `mesh_r`: matrix of R values for the extended mesh + - `mesh_z`: matrix of Z values for the extended mesh """ function modify_mesh_ext_near_x!( - eqt::OMAS.equilibrium__time_slice, + eqt::IMASDD.equilibrium__time_slice, mesh_r::Matrix{Float64}, mesh_z::Matrix{Float64}, ) @@ -671,24 +721,29 @@ function modify_mesh_ext_near_x!( end end -#!format off """ - record_regular_mesh!() + record_regular_mesh!( + grid_ggd::IMASDD.edge_profiles__grid_ggd, + space::IMASDD.edge_profiles__grid_ggd___space, + mesh_r::Matrix{Float64}, + mesh_z::Matrix{Float64}, + cut::Int64, + ) Records arrays of mesh data from regular 2D arrays into the DD -grid_ggd: grid_ggd within edge_profiles -space: space in edge_profiles -mesh_r: Matrix of R values along a mesh. Should be 2D. The two dimensions are in - the radial and poloidal directions. -mesh_z: Z values to go with mesh_r. -cut: Poloidal index of a cut between two groups of disconnected cells. Poloidal - connections (faces, cells) will not be added between this poloidal index - and the next index. 
+ + - `grid_ggd`: `grid_ggd` within edge_profiles + - `space`: space in edge_profiles + - `mesh_r`: Matrix of R values along a mesh. Should be 2D. The two dimensions are in + the radial and poloidal directions. + - `mesh_z`: Z values to go with mesh_r. + - `cut`: Poloidal index of a cut between two groups of disconnected cells. Poloidal + connections (faces, cells) will not be added between this poloidal index + and the next index. """ -#!format on function record_regular_mesh!( - grid_ggd, - space, + grid_ggd::IMASDD.edge_profiles__grid_ggd, + space::IMASDD.edge_profiles__grid_ggd___space, mesh_r::Matrix{Float64}, mesh_z::Matrix{Float64}, cut::Int64, @@ -736,29 +791,23 @@ function record_regular_mesh!( grid_ggd.grid_subset[n_existing_subsets+i].identifier.index = new_subset_indices[i] end - ext_nodes_sub = get_grid_subset_with_index(grid_ggd, -201) - ext_edges_sub = get_grid_subset_with_index(grid_ggd, -202) - ext_xedges_sub = get_grid_subset_with_index(grid_ggd, -203) - ext_yedges_sub = get_grid_subset_with_index(grid_ggd, -204) - ext_cells_sub = get_grid_subset_with_index(grid_ggd, -205) + ext_nodes_sub = get_grid_subset(grid_ggd, -201) + ext_edges_sub = get_grid_subset(grid_ggd, -202) + ext_xedges_sub = get_grid_subset(grid_ggd, -203) + ext_yedges_sub = get_grid_subset(grid_ggd, -204) + ext_cells_sub = get_grid_subset(grid_ggd, -205) # Preserve record of standard (non extended) mesh for i ∈ 1:5 - std_sub = get_grid_subset_with_index(grid_ggd, -i) - orig_sub = get_grid_subset_with_index(grid_ggd, i) - resize!(std_sub.element, length(orig_sub.element)) - for j ∈ 1:length(orig_sub.element) - std_sub.element[j] = deepcopy(orig_sub.element[j]) - end - std_sub.identifier.index = -i - std_sub.dimension = deepcopy(orig_sub.dimension) - std_sub.metric = deepcopy(orig_sub.metric) + orig_sub = get_grid_subset(grid_ggd, i) + grid_ggd.grid_subset[n_existing_subsets+i] = deepcopy(orig_sub) + grid_ggd.grid_subset[n_existing_subsets+i].identifier.index = -i end - all_nodes_sub 
= get_grid_subset_with_index(grid_ggd, 1) - all_edges_sub = get_grid_subset_with_index(grid_ggd, 2) - all_xedges_sub = get_grid_subset_with_index(grid_ggd, 3) - all_yedges_sub = get_grid_subset_with_index(grid_ggd, 4) - all_cells_sub = get_grid_subset_with_index(grid_ggd, 5) + all_nodes_sub = get_grid_subset(grid_ggd, 1) + all_edges_sub = get_grid_subset(grid_ggd, 2) + all_xedges_sub = get_grid_subset(grid_ggd, 3) + all_yedges_sub = get_grid_subset(grid_ggd, 4) + all_cells_sub = get_grid_subset(grid_ggd, 5) nodes = resize!(o0.object, n_nodes) edges = resize!(o1.object, n_edges) @@ -780,7 +829,7 @@ function record_regular_mesh!( add_subset_element!(all_nodes_sub, space_idx, node_dim, node_idx) # Edges - if (i > 1) & (i != cut) # i-1 to i in the npol direction + if (i > 1) & (i != (cut + 1)) # i-1 to i in the npol direction edge_idx1 = edge_start1 + iii * e1_per_i + jj edges[edge_idx1].nodes = [node_idx, node_idx - n_per_i] resize!(edges[edge_idx1].boundary, 2) @@ -804,7 +853,7 @@ function record_regular_mesh!( end # Cells - if (i > 1) & (i != cut) & (j > 1) + if (i > 1) & (i != (cut + 1)) & (j > 1) cell_idx = cell_start + iii * c_per_i + jjj cells[cell_idx].nodes = [ node_idx, @@ -826,7 +875,7 @@ function record_regular_mesh!( end """ - convert_filename(filename::String) + convert_filename(filename::String)::String Converts a filename into a string that doesn't have illegal characters. The main application is removing the path separator from source files with full @@ -835,7 +884,7 @@ files used to form some data can be part of the cache name, allowing quick looku the cache filename is defined by the input files, and if it doesn't exist, it needs to be generated. 
""" -function convert_filename(filename::String) +function convert_filename(filename::String)::String filename_mod = replace(filename, "/" => "__") # Illegal on *nix bc it's the path separator filename_mod = replace(filename_mod, ":" => "--") # Illegal on mac and windows filename_mod = replace(filename_mod, "\\" => "__") # Illegal on windows @@ -848,38 +897,46 @@ function convert_filename(filename::String) return filename_mod end -#!format off """ - cached_mesh_extension!() + cached_mesh_extension!( + dd::IMASDD.dd, + eqdsk_file::String, + b2fgmtry::String; + eq_time_idx::Int64=1, + eq_profiles_2d_idx::Int64=1, + grid_ggd_idx::Int64=1, + space_idx::Int64=1, + clear_cache::Bool=false, + )::String Adds an extended mesh to a data dictionary, possibly from a cached result. -dd: The data dictionary. It will be modified in place. -eqdsk_file: the name of the EQDSK file that was used to get equilibrium data in - the dd. -b2fgmtry: the name of the SOLPS geometry file that was used to get GGD info in - edge_profiles in the dd. -eq_time_idx: Index of the time slice in equilibrium -eq_profiles_2d_idx: Index of the 2D profile set in equilibrium - (there is usually only one) -grid_ggd_idx: Index of the grid_ggd set in edge_profiles -space_idx: Index of the space -clear_cache: delete any existing cache file (for use in testing) + +Input Arguments: + + - `dd`: The data dictionary. It will be modified in place. + - `eqdsk_file`: the name of the EQDSK file that was used to get equilibrium data in + the dd. + - `b2fgmtry`: the name of the SOLPS geometry file that was used to get GGD info in + `edge_profiles` in the dd. 
+ - `eq_time_idx`: Index of the time slice in equilibrium + - `eq_profiles_2d_idx`: Index of the 2D profile set in equilibrium + (there is usually only one) + - `grid_ggd_idx`: Index of the `grid_ggd` set in edge_profiles + - `space_idx`: Index of the space + - `clear_cache`: delete any existing cache file (for use in testing) """ -#!format on function cached_mesh_extension!( - dd::OMAS.dd, + dd::IMASDD.dd, eqdsk_file::String, b2fgmtry::String; eq_time_idx::Int64=1, eq_profiles_2d_idx::Int64=1, grid_ggd_idx::Int64=1, space_idx::Int64=1, - clear_cache=false, -) + clear_cache::Bool=false, +)::String path = "$(@__DIR__)/../data/" - eqdsk_file_mod = convert_filename(eqdsk_file) - b2fgmtry_mod = convert_filename(eqdsk_file) - cached_ext_name = path * eqdsk_file_mod * "_" * b2fgmtry_mod * ".mesh_ext.json" + cached_ext_name = path * string(hash(eqdsk_file * b2fgmtry)) * ".mesh_ext.json" if clear_cache rm(cached_ext_name; force=true) return cached_ext_name @@ -933,12 +990,18 @@ function cached_mesh_extension!( end """ - function mesh_extension_sol() + mesh_extension_sol!( + dd::IMASDD.dd; + eq_time_idx::Int64=1, + eq_profiles_2d_idx::Int64=1, + grid_ggd_idx::Int64=1, + space_idx::Int64=1, + ) Extends the mesh out into the SOL """ function mesh_extension_sol!( - dd::OMAS.dd; + dd::IMASDD.dd; eq_time_idx::Int64=1, eq_profiles_2d_idx::Int64=1, grid_ggd_idx::Int64=1, @@ -985,13 +1048,16 @@ end """ fill_in_extrapolated_edge_profile!( - dd::OMAS.dd, quantity_name::String; method::String="simple", + dd::IMASDD.dd, + quantity_name::String; + method::String="simple", + eq_time_idx::Int64=1, ) JUST A PLACEHOLDER FOR NOW. DOESN'T ACTUALLY WORK YET. 
""" function fill_in_extrapolated_edge_profile!( - dd::OMAS.dd, + dd::IMASDD.dd, quantity_name::String; method::String="simple", eq_time_idx::Int64=1, diff --git a/src/actuator_model.jl b/src/unit_utils.jl similarity index 59% rename from src/actuator_model.jl rename to src/unit_utils.jl index ec0a0f0..739a55d 100644 --- a/src/actuator_model.jl +++ b/src/unit_utils.jl @@ -1,9 +1,7 @@ -# Actuator models to translate commands (probably in V) into gas flows - -using PhysicalConstants.CODATA2018 +using PhysicalConstants.CODATA2018: BoltzmannConstant, ElementaryCharge using Unitful: Unitful -using Interpolations: Interpolations -using YAML: YAML + +export gas_unit_converter torrl_per_pam3 = Float64( Unitful.upreferred((1 * Unitful.Torr * Unitful.L) / (Unitful.Pa * Unitful.m^3)), @@ -28,7 +26,13 @@ electrons_per_molecule = Dict( ) """ -gas_unit_converter() + gas_unit_converter( + value_in::Float64, + units_in::String, + units_out::String; + species::String="H", + temperature::Float64=293.15, + ) Converts gas flows between different units. Uses ideal gas law to convert between Pressure * volume type flows / quantities and count / current types of units. @@ -36,8 +40,11 @@ There is a version that accepts floats in and outputs floats, and another that deals in Unitful quantities. """ function gas_unit_converter( - value_in::Float64, units_in::String, units_out::String; species::String="H", - temperature=293.15, + value_in::Float64, + units_in::String, + units_out::String; + species::String="H", + temperature::Float64=293.15, ) if units_in == units_out return value_in @@ -76,26 +83,38 @@ function gas_unit_converter( end """ -gas_unit_converter() + gas_unit_converter( + value_in::Unitful.Quantity, + units_in::String, + units_out::String; + species::String="H", + temperature=293.15 * Unitful.K, + ) Converts gas flows between different units. Uses ideal gas law to convert between Pressure * volume type flows / quantities and count / current types of units. 
This is the Unitful version. + Output will be unitful, but the units are not simplified automatically. You can perform operations such as -(output |> Unitful.upreferred).val -Unitful.uconvert(Unitful.whatever, output).val + + - `(output |> Unitful.upreferred).val` + - `Unitful.uconvert(Unitful.whatever, output).val` + to handle simplification or conversion of units. -Although this function pretends torr L s^-1 and Pa m^3 s^-1 are different, use of -Unitful should cause them to behave the same way as long as you simplify or convert -units at the end. This means that you can use other pressure*volume type gas units -and call them torr L s^-1 and the script will deal with them up to having messy -units in the output. +Although this function pretends torr L s``^{-1}`` and Pa m``^3`` s``^{-1}`` are +different, use of Unitful should cause them to behave the same way as long as you +simplify or convert units at the end. This means that you can use other pressure*volume +type gas units and call them torr L s``^{-1}`` and the script will deal with them up to +having messy units in the output. """ function gas_unit_converter( - value_in::Unitful.Quantity, units_in::String, units_out::String; - species::String="H", temperature=293.15 * Unitful.K, + value_in::Unitful.Quantity, + units_in::String, + units_out::String; + species::String="H", + temperature=293.15 * Unitful.K, ) if units_in == units_out return value_in @@ -144,79 +163,3 @@ function gas_unit_converter( end return value_in .* conversion_factor end - -function select_default_config(model::String) - froot = model - if model == "instant" - froot = "simple" - end - filename = froot * "_gas_valve.yml" - return filename -end - -""" - model_gas_valve() - -The main function for handling a gas valve model. Has logic for selecting models -and configurations. 
-""" -function model_gas_valve( - model::String; configuration_file::String="auto", species::String="D2", -) - # Select configuration file - if configuration_file == "auto" - configuration_file = select_default_config(model) - end - default_config_path = "$(@__DIR__)/../config/" - if !occursin("/", configuration_file) - configuration_file = default_config_path * configuration_file - end - if !isfile(configuration_file) - throw(ArgumentError("Configuration file not found: " * configuration_file)) - end - config = YAML.load_file(configuration_file) - if (species in ["H", "H2", "D", "T", "T2"]) & !(species in keys(config)) - config = config["D2"] # They should all be the same - elseif (species in ["CH"]) & !(species in keys(config)) - config = config["CD"] - elseif (species in ["CH4", "CD4"]) & !(species in keys(config)) - config = config["CD4"] - else - config = config[species] - end - - if model == "simple" - function simple_gas_model(t, command) - flow0 = instant_gas_model(command, config) - t_ext = copy(t) - prepend!(t_ext, minimum(t) - config["delay"]) - flow0_ext = copy(flow0) - prepend!(flow0_ext, flow0[1]) - interp = Interpolations.linear_interpolation(t_ext, flow0_ext) - delayed_flow = interp.(t .- config["delay"]) - return lowpass_filter(t, delayed_flow, config["tau"]) - end - return simple_gas_model - elseif model == "instant" - instant_gas_model_(t, command) = instant_gas_model(command, config) - return instant_gas_model_ - else - throw(ArgumentError("Unrecognized model: " * model)) - end -end - -function instant_gas_model(command, config) - return config["p1"] .* (sqrt.(((command * config["p2"]) .^ 2.0 .+ 1) .- 1)) -end - -function lowpass_filter_(raw, previous_smooth, dt, tau) - return previous_smooth + dt / (dt + tau) * (raw - previous_smooth) -end - -function lowpass_filter(t, x, tau) - xs = zeros(length(t)) - for i ∈ 2:length(t) - xs[i] = lowpass_filter_(x[i], xs[i-1], t[i] - t[i-1], tau) - end - return xs -end diff --git a/test/Project.toml 
b/test/Project.toml deleted file mode 100644 index 495d8a9..0000000 --- a/test/Project.toml +++ /dev/null @@ -1,3 +0,0 @@ -[deps] -ArgParse = "c7e460c6-2fb9-53a9-8c5b-16f535851c63" -Test = "8dfed614-e22c-5e08-85e1-65c5234f0b40" diff --git a/test/runtests.jl b/test/runtests.jl index ff6f825..fef62d7 100644 --- a/test/runtests.jl +++ b/test/runtests.jl @@ -1,23 +1,20 @@ using SD4SOLPS: SD4SOLPS using SOLPS2IMAS: SOLPS2IMAS -using OMAS: OMAS +using IMASDD: IMASDD using EFIT: EFIT using Plots using Test using Unitful: Unitful using Interpolations: Interpolations using ArgParse: ArgParse -using GGDUtils: GGDUtils, get_grid_subset_with_index +using GGDUtils: GGDUtils, get_grid_subset function parse_commandline() s = ArgParse.ArgParseSettings(; description="Run tests. Default is all tests.") ArgParse.add_arg_table!(s, - ["--lightweight_utilities"], - Dict(:help => "Test only lightweight utilities", - :action => :store_true), - ["--actuator"], - Dict(:help => "Test only actuator model", + ["--unit_utils"], + Dict(:help => "Test only unit conversion utilities", :action => :store_true), ["--core_profile_extension"], Dict(:help => "Test only core profile extension", @@ -97,24 +94,22 @@ function define_default_sample_set() splitdir(pathof(SD4SOLPS))[1] * "/../sample/ITER_Lore_2296_00000/baserun" sample_path3 = splitdir(pathof(SD4SOLPS))[1] * "/../sample/ITER_Lore_2296_00000/run_restart" - sample_path4 = splitdir(pathof(SOLPS2IMAS))[1] * "/../samples" # Requires dvc pull before the full samples will be loaded # Sorry, the minimal samples are too minimal for this one. 
file_list = SD4SOLPS.find_files_in_allowed_folders( - sample_path, sample_path2, sample_path3, sample_path4; + sample_path, sample_path2, sample_path3; eqdsk_file="thereisntoneyet", - allow_reduced_versions=false, ) - b2fgmtry, b2time, b2mn, gridspec, eqdsk = file_list + b2fgmtry, b2time, b2mn, eqdsk = file_list eqdsk = splitdir(pathof(SD4SOLPS))[1] * "/../sample/ITER_Lore_2296_00000/EQDSK/g002296.00200" - return b2fgmtry, b2time, b2mn, gridspec, eqdsk + return b2fgmtry, b2time, b2mn, eqdsk end -if args["lightweight_utilities"] - @testset "lightweight_utilities" begin +if args["unit_utils"] + @testset "Unit conversion utilities" begin # Gas unit converter flow_tls = 40.63 * Unitful.Torr * Unitful.L / Unitful.s flow_pam3 = SD4SOLPS.gas_unit_converter(flow_tls, "torr L s^-1", "Pa m^3 s^-1") @@ -147,21 +142,6 @@ if args["lightweight_utilities"] end end -if args["actuator"] - @testset "actuator" begin - t = collect(LinRange(0, 2.0, 1001)) - cmd = (t .> 1.0) * 1.55 + (t .> 1.5) * 0.93 + (t .> 1.8) * 0.25 - - instant_flow_function = SD4SOLPS.model_gas_valve("instant") - instant_flow = instant_flow_function(t, cmd) - @test length(instant_flow) == length(cmd) - - simple_flow_function = SD4SOLPS.model_gas_valve("simple") - simple_flow = simple_flow_function(t, cmd) - @test length(simple_flow) == length(cmd) - end -end - if args["core_profile_extension"] @testset "core_profile_extension" begin # Just the basic profile extrapolator ------------------ @@ -173,20 +153,19 @@ if args["core_profile_extension"] # The full workflow -------------------------------------- # Setup sample DD - b2fgmtry, b2time, b2mn, gridspec, eqdsk = define_default_sample_set() + b2fgmtry, b2time, b2mn, eqdsk = define_default_sample_set() println(b2fgmtry) println(b2time) println(b2mn) - println(gridspec) println(eqdsk) # If these files don't exist, complete the DVC sample setup and try again @test isfile(b2fgmtry) @test isfile(b2time) @test isfile(b2mn) - @test isfile(gridspec) @test isfile(eqdsk) - 
dd = SOLPS2IMAS.solps2imas(b2fgmtry, b2time, gridspec, b2mn) - SD4SOLPS.geqdsk_to_imas!(eqdsk, dd) + eqdsk_time = parse(Float64, split(eqdsk, ".")[end]) / 1000.0 + dd = SOLPS2IMAS.solps2imas(b2fgmtry, b2time; b2mn=b2mn) + SD4SOLPS.geqdsk_to_imas!(eqdsk, dd; set_time=eqdsk_time) rho = dd.equilibrium.time_slice[1].profiles_1d.rho_tor_norm if !SD4SOLPS.check_rho_1d(dd; time_slice=1) @@ -220,9 +199,10 @@ end if args["edge_profile_extension"] @testset "edge_profile_extension" begin # Test for getting mesh spacing - b2fgmtry, b2time, b2mn, gridspec, eqdsk = define_default_sample_set() - dd = SOLPS2IMAS.solps2imas(b2fgmtry, b2time, gridspec, b2mn) - SD4SOLPS.geqdsk_to_imas!(eqdsk, dd) + b2fgmtry, b2time, b2mn, eqdsk = define_default_sample_set() + eqdsk_time = parse(Float64, split(eqdsk, ".")[end]) / 1000.0 + dd = SOLPS2IMAS.solps2imas(b2fgmtry, b2time; b2mn=b2mn) + SD4SOLPS.geqdsk_to_imas!(eqdsk, dd; set_time=eqdsk_time) dpsin = SD4SOLPS.mesh_psi_spacing(dd) @test dpsin > 0.0 @@ -231,7 +211,7 @@ if args["edge_profile_extension"] grid_ggd = dd.edge_profiles.grid_ggd[grid_ggd_idx] extended_subs = 1:5 orig_subs = [ - deepcopy(get_grid_subset_with_index(grid_ggd, i)) for + deepcopy(get_grid_subset(grid_ggd, i)) for i ∈ extended_subs ] cfn = SD4SOLPS.cached_mesh_extension!(dd, eqdsk, b2fgmtry; clear_cache=true) @@ -239,9 +219,9 @@ if args["edge_profile_extension"] SD4SOLPS.cached_mesh_extension!(dd, eqdsk, b2fgmtry; grid_ggd_idx=grid_ggd_idx) for j ∈ extended_subs orig_sub = orig_subs[j] - std_sub = get_grid_subset_with_index(grid_ggd, -j) - all_sub = get_grid_subset_with_index(grid_ggd, j) - ext_sub = get_grid_subset_with_index(grid_ggd, -200 - j) + std_sub = get_grid_subset(grid_ggd, -j) + all_sub = get_grid_subset(grid_ggd, j) + ext_sub = get_grid_subset(grid_ggd, -200 - j) orig_indices = [ele.object[1].index for ele ∈ orig_sub.element] std_indices = [ele.object[1].index for ele ∈ std_sub.element] all_indices = [ele.object[1].index for ele ∈ all_sub.element] @@ -276,24 
+256,19 @@ end if args["heavy_utilities"] @testset "heavy_utilities" begin # Test for finding files in allowed folders - sample_path = splitdir(pathof(SOLPS2IMAS))[1] * "/../samples" - file_list = SD4SOLPS.find_files_in_allowed_folders( - sample_path; eqdsk_file="thereisntoneyet", allow_reduced_versions=true, - ) - @test length(file_list) == 5 - b2fgmtry, b2time, b2mn, gridspec, eqdsk = file_list + file_list = define_default_sample_set() + @test length(file_list) == 4 + b2fgmtry, b2time, b2mn, eqdsk = file_list @test length(b2fgmtry) > 10 @test endswith(b2fgmtry, "b2fgmtry_red") | endswith(b2fgmtry, "b2fgmtry") @test length(b2time) > 10 @test endswith(b2time, "b2time_red.nc") | endswith(b2time, "b2time.nc") @test length(b2mn) > 10 @test endswith(b2mn, "b2mn.dat") - @test length(gridspec) > 10 - @test endswith(gridspec, "gridspacedesc.yml") # Test for sweeping 1D core profiles into 2D R,Z # (or anyway evaluating them at any R,Z location) - dd = OMAS.dd() + dd = IMASDD.dd() eqdsk_file = splitdir(pathof(SD4SOLPS))[1] * "/../sample/geqdsk_iter_small_sample" SD4SOLPS.geqdsk_to_imas!(eqdsk_file, dd) @@ -302,8 +277,6 @@ if args["heavy_utilities"] resize!(dd.core_profiles.profiles_1d, prof_time_idx) n = 101 rho_n = Array(LinRange(0, 1.0, n)) - resize!(dd.core_profiles.profiles_1d[prof_time_idx].grid.rho_tor_norm, n) - resize!(dd.core_profiles.profiles_1d[prof_time_idx].electrons.density, n) dd.core_profiles.profiles_1d[prof_time_idx].grid.rho_tor_norm = rho_n dd.core_profiles.profiles_1d[prof_time_idx].electrons.density = make_test_profile(rho_n) @@ -319,7 +292,11 @@ if args["heavy_utilities"] println("DD was repaired (rho added) for core 2d utility test") end density_on_grid = - SD4SOLPS.core_profile_2d(dd, prof_time_idx, eq_time_idx, quantity).(r, z) + GGDUtils.interp( + dd.core_profiles.profiles_1d[1].electrons.density, + dd.core_profiles.profiles_1d[1], + dd.equilibrium.time_slice[1], + ).(r, z) @test size(density_on_grid) == (length(rg), length(zg)) end end @@ -327,7 
+304,7 @@ end if args["repair_eq"] @testset "repair_eq" begin # Prepare sample - dd = OMAS.dd() + dd = IMASDD.dd() eqdsk = splitdir(pathof(SD4SOLPS))[1] * "/../sample/geqdsk_iter_small_sample" SD4SOLPS.geqdsk_to_imas!(eqdsk, dd) # Make sure rho is missing @@ -359,8 +336,18 @@ if args["geqdsk_to_imas"] tslice = 1 for sample_file ∈ sample_files println(sample_file) - dd = OMAS.dd() - SD4SOLPS.geqdsk_to_imas!(sample_file, dd; time_index=tslice) + dd = IMASDD.dd() + if endswith(sample_file, "00") + eqdsk_time = parse(Float64, split(sample_file, ".")[end]) / 1000.0 + else + eqdsk_time = nothing + end + SD4SOLPS.geqdsk_to_imas!( + sample_file, + dd; + set_time=eqdsk_time, + time_index=tslice, + ) eqt = dd.equilibrium.time_slice[tslice] # global @@ -435,7 +422,6 @@ if args["preparation"] eqdsk_file = "geqdsk_iter_small_sample" sample_paths = [ splitdir(pathof(SD4SOLPS))[1] * "/../sample", - splitdir(pathof(SOLPS2IMAS))[1] * "/../samples", ] core_method = "simple" edge_method = "simple" @@ -443,9 +429,8 @@ if args["preparation"] output_format = "json" dd = SD4SOLPS.preparation( eqdsk_file, - sample_paths...; + sample_paths; core_method=core_method, - edge_method=edge_method, filename=filename, output_format=output_format, )