From f4adfccf1df415b73d083a4f56a4209b941d7cf5 Mon Sep 17 00:00:00 2001
From: Ben Nibbelink
Date: Sun, 12 Nov 2023 08:31:10 -0600
Subject: [PATCH 01/10] update libxml requirement in cmakelists

---
 CMakeLists.txt | 19 ++++++++++---------
 1 file changed, 10 insertions(+), 9 deletions(-)

diff --git a/CMakeLists.txt b/CMakeLists.txt
index 1bab54b4a6..3f6fc0b9cb 100644
--- a/CMakeLists.txt
+++ b/CMakeLists.txt
@@ -102,6 +102,9 @@ IF(NOT CYCLUS_DOC_ONLY)
     MESSAGE("-- Dependency Library Hints (DEPS_LIB_HINTS): ${DEPS_LIB_HINTS}")
     MESSAGE("-- Dependency Include Hints (DEPS_INCLUDE_HINTS): ${DEPS_INCLUDE_HINTS}")

+    # Search pkg-config utility first
+    FIND_PACKAGE(PkgConfig REQUIRED)
+
     # Find cyclus
     FIND_PACKAGE(Cyclus REQUIRED)
     SET(
@@ -115,15 +118,13 @@ IF(NOT CYCLUS_DOC_ONLY)
     INCLUDE(UseCyclus)

     # Find LibXML++ and dependencies
-    FIND_PACKAGE(LibXML++)
-    IF(NOT LibXML++_LIBRARIES)
-        FIND_LIBRARY(LibXML++ REQUIRED ${DEPS_HINTS})
-    ENDIF()
-    SET(CYCAMORE_INCLUDE_DIRS ${CYCAMORE_INCLUDE_DIRS} ${LibXML++_INCLUDE_DIR} ${Glibmm_INCLUDE_DIRS} ${LibXML++Config_INCLUDE_DIR})
-    SET(LIBS ${LIBS} ${LibXML++_LIBRARIES})
-    MESSAGE("-- LIBS: ${LIBS}")
-
-    MESSAGE("-- LD_LIBRARY_PATH: $ENV{LD_LIBRARY_PATH}")
+    pkg_check_modules(LIBXMLXX IMPORTED_TARGET libxml++-4.0)
+    IF ( NOT LIBXMLXX_LIBRARIES )
+        pkg_check_modules(LIBXMLXX REQUIRED IMPORTED_TARGET libxml++-2.6)
+    ENDIF ( NOT LIBXMLXX_LIBRARIES )
+    SET(LIBS ${LIBS} ${LIBXMLXX_LIBRARIES})
+    message("-- LibXML++ Include Dir: ${LIBXMLXX_INCLUDE_DIRS}")
+    message("-- LibXML++ Libraries: ${LIBXMLXX_LIBRARIES}")

     # Include the boost header files and the program_options library
     # Please be sure to use Boost rather than BOOST.
From 6ee8f564656f0cd12f718639a510badf0895c8a4 Mon Sep 17 00:00:00 2001
From: Ben Nibbelink
Date: Sun, 12 Nov 2023 08:32:38 -0600
Subject: [PATCH 02/10] update sqlite requirement in cmakelists

---
 CMakeLists.txt | 11 ++++++-----
 1 file changed, 6 insertions(+), 5 deletions(-)

diff --git a/CMakeLists.txt b/CMakeLists.txt
index 3f6fc0b9cb..155f41b9a8 100644
--- a/CMakeLists.txt
+++ b/CMakeLists.txt
@@ -182,11 +182,12 @@ IF(NOT CYCLUS_DOC_ONLY)
     MESSAGE("-- COIN Include directories: ${COIN_INCLUDE_DIRS}")
     MESSAGE("-- COIN Libraries: ${COIN_LIBRARIES}")

-    FIND_PACKAGE( Sqlite3 REQUIRED )
-    SET(CYCAMORE_INCLUDE_DIRS ${CYCAMORE_INCLUDE_DIRS} ${SQLITE3_INCLUDE_DIR})
-    SET(LIBS ${LIBS} ${SQLITE3_LIBRARIES})
-    MESSAGE("-- SQLITE3 Include directories: ${SQLITE3_INCLUDE_DIR}")
-    MESSAGE("-- SQLITE3 Libraries: ${SQLITE3_LIBRARIES}")
+    # find SQLite
+    FIND_PACKAGE( SQLite3 REQUIRED )
+    SET(CYCAMORE_INCLUDE_DIRS ${CYCAMORE_INCLUDE_DIRS} ${SQLite3_INCLUDE_DIR})
+    SET(LIBS ${LIBS} ${SQLite3_LIBRARIES})
+    MESSAGE("-- SQLite3 Include directories: ${SQLite3_INCLUDE_DIR}")
+    MESSAGE("-- SQLite3 Libraries: ${SQLite3_LIBRARIES}")

     #
     # Some optional libraries to link in, as availble. Required for conda.
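Patches 01 and 02 above move LibXML++ detection onto pkg-config, preferring the libxml++-4.0 module and falling back to libxml++-2.6, and switch SQLite detection to CMake's SQLite3 package. One quick way to confirm locally which libxml++ module pkg-config will report, shown here only as an illustrative sketch and not part of the patch series, is to query pkg-config directly with the same module names the CMake change assumes:

import shutil
import subprocess

def find_libxmlxx(candidates=("libxml++-4.0", "libxml++-2.6")):
    """Return (module, version) for the first libxml++ pkg-config module found,
    mirroring the fallback order in the CMake change, or None if unavailable."""
    if shutil.which("pkg-config") is None:
        return None  # pkg-config itself is missing
    for module in candidates:
        # `pkg-config --exists <module>` exits with status 0 when the module is registered.
        if subprocess.run(["pkg-config", "--exists", module]).returncode == 0:
            version = subprocess.run(["pkg-config", "--modversion", module],
                                     capture_output=True, text=True).stdout.strip()
            return module, version
    return None

print(find_libxmlxx())

On a machine with only the older binding installed this prints something like ("libxml++-2.6", "2.40.1"), which is exactly the case the REQUIRED fallback call in the patch is meant to cover.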
From 860eb11ef3063aa243a280116175e1ed5c35d6fe Mon Sep 17 00:00:00 2001 From: Ben Nibbelink Date: Sun, 12 Nov 2023 08:34:19 -0600 Subject: [PATCH 03/10] add Dockerfile and workflows --- .github/workflows/build_test.yml | 58 ++++++++++++++++++++++ .github/workflows/build_test_publish.yml | 63 ++++++++++++++++++++++++ docker/Dockerfile | 16 ++++++ 3 files changed, 137 insertions(+) create mode 100644 .github/workflows/build_test.yml create mode 100644 .github/workflows/build_test_publish.yml create mode 100644 docker/Dockerfile diff --git a/.github/workflows/build_test.yml b/.github/workflows/build_test.yml new file mode 100644 index 0000000000..f489dac0f0 --- /dev/null +++ b/.github/workflows/build_test.yml @@ -0,0 +1,58 @@ +name: Build/Test for PR and collaborator push + +on: + # allows us to run workflows manually + workflow_dispatch: + pull_request: + paths-ignore: + - '.github/workflows/build_test_publish.yml' + - 'docker/**' + - 'doc/**' + push: + paths-ignore: + - '.github/workflows/build_test_publish.yml' + - 'docker/**' + - 'doc/**' + +jobs: + build-and-test: + + runs-on: ubuntu-latest + + strategy: + fail-fast: false + matrix: + ubuntu_versions : [ + 20.04, + 22.04, + ] + pkg_mgr : [ + apt, + conda, + ] + cyclus_tag: [ + latest, + ] + + container: + image: ghcr.io/cyclus/cyclus_${{ matrix.ubuntu_versions }}_${{ matrix.pkg_mgr }}/cyclus:${{matrix.cyclus_tag}} + + steps: + - name: Checkout Cycamore + uses: actions/checkout@v3 + + - name: Change Home + run: | + echo "HOME=/root" >> "$GITHUB_ENV" + + - name: Build Cycamore + run: | + python install.py --prefix=/root/.local -j 2 --build-type=Release --core-version 99999.99999 + + - name: Cycamore Unit Tests + run: | + cycamore_unit_tests + + - name: Cycamore Python Tests + run: | + cd tests && python -m pytest diff --git a/.github/workflows/build_test_publish.yml b/.github/workflows/build_test_publish.yml new file mode 100644 index 0000000000..86a79d9fad --- /dev/null +++ b/.github/workflows/build_test_publish.yml @@ -0,0 +1,63 @@ +name: Build and Test Dependency Images + +on: + # allows us to run workflows manually + workflow_dispatch: + pull_request: + paths: + - '.github/workflows/build_test_publish.yml' + - 'docker/**' + push: + branches: + - main + paths: + - '.github/workflows/build_test_publish.yml' + - 'docker/**' + +jobs: + build-dependency-and-test-img: + runs-on: ubuntu-latest + + strategy: + matrix: + ubuntu_versions : [ + 20.04, + 22.04, + ] + pkg_mgr : [ + apt, + conda, + ] + + name: Installing Dependencies, Building cyclus and running tests + steps: + - name: default environment + run: | + echo "tag-latest-on-default=false" >> "$GITHUB_ENV" + + - name: condition on trigger parameters + if: ${{ github.repository_owner == 'cyclus' && github.ref == 'refs/heads/main' }} + run: | + echo "tag-latest-on-default=true" >> "$GITHUB_ENV" + + - name: Log in to the Container registry + uses: docker/login-action@v2 + with: + registry: ghcr.io + username: ${{ github.repository_owner }} + password: ${{ secrets.GITHUB_TOKEN }} + + - name: Checkout repository + uses: actions/checkout@v3 + + - name: Installing Dependencies in Docker image + uses: firehed/multistage-docker-build-action@v1 + with: + repository: ghcr.io/${{ github.repository_owner }}/cycamore_${{ matrix.ubuntu_versions }}_${{ matrix.pkg_mgr }} + stages: cycamore + server-stage: cycamore-test + quiet: false + parallel: true + tag-latest-on-default: ${{ env.tag-latest-on-default }} + dockerfile: docker/Dockerfile + build-args: pkg_mgr=${{ matrix.pkg_mgr }}, ubuntu_version=${{ 
matrix.ubuntu_versions }} \ No newline at end of file diff --git a/docker/Dockerfile b/docker/Dockerfile new file mode 100644 index 0000000000..8d2d339526 --- /dev/null +++ b/docker/Dockerfile @@ -0,0 +1,16 @@ +ARG pkg_mgr=apt +ARG ubuntu_version=22.04 + +FROM ghcr.io/cyclus/cyclus_${ubuntu_version}_${pkg_mgr}/cyclus as cycamore +ARG make_cores=2 + +COPY . /cycamore +WORKDIR /cycamore + +RUN python install.py -j ${make_cores} --build-type=Release --core-version 99999.99999 + +FROM cycamore as cycamore-test +RUN cycamore_unit_tests + +FROM cycamore as cycamore-pytest +RUN cd tests && python -m pytest From aaf9e83ac54c0615f93102b50f563903d9160ddb Mon Sep 17 00:00:00 2001 From: Ben Nibbelink Date: Sun, 12 Nov 2023 08:34:45 -0600 Subject: [PATCH 04/10] delete circle.yml --- circle.yml | 166 ----------------------------------------------------- 1 file changed, 166 deletions(-) delete mode 100644 circle.yml diff --git a/circle.yml b/circle.yml deleted file mode 100644 index c50d1ee010..0000000000 --- a/circle.yml +++ /dev/null @@ -1,166 +0,0 @@ -version: 2 - -jobs: - # Build Cyclus - build: - docker: - - image: cyclus/cyclus:latest - working_directory: ~/cycamore - steps: - # Ensure your image has git (required by git to clone via SSH) so that CircleCI can clone your repo - - run: apt-get -qq update; apt-get -y install git openssh-client - - checkout - - run: - name: Build Cycamore - command: | - python install.py -j 2 --build-type=Release \ - -DBLAS_LIBRARIES="/opt/conda/lib/libblas.so" \ - -DLAPACK_LIBRARIES="/opt/conda/lib/liblapack.so" - - run: - name: save SHA to a file - command: echo $CIRCLE_SHA1 > .circle-sha - - save_cache: - key: v1-repo-{{ checksum ".circle-sha" }} - paths: - - /root - - - - # Test - unit_test: - docker: - - image: cyclus/cyclus:latest - working_directory: ~/root - steps: - - run: - name: save SHA to a file - command: echo $CIRCLE_SHA1 > .circle-sha - - restore_cache: - keys: - - v1-repo-{{ checksum ".circle-sha" }} - - run: - name: Unit Test - command: /root/.local/bin/cycamore_unit_tests; exit $? - nosetest: - docker: - - image: cyclus/cyclus:latest - working_directory: ~/root - steps: - - run: - name: save SHA to a file - command: echo $CIRCLE_SHA1 > .circle-sha - - restore_cache: - keys: - - v1-repo-{{ checksum ".circle-sha" }} - - run: - name: Install nosetest - command: pip install nose - - run: - name: Nosetests - command: nosetests -w ~/cycamore/tests; exit $? - - - # Update docker container - deploy: # Cycamore -> Cycamore:latest - docker: - - image: circleci/ruby:2.4-node - working_directory: ~/cycamore - steps: - - checkout - - run: - name: Place the proper Dockerfile - command: cp docker/master-ci/Dockerfile . - - setup_remote_docker - - run: - name: log into Docker - command: | - docker login -u $DOCKER_USER -p $DOCKER_PASS - - run: - name: Build Docker container - command: docker build --rm=false -t cyclus/cycamore:latest . - - run: - name: Push on DockerHub - command: docker push cyclus/cycamore:latest # push to docker depot - - deploy_stable: # Cycamore:stable - docker: - - image: circleci/ruby:2.4-node - working_directory: ~/cycamore - steps: - - checkout - - run: - name: Place the proper Dockerfile - command: cp docker/release-ci/Dockerfile . - - setup_remote_docker - - run: - name: Log on DockerHub - command: | - docker login -u $DOCKER_USER -p $DOCKER_PASS - - run: - name: Build Docker container - command: docker build -t cyclus/cycamore:stable . 
- - run: - name: Push on DockerHub - command: docker push cyclus/cycamore:stable # push to docker depot - - - # Debian package generation (on master update) - deb_generation: - docker: - - image: circleci/ruby:2.4-node - working_directory: ~/cycamore - steps: - - checkout - - setup_remote_docker - - run: - name: Tag and Push on DockerHub - no_output_timeout: "20m" - command: | - docker/deb-ci/build_upload_deb.sh 14 - docker/deb-ci/build_upload_deb.sh 16 - - -workflows: - version: 2 - build_and_test: - jobs: - - # on a pr // all branch - - build - - unit_test: - requires: - - build - - nosetest: - requires: - - build - - # Merge on Master - - deploy: - filters: - branches: - only: master - requires: - - unit_test - - nosetest - - - # The following should now be done on version tag. - - deploy_stable: - filters: - branches: - ignore: /.*/ - tags: - only: /.*/ - requires: - - unit_test - - nosetest - - - deb_generation: - filters: - branches: - ignore: /.*/ - tags: - only: /.*/ - requires: - - unit_test - - nosetest From 04a5e37861da324c8c841b006dec4be04d7cb081 Mon Sep 17 00:00:00 2001 From: Ben Nibbelink Date: Sun, 12 Nov 2023 08:39:14 -0600 Subject: [PATCH 05/10] convert from nose to pytest --- tests/helper.py | 4 +- tests/test_regression.py | 365 ++++++++++++++++++--------------------- tests/test_run_inputs.py | 9 +- 3 files changed, 176 insertions(+), 202 deletions(-) diff --git a/tests/helper.py b/tests/helper.py index 1cb3c131e6..0340c323e2 100644 --- a/tests/helper.py +++ b/tests/helper.py @@ -5,8 +5,6 @@ import sys from hashlib import sha1 import numpy as np -import tables -from nose.tools import assert_equal CYCLUS_HAS_COIN = None @@ -85,7 +83,7 @@ def check_cmd(args, cwd, holdsrtn): f.seek(0) print("STDOUT + STDERR:\n\n" + f.read().decode()) holdsrtn[0] = rtn - assert_equal(rtn, 0) + assert rtn == 0 def cyclus_has_coin(): diff --git a/tests/test_regression.py b/tests/test_regression.py index ec4f2aa532..1c35b3e863 100644 --- a/tests/test_regression.py +++ b/tests/test_regression.py @@ -10,9 +10,7 @@ from numpy.testing import assert_almost_equal from cyclus.lib import Env -from nose.plugins.skip import SkipTest -from nose.tools import assert_equal, assert_true - +from pytest import skip import helper from helper import check_cmd, run_cyclus, table_exist, cyclus_has_coin @@ -24,7 +22,7 @@ def skip_if_dont_allow_milps(): """A don't run certain tests if MILPs are disabled.""" if not ALLOW_MILPS: - raise SkipTest("Cyclus was compiled without MILPS support or the " + raise skip("Cyclus was compiled without MILPS support or the " "ALLOW_MILPS env var was not set to true.") @@ -35,93 +33,93 @@ class TestRegression(object): tested, e.g., `self.inf_ = ./path/to/my/input_file.xml. See below for examples. 
""" - def __init__(self, *args, **kwargs): - self.ext = '.sqlite' - self.outf = str(uuid.uuid4()) + self.ext - self.inf = None - - def setUp(self): - if not self.inf: - raise TypeError(("self.inf must be set in derived classes " + @classmethod + def setup_class(cls, inf): + cls.ext = '.sqlite' + cls.outf = str(uuid.uuid4()) + cls.ext + cls.inf = inf + if not cls.inf: + raise TypeError(("cls.inf must be set in derived classes " "to run regression tests.")) - run_cyclus("cyclus", os.getcwd(), self.inf, self.outf) + run_cyclus("cyclus", os.getcwd(), cls.inf, cls.outf) # Get specific tables and columns - if self.ext == '.h5': - with tables.open_file(self.outf, mode="r") as f: + if cls.ext == '.h5': + with tables.open_file(cls.outf, mode="r") as f: # Get specific tables and columns - self.agent_entry = f.get_node("/AgentEntry")[:] - self.agent_exit = f.get_node("/AgentExit")[:] \ + cls.agent_entry = f.get_node("/AgentEntry")[:] + cls.agent_exit = f.get_node("/AgentExit")[:] \ if "/AgentExit" in f \ else None - self.enrichments = f.get_node("/Enrichments")[:] \ + cls.enrichments = f.get_node("/Enrichments")[:] \ if "/Enrichments" in f \ else None - self.resources = f.get_node("/Resources")[:] - self.transactions = f.get_node("/Transactions")[:] - self.compositions = f.get_node("/Compositions")[:] - self.info = f.get_node("/Info")[:] - self.rsrc_qtys = { - x["ResourceId"]: x["Quantity"] for x in self.resources} + cls.resources = f.get_node("/Resources")[:] + cls.transactions = f.get_node("/Transactions")[:] + cls.compositions = f.get_node("/Compositions")[:] + cls.info = f.get_node("/Info")[:] + cls.rsrc_qtys = { + x["ResourceId"]: x["Quantity"] for x in cls.resources} else: - self.conn = sqlite3.connect(self.outf) - self.conn.row_factory = sqlite3.Row - self.cur = self.conn.cursor() - exc = self.cur.execute - self.agent_entry = exc('SELECT * FROM AgentEntry').fetchall() - self.agent_exit = exc('SELECT * FROM AgentExit').fetchall() \ + cls.conn = sqlite3.connect(cls.outf) + cls.conn.row_factory = sqlite3.Row + cls.cur = cls.conn.cursor() + exc = cls.cur.execute + cls.agent_entry = exc('SELECT * FROM AgentEntry').fetchall() + cls.agent_exit = exc('SELECT * FROM AgentExit').fetchall() \ if len(exc( ("SELECT * FROM sqlite_master WHERE " "type='table' AND name='AgentExit'")).fetchall()) > 0 \ else None - self.enrichments = exc('SELECT * FROM Enrichments').fetchall() \ + cls.enrichments = exc('SELECT * FROM Enrichments').fetchall() \ if len(exc( ("SELECT * FROM sqlite_master WHERE " "type='table' AND name='Enrichments'")).fetchall()) > 0 \ else None - self.resources = exc('SELECT * FROM Resources').fetchall() - self.transactions = exc('SELECT * FROM Transactions').fetchall() - self.compositions = exc('SELECT * FROM Compositions').fetchall() - self.info = exc('SELECT * FROM Info').fetchall() - self.rsrc_qtys = { - x["ResourceId"]: x["Quantity"] for x in self.resources} - - def find_ids(self, spec, a, spec_col="Spec", id_col="AgentId"): - if self.ext == '.h5': + cls.resources = exc('SELECT * FROM Resources').fetchall() + cls.transactions = exc('SELECT * FROM Transactions').fetchall() + cls.compositions = exc('SELECT * FROM Compositions').fetchall() + cls.info = exc('SELECT * FROM Info').fetchall() + cls.rsrc_qtys = { + x["ResourceId"]: x["Quantity"] for x in cls.resources} + + @classmethod + def find_ids(cls, spec, a, spec_col="Spec", id_col="AgentId"): + if cls.ext == '.h5': return helper.find_ids(spec, a[spec_col], a[id_col]) else: return [x[id_col] for x in a if x[spec_col] == spec] - def 
to_ary(self, a, k): - if self.ext == '.sqlite': + @classmethod + def to_ary(cls, a, k): + if cls.ext == '.sqlite': return np.array([x[k] for x in a]) else: return a[k] - def tearDown(self): - if self.ext == '.sqlite': - self.conn.close() - if os.path.isfile(self.outf): - print("removing {0}".format(self.outf)) - os.remove(self.outf) + @classmethod + def teardown_class(cls): + if cls.ext == '.sqlite': + cls.conn.close() + if os.path.isfile(cls.outf): + print("removing {0}".format(cls.outf)) + os.remove(cls.outf) class _PhysorEnrichment(TestRegression): """This class tests the 1_Enrichment_2_Reactor.xml file related to the Cyclus Physor 2014 publication. The number of key facilities, the enrichment values, and the transactions to each reactor are tested. """ - def __init__(self, *args, **kwargs): - super(_PhysorEnrichment, self).__init__(*args, **kwargs) - - def setUp(self): - super(_PhysorEnrichment, self).setUp() - tbl = self.agent_entry - self.rx_id = self.find_ids(":cycamore:Reactor", tbl) - self.enr_id = self.find_ids(":cycamore:Enrichment", tbl) + @classmethod + def setup_class(cls, inf): + super(_PhysorEnrichment, cls).setup_class(inf) + tbl = cls.agent_entry + cls.rx_id = cls.find_ids(":cycamore:Reactor", tbl) + cls.enr_id = cls.find_ids(":cycamore:Enrichment", tbl) def test_deploy(self): - assert_equal(len(self.rx_id), 2) - assert_equal(len(self.enr_id), 1) + assert len(self.rx_id) == 2 + assert len(self.enr_id) == 1 def test_swu(self): enr = self.enrichments @@ -167,42 +165,39 @@ def test_xactions2(self): assert_array_almost_equal(exp, txs, decimal=2, err_msg=msg) class TestCBCPhysorEnrichment(_PhysorEnrichment): - def __init__(self, *args, **kwargs): - super(TestCBCPhysorEnrichment, self).__init__(*args, **kwargs) - self.inf = "../input/physor/1_Enrichment_2_Reactor.xml" + @classmethod + def setup_class(cls): skip_if_dont_allow_milps() + super(TestCBCPhysorEnrichment, cls).setup_class("../input/physor/1_Enrichment_2_Reactor.xml") class TestGreedyPhysorEnrichment(_PhysorEnrichment): - def __init__(self, *args, **kwargs): - super(TestGreedyPhysorEnrichment, self).__init__(*args, **kwargs) - self.inf = "../input/physor/greedy_1_Enrichment_2_Reactor.xml" + @classmethod + def setup_class(cls): + super(TestGreedyPhysorEnrichment, cls).setup_class("../input/physor/greedy_1_Enrichment_2_Reactor.xml") class _PhysorSources(TestRegression): """This class tests the 2_Sources_3_Reactor.xml file related to the Cyclus Physor 2014 publication. Reactor deployment and transactions between suppliers and reactors are tested. 
""" - def __init__(self, *args, **kwargs): - super(_PhysorSources, self).__init__(*args, **kwargs) - - def setUp(self): - super(_PhysorSources, self).setUp() - + @classmethod + def setup_class(cls, inf): + super(_PhysorSources, cls).setup_class(inf) # identify each reactor and supplier by id - tbl = self.agent_entry - rx_id = self.find_ids(":cycamore:Reactor", tbl) - self.r1, self.r2, self.r3 = tuple(rx_id) - s_id = self.find_ids(":cycamore:Source", tbl) - self.smox = self.transactions[0]["SenderId"] - s_id.remove(self.smox) - self.suox = s_id[0] + tbl = cls.agent_entry + rx_id = cls.find_ids(":cycamore:Reactor", tbl) + cls.r1, cls.r2, cls.r3 = tuple(rx_id) + s_id = cls.find_ids(":cycamore:Source", tbl) + cls.smox = cls.transactions[0]["SenderId"] + s_id.remove(cls.smox) + cls.suox = s_id[0] def test_rxtr_deployment(self): depl_time = {x["AgentId"]: x["EnterTime"] for x in self.agent_entry} - assert_equal(depl_time[self.r1], 1) - assert_equal(depl_time[self.r2], 2) - assert_equal(depl_time[self.r3], 3) + assert depl_time[self.r1] == 1 + assert depl_time[self.r2] == 2 + assert depl_time[self.r3] == 3 def test_rxtr1_xactions(self): mox_exp = [0, 1, 1, 1, 0] @@ -250,15 +245,15 @@ def test_rxtr3_xactions(self): assert_array_almost_equal(uox_exp, txs) class TestCBCPhysorSources(_PhysorSources): - def __init__(self, *args, **kwargs): - super(TestCBCPhysorSources, self).__init__(*args, **kwargs) - self.inf = "../input/physor/2_Sources_3_Reactors.xml" + @classmethod + def setup_class(cls): skip_if_dont_allow_milps() + super(TestCBCPhysorSources, cls).setup_class("../input/physor/2_Sources_3_Reactors.xml") class TestGreedyPhysorSources(_PhysorSources): - def __init__(self, *args, **kwargs): - super(TestGreedyPhysorSources, self).__init__(*args, **kwargs) - self.inf = "../input/physor/greedy_2_Sources_3_Reactors.xml" + @classmethod + def setup_class(cls): + return super(TestGreedyPhysorSources, cls).setup_class("../input/physor/greedy_2_Sources_3_Reactors.xml") class TestDynamicCapacitated(TestRegression): """Tests dynamic capacity restraints involving changes in the number of @@ -278,81 +273,71 @@ class TestDynamicCapacitated(TestRegression): facilities being the constraint. At time step 3, after decommissioning 2 older sink facilities, the remaining number of sink facilities becomes the constraint, resulting in the same transaction amount as in time step 1. 
- """ - def __init__(self, *args, **kwargs): - super(TestDynamicCapacitated, self).__init__(*args, **kwargs) - self.inf = "./input/dynamic_capacitated.xml" + """ + @classmethod + def setup_class(cls): + super(TestDynamicCapacitated, cls).setup_class("./input/dynamic_capacitated.xml") if not cyclus_has_coin(): - raise SkipTest('Cyclus not compiled with COIN') - - def setUp(self): - super(TestDynamicCapacitated, self).setUp() + raise skip('Cyclus not compiled with COIN') # Find agent ids of source and sink facilities - self.agent_ids = self.to_ary(self.agent_entry, "AgentId") - self.agent_impl = self.to_ary(self.agent_entry, "Spec") - self.depl_time = self.to_ary(self.agent_entry, "EnterTime") - self.exit_time = self.to_ary(self.agent_exit, "ExitTime") - self.exit_ids = self.to_ary(self.agent_exit, "AgentId") - self.source_id = self.find_ids(":cycamore:Source", self.agent_entry) - self.sink_id = self.find_ids(":cycamore:Sink", self.agent_entry) + cls.agent_ids = cls.to_ary(cls.agent_entry, "AgentId") + cls.agent_impl = cls.to_ary(cls.agent_entry, "Spec") + cls.depl_time = cls.to_ary(cls.agent_entry, "EnterTime") + cls.exit_time = cls.to_ary(cls.agent_exit, "ExitTime") + cls.exit_ids = cls.to_ary(cls.agent_exit, "AgentId") + cls.source_id = cls.find_ids(":cycamore:Source", cls.agent_entry) + cls.sink_id = cls.find_ids(":cycamore:Sink", cls.agent_entry) # Check transactions - self.sender_ids = self.to_ary(self.transactions, "SenderId") - self.receiver_ids = self.to_ary(self.transactions, "ReceiverId") - self.trans_time = self.to_ary(self.transactions, "Time") - self.trans_resource = self.to_ary(self.transactions, "ResourceId") + cls.sender_ids = cls.to_ary(cls.transactions, "SenderId") + cls.receiver_ids = cls.to_ary(cls.transactions, "ReceiverId") + cls.trans_time = cls.to_ary(cls.transactions, "Time") + cls.trans_resource = cls.to_ary(cls.transactions, "ResourceId") # Track transacted resources - self.resource_ids = self.to_ary(self.resources, "ResourceId") - self.quantities = self.to_ary(self.resources, "Quantity") + cls.resource_ids = cls.to_ary(cls.resources, "ResourceId") + cls.quantities = cls.to_ary(cls.resources, "Quantity") - def tearDown(self): - super(TestDynamicCapacitated, self).tearDown() + @classmethod + def teardown_class(cls): + super(TestDynamicCapacitated, cls).teardown_class() def test_source_deployment(self): # test number of sources - assert_equal(len(self.source_id), 3) + assert len(self.source_id) == 3 # Test that source facilities are all deployed at time step 1 for s in self.source_id: - assert_equal(self.depl_time[np.where(self.agent_ids == s)], 1) + assert self.depl_time[np.where(self.agent_ids == s)] == 1 def test_sink_deployment(self): # test number of sinks - assert_equal(len(self.sink_id), 4) + assert len(self.sink_id) == 4 # Test that first 2 sink facilities are deployed at time step 1 # and decommissioned at time step 2 for i in [0, 1]: - assert_equal( - self.depl_time[np.where(self.agent_ids == self.sink_id[i])][0], - 1) - assert_equal( - self.exit_time[np.where(self.exit_ids == self.sink_id[i])][0], - 2) + assert self.depl_time[np.where(self.agent_ids == self.sink_id[i])][0] == 1 + assert self.exit_time[np.where(self.exit_ids == self.sink_id[i])][0] == 2 # Test that second 2 sink facilities are deployed at time step 2 # and decommissioned at time step 3 for i in [2, 3]: - assert_equal( - self.depl_time[np.where(self.agent_ids == self.sink_id[i])][0], - 2) - assert_equal( - self.exit_time[np.where(self.exit_ids == self.sink_id[i])][0], - 3) + assert 
self.depl_time[np.where(self.agent_ids == self.sink_id[i])][0] == 2 + assert self.exit_time[np.where(self.exit_ids == self.sink_id[i])][0] == 3 def test_xaction_general(self): # Check that transactions are between sources and sinks only for s in self.sender_ids: - assert_equal(len(np.where(self.source_id == s)[0]), 1) + assert len(np.where(self.source_id == s)[0]) == 1 for r in self.receiver_ids: - assert_equal(len(np.where(self.sink_id == r)[0]), 1) + assert len(np.where(self.sink_id == r)[0]) == 1 # Total expected number of transactions - assert_equal(len(self.trans_time), 7) + assert len(self.trans_time) == 7 # Check that at time step 1, there are 2 transactions - assert_equal(len(np.where(self.trans_time == 1)[0]), 2) + assert len(np.where(self.trans_time == 1)[0]) == 2 # Check that at time step 2, there are 3 transactions - assert_equal(len(np.where(self.trans_time == 2)[0]), 3) + assert len(np.where(self.trans_time == 2)[0]) == 3 # Check that at time step 3, there are 2 transactions - assert_equal(len(np.where(self.trans_time == 3)[0]), 2) + assert len(np.where(self.trans_time == 3)[0]) == 2 def test_xaction_specific(self): # Check that at time step 1, there are 2 transactions with total @@ -361,7 +346,7 @@ def test_xaction_specific(self): for t in np.where(self.trans_time == 1)[0]: quantity += self.quantities[ np.where(self.resource_ids == self.trans_resource[t])] - assert_equal(quantity, 2) + assert quantity == 2 # Check that at time step 2, there are 3 transactions with total # amount of 3 @@ -369,7 +354,7 @@ def test_xaction_specific(self): for t in np.where(self.trans_time == 2)[0]: quantity += self.quantities[ np.where(self.resource_ids == self.trans_resource[t])] - assert_equal(quantity, 3) + assert quantity == 3 # Check that at time step 3, there are 2 transactions with total # amount of 2 @@ -377,7 +362,7 @@ def test_xaction_specific(self): for t in np.where(self.trans_time == 3)[0]: quantity += self.quantities[ np.where(self.resource_ids == self.trans_resource[t])] - assert_equal(quantity, 2) + assert quantity == 2 class TestGrowth1(TestRegression): """This class tests the growth.xml @@ -393,17 +378,15 @@ class TestGrowth1(TestRegression): A linear growth demand (y = 0x + 3) for a second commodity is provided at t=2 to test the demand for multiple commodities. 
""" - def __init__(self, *args, **kwargs): - super(TestGrowth1, self).__init__(*args, **kwargs) - self.inf = "./input/growth.xml" + @classmethod + def setup_class(cls): + super(TestGrowth1, cls).setup_class("./input/growth.xml") if not cyclus_has_coin(): - raise SkipTest('Cyclus not compiled with COIN') + raise skip('Cyclus not compiled with COIN') - def setUp(self): - super(TestGrowth1, self).setUp() - - def tearDown(self): - super(TestGrowth1, self).tearDown() + @classmethod + def teardown_class(cls): + super(TestGrowth1, cls).teardown_class() def test_deployment(self): pass @@ -418,15 +401,15 @@ def test_deployment(self): source3_id = self.find_ids("Source3", self.agent_entry, spec_col="Prototype") - assert_equal(len(source2_id), 1) - assert_equal(len(source1_id), 2) - assert_equal(len(source3_id), 3) + assert len(source2_id) == 1 + assert len(source1_id) == 2 + assert len(source3_id) == 3 - assert_equal(enter_time[np.where(agent_ids == source2_id[0])], 1) - assert_equal(enter_time[np.where(agent_ids == source1_id[0])], 2) - assert_equal(enter_time[np.where(agent_ids == source1_id[1])], 3) + assert enter_time[np.where(agent_ids == source2_id[0])] == 1 + assert enter_time[np.where(agent_ids == source1_id[0])] == 2 + assert enter_time[np.where(agent_ids == source1_id[1])] == 3 for x in source3_id: - assert_equal(enter_time[np.where(agent_ids == x)], 2) + assert enter_time[np.where(agent_ids == x)] == 2 class TestGrowth2(TestRegression): """This class tests the ./input/deploy_and_manager_insts.xml @@ -440,17 +423,15 @@ class TestGrowth2(TestRegression): t=6, 4 1-capacity Source2s are expected to be built by the ManagerInst. """ - def __init__(self, *args, **kwargs): - super(TestGrowth2, self).__init__(*args, **kwargs) - self.inf = "../input/growth/deploy_and_manager_insts.xml" + @classmethod + def setup_class(cls): + super(TestGrowth2, cls).setup_class("../input/growth/deploy_and_manager_insts.xml") if not cyclus_has_coin(): - raise SkipTest('Cyclus not compiled with COIN') - - def setUp(self): - super(TestGrowth2, self).setUp() + raise skip('Cyclus not compiled with COIN') - def tearDown(self): - super(TestGrowth2, self).tearDown() + @classmethod + def teardown_class(cls): + super(TestGrowth2, cls).teardown_class() def test_deployment(self): pass @@ -463,11 +444,11 @@ def test_deployment(self): source2_id = self.find_ids("Source2", self.agent_entry, spec_col="Prototype") - assert_equal(len(source1_id), 1) - assert_equal(len(source2_id), 4) + assert len(source1_id) == 1 + assert len(source2_id) == 4 - assert_equal(enter_time[np.where(agent_ids == source1_id[0])], 1) - assert_equal(enter_time[np.where(agent_ids == source2_id[0])], 6) + assert enter_time[np.where(agent_ids == source1_id[0])] == 1 + assert enter_time[np.where(agent_ids == source2_id[0])] == 6 class TestDeployInst(TestRegression): """This class tests the ../input/deploy_inst.xml @@ -481,18 +462,16 @@ class TestDeployInst(TestRegression): Sink agents are deployed at their respecitve times and that the correct number of these agents are deployed. 
- """ - def __init__(self, *args, **kwargs): - super(TestDeployInst, self).__init__(*args, **kwargs) - self.inf = "../input/deploy_inst.xml" + """ + @classmethod + def setup_class(cls): + super(TestDeployInst, cls).setup_class("../input/deploy_inst.xml") if not cyclus_has_coin(): - raise SkipTest('Cyclus not compiled with COIN') + raise skip('Cyclus not compiled with COIN') - def setUp(self): - super(TestDeployInst, self).setUp() - - def tearDown(self): - super(TestDeployInst, self).tearDown() + @classmethod + def teardown_class(cls): + super(TestDeployInst, cls).teardown_class() def test_deployment(self): pass @@ -504,24 +483,22 @@ def test_deployment(self): spec_col="Prototype") sink_id = self.find_ids("Sink", self.agent_entry, spec_col="Prototype") - assert_equal(len(source_id), 1) - assert_equal(len(sink_id), 1) + assert len(source_id) == 1 + assert len(sink_id) == 1 - assert_equal(enter_time[np.where(agent_ids == source_id[0])], 1) - assert_equal(enter_time[np.where(agent_ids == sink_id[0])], 0) + assert enter_time[np.where(agent_ids == source_id[0])] == 1 + assert enter_time[np.where(agent_ids == sink_id[0])] == 0 class _Recycle(TestRegression): """This class tests the input/recycle.xml file. - """ - def __init__(self, *args, **kwargs): - super(_Recycle, self).__init__(*args, **kwargs) - - # this test requires separations which isn't supported by hdf5 - # so we force sqlite: - base, _ = os.path.splitext(self.outf) - self.ext = '.sqlite' - self.outf = base + self.ext - self.sql = """ + """ + @classmethod + def setup_class(cls, inf): + super(_Recycle, cls).setup_class(inf) + base, _ = os.path.splitext(cls.outf) + cls.ext = '.sqlite' + cls.outf = base + cls.ext + cls.sql = """ SELECT t.time as time,SUM(c.massfrac*r.quantity) as qty FROM transactions as t JOIN resources as r ON t.resourceid=r.resourceid AND r.simid=t.simid JOIN agententry as send ON t.senderid=send.agentid AND send.simid=t.simid @@ -604,14 +581,14 @@ def test_pu239_reactor_repo(self): class TestGreedyRecycle(_Recycle): """This class tests the input/recycle.xml file. """ - def __init__(self, *args, **kwargs): - super(TestGreedyRecycle, self).__init__(*args, **kwargs) - self.inf = "../input/greedy_recycle.xml" + @classmethod + def setup_class(cls): + super(TestGreedyRecycle, cls).setup_class("../input/greedy_recycle.xml") -class TestCbcRecycle(_Recycle): +class TestCBCRecycle(_Recycle): """This class tests the input/recycle.xml file. 
- """ - def __init__(self, *args, **kwargs): - super(TestCbcRecycle, self).__init__(*args, **kwargs) - self.inf = "../input/recycle.xml" + """ + @classmethod + def setup_class(cls): skip_if_dont_allow_milps() + super(TestCBCRecycle, cls).setup_class("../input/recycle.xml") diff --git a/tests/test_run_inputs.py b/tests/test_run_inputs.py index ad6fd76345..55342ca3c5 100644 --- a/tests/test_run_inputs.py +++ b/tests/test_run_inputs.py @@ -1,15 +1,14 @@ import os import subprocess -from nose.tools import assert_true -from nose.plugins.skip import SkipTest +from pytest import skip import run_inputs as ri from helper import cyclus_has_coin def coin_skipper(filename): - raise SkipTest(filename + " cannot be executed since Cyclus was not installed " + raise skip(filename + " cannot be executed since Cyclus was not installed " "with COIN support") @@ -22,6 +21,6 @@ def test_inputs(): if cyclus_has_coin() or "GrowthRegion" not in src: testf = ri.TestFile(ri.cyclus_path, f, "-v0") testf.run() - yield assert_true, testf.passed, "Failed running {}".format(f) + assert testf.passed, "Failed running {}".format(f) else: - yield coin_skipper, absfile + coin_skipper(absfile) From c1adf0b69a0b3a12457a7a5fce379a33ec903fd9 Mon Sep 17 00:00:00 2001 From: Ben Nibbelink Date: Sun, 12 Nov 2023 08:53:16 -0600 Subject: [PATCH 06/10] change names in workflow to be more descriptive --- .github/workflows/build_test_publish.yml | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/.github/workflows/build_test_publish.yml b/.github/workflows/build_test_publish.yml index 86a79d9fad..f1c0ff0fe3 100644 --- a/.github/workflows/build_test_publish.yml +++ b/.github/workflows/build_test_publish.yml @@ -1,4 +1,4 @@ -name: Build and Test Dependency Images +name: Build and Publish Cycamore Images on: # allows us to run workflows manually @@ -23,13 +23,13 @@ jobs: ubuntu_versions : [ 20.04, 22.04, - ] + ] pkg_mgr : [ apt, conda, ] - name: Installing Dependencies, Building cyclus and running tests + name: Installing Dependencies, Building Cycamore and Running Tests steps: - name: default environment run: | @@ -50,7 +50,7 @@ jobs: - name: Checkout repository uses: actions/checkout@v3 - - name: Installing Dependencies in Docker image + - name: Multi-Stage Build Action uses: firehed/multistage-docker-build-action@v1 with: repository: ghcr.io/${{ github.repository_owner }}/cycamore_${{ matrix.ubuntu_versions }}_${{ matrix.pkg_mgr }} From 65c5d0824714d47d56ccbd3523f6dbbd34c4c472 Mon Sep 17 00:00:00 2001 From: Ben Nibbelink Date: Sun, 12 Nov 2023 16:41:13 -0600 Subject: [PATCH 07/10] resolve docker config errors in workflows squash 10 debugging commits --- .github/workflows/build_test.yml | 11 ++++------- 1 file changed, 4 insertions(+), 7 deletions(-) diff --git a/.github/workflows/build_test.yml b/.github/workflows/build_test.yml index f489dac0f0..a7272f5592 100644 --- a/.github/workflows/build_test.yml +++ b/.github/workflows/build_test.yml @@ -23,12 +23,12 @@ jobs: fail-fast: false matrix: ubuntu_versions : [ - 20.04, + # 20.04, 22.04, ] pkg_mgr : [ apt, - conda, + # conda, ] cyclus_tag: [ latest, @@ -41,13 +41,9 @@ jobs: - name: Checkout Cycamore uses: actions/checkout@v3 - - name: Change Home - run: | - echo "HOME=/root" >> "$GITHUB_ENV" - - name: Build Cycamore run: | - python install.py --prefix=/root/.local -j 2 --build-type=Release --core-version 99999.99999 + python install.py --prefix=/root/.local --cyclus-root=/root/.local -j 2 --build-type=Release --core-version 99999.99999 - name: Cycamore Unit Tests 
run: | @@ -55,4 +51,5 @@ jobs: - name: Cycamore Python Tests run: | + export PYTHONPATH=$(find /root/.local/lib -type d -name 'cyclus-*' -print -quit) cd tests && python -m pytest From 2c977e08bdf5a71db087fa3e50bd01acae7654f0 Mon Sep 17 00:00:00 2001 From: Ben Nibbelink Date: Sun, 12 Nov 2023 19:14:22 -0600 Subject: [PATCH 08/10] full testing matrix --- .github/workflows/build_test.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/build_test.yml b/.github/workflows/build_test.yml index a7272f5592..7c0c07b651 100644 --- a/.github/workflows/build_test.yml +++ b/.github/workflows/build_test.yml @@ -23,12 +23,12 @@ jobs: fail-fast: false matrix: ubuntu_versions : [ - # 20.04, + 20.04, 22.04, ] pkg_mgr : [ apt, - # conda, + conda, ] cyclus_tag: [ latest, From d61ffe6d5677a968921f88daa2ed57725ce5545d Mon Sep 17 00:00:00 2001 From: Ben Nibbelink Date: Mon, 13 Nov 2023 09:18:28 -0600 Subject: [PATCH 09/10] less ambigous find command --- .github/workflows/build_test.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/build_test.yml b/.github/workflows/build_test.yml index 7c0c07b651..dea7b7444c 100644 --- a/.github/workflows/build_test.yml +++ b/.github/workflows/build_test.yml @@ -51,5 +51,5 @@ jobs: - name: Cycamore Python Tests run: | - export PYTHONPATH=$(find /root/.local/lib -type d -name 'cyclus-*' -print -quit) + export PYTHONPATH=$(find /root/.local/lib -type d -name 'cyclus-*-*.egg' -print -quit) cd tests && python -m pytest From a1d337fb8c99b7b37314b34dcaeac99deac5e7d7 Mon Sep 17 00:00:00 2001 From: Ben Nibbelink Date: Mon, 13 Nov 2023 09:31:02 -0600 Subject: [PATCH 10/10] use fixture in test_run_inputs.py --- tests/test_run_inputs.py | 29 ++++++++++++++++++----------- 1 file changed, 18 insertions(+), 11 deletions(-) diff --git a/tests/test_run_inputs.py b/tests/test_run_inputs.py index 55342ca3c5..0745420638 100644 --- a/tests/test_run_inputs.py +++ b/tests/test_run_inputs.py @@ -2,6 +2,7 @@ import subprocess from pytest import skip +import pytest import run_inputs as ri from helper import cyclus_has_coin @@ -11,16 +12,22 @@ def coin_skipper(filename): raise skip(filename + " cannot be executed since Cyclus was not installed " "with COIN support") - -def test_inputs(): +def get_files(): files, _, _ = ri.get_files(ri.input_path) for f in files: - absfile = os.path.join(ri.input_path, f) - with open(absfile) as fh: - src = fh.read() - if cyclus_has_coin() or "GrowthRegion" not in src: - testf = ri.TestFile(ri.cyclus_path, f, "-v0") - testf.run() - assert testf.passed, "Failed running {}".format(f) - else: - coin_skipper(absfile) + yield f + +@pytest.fixture(params=get_files()) +def file_fixture(request): + return request.param + +def test_inputs(file_fixture): + absfile = os.path.join(ri.input_path, file_fixture) + with open(absfile) as fh: + src = fh.read() + if cyclus_has_coin() or "GrowthRegion" not in src: + testf = ri.TestFile(ri.cyclus_path, file_fixture, "-v0") + testf.run() + assert testf.passed, "Failed running {}".format(file_fixture) + else: + coin_skipper(absfile)
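Patch 10 above replaces the nose-style yield tests with a fixture parametrized over the discovered input files; pytest dropped support for yield-based test generators, so each file now has to become its own collected test through the fixture's params. A minimal, self-contained sketch of that pattern (the file names below are hypothetical, not taken from the repository) shows the mechanics:

import pytest

# Each element of `params` produces its own test item, so a failing or
# skipped input file is reported individually rather than stopping the loop.
EXAMPLE_INPUTS = ["recycle.xml", "growth.xml", "deploy_inst.xml"]  # hypothetical names

@pytest.fixture(params=EXAMPLE_INPUTS)
def input_file(request):
    return request.param

def test_each_input(input_file):
    # The real suite runs cyclus on the file; this stub only demonstrates
    # how the fixture hands each parameter to the test in turn.
    assert input_file.endswith(".xml")

Running python -m pytest on this sketch collects three separate tests, one per entry, which is the same per-file reporting the fixture in patch 10 provides for the XML files returned by run_inputs.get_files.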