diff --git a/.gitattributes b/.gitattributes
index 3bcb6bb5..d299ffa9 100644
--- a/.gitattributes
+++ b/.gitattributes
@@ -1,4 +1,8 @@
-*.pt filter=lfs diff=lfs merge=lfs -text
-*.geoh5 filter=lfs diff=lfs merge=lfs -text
-/assets/** filter=lfs diff=lfs merge=lfs -text
-*.h5 filter=lfs diff=lfs merge=lfs -text
+# One can only push Git LFS objects to public forks if the repository network already has
+# Git LFS objects, or if one has write access to the root of the repository network.
+# (from https://docs.github.com/en/repositories/working-with-files/managing-large-files/collaboration-with-git-large-file-storage)
+#
+#*.pt filter=lfs diff=lfs merge=lfs -text
+#*.geoh5 filter=lfs diff=lfs merge=lfs -text
+#/assets/** filter=lfs diff=lfs merge=lfs -text
+#*.h5 filter=lfs diff=lfs merge=lfs -text
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index c615acd4..674cf262 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -10,7 +10,7 @@ ci:
repos:
- repo: https://github.com/psf/black
- rev: 23.1.0
+ rev: 23.3.0
hooks:
- id: black
- repo: https://github.com/PyCQA/isort
@@ -28,12 +28,12 @@ repos:
- id: flake8
files: ^omf/fileio/geoh5*
- repo: https://github.com/asottile/pyupgrade
- rev: v3.3.1
+ rev: v3.4.0
hooks:
- id: pyupgrade
args: [--py38-plus]
- repo: https://github.com/pre-commit/mirrors-mypy
- rev: v1.0.0
+ rev: v1.3.0
hooks:
- id: mypy
additional_dependencies: [types-six]
@@ -52,10 +52,10 @@ repos:
exclude: (devtools|docs)/
files: ^(omf/fileio/geoh5|tests/)
- repo: https://github.com/codespell-project/codespell
- rev: v2.2.2
+ rev: v2.2.4
hooks:
- id: codespell
- exclude: (\.ipynb$)
+ exclude: (\.ipynb$|^poetry.lock$)
entry: codespell -I .codespellignore
- repo: https://github.com/pre-commit/pre-commit-hooks
rev: v4.4.0
@@ -74,7 +74,7 @@ repos:
- id: mixed-line-ending
- id: name-tests-test
- repo: https://github.com/rstcheck/rstcheck
- rev: v6.1.1
+ rev: v6.1.2
hooks:
- id: rstcheck
additional_dependencies: [sphinx]
diff --git a/README.rst b/README.rst
index 1cdc97ba..199ce848 100644
--- a/README.rst
+++ b/README.rst
@@ -21,7 +21,7 @@ omf
Version: 3.1.0-alpha.1
API library for Open Mining Format, a new standard for mining data backed by
-the `Global Mining Standards & Guidelines Group <http://www.globalminingstandards.org>`_.
+the `Global Mining Standards & Guidelines Group <https://gmggroup.org>`_.
.. warning::
**Pre-Release Notice**
diff --git a/docs/conf.py b/docs/conf.py
index bf1733e9..041d3eac 100644
--- a/docs/conf.py
+++ b/docs/conf.py
@@ -62,9 +62,9 @@
# built documents.
#
# The short X.Y version.
-version = u'0.9.3'
+version = u'3.0.0'
# The full version, including alpha/beta/rc tags.
-release = u'0.9.3'
+release = u'3.0.0'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
diff --git a/omf/fileio/geoh5.py b/omf/fileio/geoh5.py
index c518ebdd..f26213a9 100644
--- a/omf/fileio/geoh5.py
+++ b/omf/fileio/geoh5.py
@@ -647,19 +647,25 @@ def collect_omf_attributes(element: MappedData, **kwargs) -> dict:
if not element.legends:
return kwargs
- ind = 0
- alpha = 0
- value_map = {ind: "Unknown"}
- color_map = [np.r_[ind, [0, 0, 0], alpha]]
+ alpha = 1.0
+ value_map = {0: "Unknown"}
+ color_map = [np.r_[0, [0, 0, 0], alpha]]
- for name, rgb in zip(element.legends[0].values, element.legends[1].values):
- ind += 1
+ for legend in element.legends:
+ if isinstance(legend.values, StringArray):
+ for count, name in enumerate(legend.values):
+ if str(name).lower() == "unknown":
+ name = f"[{str(name).upper()}]"
- if str(name).lower() == "unknown":
- name = f"[{str(name).upper()}]"
+ value_map[count + 1] = str(name)
- value_map[ind] = str(name)
- color_map.append(np.r_[ind, rgb, alpha])
+ else:
+ color_map.append(
+ [
+ np.r_[count + 1, val, alpha]
+ for count, val in enumerate(legend.values)
+ ]
+ )
kwargs["value_map"] = value_map
kwargs["type"] = "referenced"
diff --git a/omf/scripts/geoh5_to_omf.py b/omf/scripts/geoh5_to_omf.py
index 82f9daf1..cd3d28e4 100644
--- a/omf/scripts/geoh5_to_omf.py
+++ b/omf/scripts/geoh5_to_omf.py
@@ -1,3 +1,4 @@
+import argparse
import logging
import sys
from pathlib import Path
@@ -9,11 +10,19 @@
def run():
- geoh5_filepath = Path(sys.argv[1])
- if len(sys.argv) < 3:
+ parser = argparse.ArgumentParser(
+ prog="geoh5_to_omf",
+ description="Converts a geoh5 file to a new OMF file.",
+ )
+ parser.add_argument("geoh5_file", type=Path)
+ parser.add_argument("-o", "--out", type=Path, required=False, default=None)
+ args = parser.parse_args()
+
+ geoh5_filepath = args.geoh5_file
+ if args.out is None:
output_filepath = geoh5_filepath.with_suffix(".omf")
else:
- output_filepath = Path(sys.argv[2])
+ output_filepath = args.out
if not output_filepath.suffix:
output_filepath = output_filepath.with_suffix(".omf")
if output_filepath.exists():
diff --git a/omf/scripts/omf_to_geoh5.py b/omf/scripts/omf_to_geoh5.py
index 71ccac02..b7ecc7e1 100644
--- a/omf/scripts/omf_to_geoh5.py
+++ b/omf/scripts/omf_to_geoh5.py
@@ -1,3 +1,4 @@
+import argparse
import logging
import sys
from pathlib import Path
@@ -9,24 +10,38 @@
def run():
- omf_filepath = Path(sys.argv[1])
- output_filepath = omf_filepath.with_suffix(".geoh5")
- compression = 5
-
- if len(sys.argv) < 3:
+ parser = argparse.ArgumentParser(
+ prog="omf_to_geoh5",
+ description="Converts an OMF file to a new geoh5 file.",
+ )
+ parser.add_argument("omf_file", type=Path, help="Path to the OMF file to convert.")
+ parser.add_argument(
+ "-o",
+ "--out",
+ type=Path,
+ required=False,
+ default=None,
+ help=(
+ "Path to the output geoh5 file. If not specified, create the output file "
+ "at the same location as the input file, but with the geoh5 extension."
+ ),
+ )
+ parser.add_argument(
+ "--gzip",
+ type=int,
+ choices=range(0, 10),
+ default=5,
+ help="Gzip compression level (0-9) for h5 data.",
+ )
+ args = parser.parse_args()
+
+ omf_filepath = args.omf_file
+ if args.out is None:
output_filepath = omf_filepath.with_suffix(".geoh5")
else:
- if sys.argv[2].isdigit():
- compression = sys.argv[2]
- if len(sys.argv) > 3:
- output_filepath = Path(sys.argv[3])
- elif isinstance(sys.argv[2], str):
- output_filepath = Path(sys.argv[2])
- if not output_filepath.suffix:
- output_filepath = output_filepath.with_suffix(".geoh5")
- if len(sys.argv) > 3:
- compression = sys.argv[3]
-
+ output_filepath = args.out
+ if not output_filepath.suffix:
+ output_filepath = output_filepath.with_suffix(".geoh5")
if output_filepath.exists():
_logger.error(
"Cowardly refuses to overwrite existing file '%s'.", output_filepath
@@ -34,7 +49,7 @@ def run():
sys.exit(1)
reader = OMFReader(str(omf_filepath.absolute()))
- GeoH5Writer(reader.get_project(), output_filepath, compression=compression)
+ GeoH5Writer(reader.get_project(), output_filepath, compression=args.gzip)
_logger.info("geoh5 file created: %s", output_filepath)
diff --git a/poetry.lock b/poetry.lock
index efbd9982..9e4ca23b 100644
--- a/poetry.lock
+++ b/poetry.lock
@@ -1,4 +1,4 @@
-# This file is automatically @generated by Poetry 1.5.1 and should not be changed by hand.
+# This file is automatically @generated by Poetry 1.6.1 and should not be changed by hand.
[[package]]
name = "alabaster"
@@ -44,13 +44,13 @@ dev = ["freezegun (>=1.0,<2.0)", "pytest (>=6.0)", "pytest-cov"]
[[package]]
name = "certifi"
-version = "2023.7.22"
+version = "2023.11.17"
description = "Python package for providing Mozilla's CA Bundle."
optional = false
python-versions = ">=3.6"
files = [
- {file = "certifi-2023.7.22-py3-none-any.whl", hash = "sha256:92d6037539857d8206b8f6ae472e8b77db8058fec5937a1ef3f54304089edbb9"},
- {file = "certifi-2023.7.22.tar.gz", hash = "sha256:539cc1d13202e33ca466e88b2807e29f4c13049d6d87031a3c110744495cb082"},
+ {file = "certifi-2023.11.17-py3-none-any.whl", hash = "sha256:e036ab49d5b79556f99cfc2d9320b34cfbe5be05c5871b51de9329f0603b0474"},
+ {file = "certifi-2023.11.17.tar.gz", hash = "sha256:9b469f3a900bf28dc19b8cfbf8019bf47f7fdd1a65a1d4ffb98fc14166beb4d1"},
]
[[package]]
@@ -257,13 +257,13 @@ files = [
[[package]]
name = "exceptiongroup"
-version = "1.1.3"
+version = "1.2.0"
description = "Backport of PEP 654 (exception groups)"
optional = false
python-versions = ">=3.7"
files = [
- {file = "exceptiongroup-1.1.3-py3-none-any.whl", hash = "sha256:343280667a4585d195ca1cf9cef84a4e178c4b6cf2274caef9859782b567d5e3"},
- {file = "exceptiongroup-1.1.3.tar.gz", hash = "sha256:097acd85d473d75af5bb98e41b61ff7fe35efe6675e4f9370ec6ec5126d160e9"},
+ {file = "exceptiongroup-1.2.0-py3-none-any.whl", hash = "sha256:4bfd3996ac73b41e9b9628b04e079f193850720ea5945fc96a08633c66912f14"},
+ {file = "exceptiongroup-1.2.0.tar.gz", hash = "sha256:91f5c769735f051a4290d52edd0858999b57e5876e9f85937691bd4c9fa3ed68"},
]
[package.extras]
@@ -271,13 +271,13 @@ test = ["pytest (>=6)"]
[[package]]
name = "geoh5py"
-version = "0.8.0rc2"
+version = "0.8.0rc3"
description = "Python API for geoh5, an open file format for geoscientific data"
optional = false
python-versions = ">=3.8,<3.11"
files = [
- {file = "geoh5py-0.8.0rc2-py3-none-any.whl", hash = "sha256:e63d1e6b51c2ffbf648f17d2b459e3a55e7ec172a3936843d1f4b658b6c3c051"},
- {file = "geoh5py-0.8.0rc2.tar.gz", hash = "sha256:659b434d1430361a1941c6bf9d67dd0e78ddef91af0178720dbb905235cede39"},
+ {file = "geoh5py-0.8.0rc3-py3-none-any.whl", hash = "sha256:76625e1fccdf20f102d310c2bfae989bfb3995fb24b5f8091ada0c5e1474184c"},
+ {file = "geoh5py-0.8.0rc3.tar.gz", hash = "sha256:2ec5a62423369d9a4f32b4b1529fa44dc54cb324e0754888fa05f3543d299a43"},
]
[package.dependencies]
@@ -435,6 +435,16 @@ files = [
{file = "MarkupSafe-2.1.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:5bbe06f8eeafd38e5d0a4894ffec89378b6c6a625ff57e3028921f8ff59318ac"},
{file = "MarkupSafe-2.1.3-cp311-cp311-win32.whl", hash = "sha256:dd15ff04ffd7e05ffcb7fe79f1b98041b8ea30ae9234aed2a9168b5797c3effb"},
{file = "MarkupSafe-2.1.3-cp311-cp311-win_amd64.whl", hash = "sha256:134da1eca9ec0ae528110ccc9e48041e0828d79f24121a1a146161103c76e686"},
+ {file = "MarkupSafe-2.1.3-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:f698de3fd0c4e6972b92290a45bd9b1536bffe8c6759c62471efaa8acb4c37bc"},
+ {file = "MarkupSafe-2.1.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:aa57bd9cf8ae831a362185ee444e15a93ecb2e344c8e52e4d721ea3ab6ef1823"},
+ {file = "MarkupSafe-2.1.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ffcc3f7c66b5f5b7931a5aa68fc9cecc51e685ef90282f4a82f0f5e9b704ad11"},
+ {file = "MarkupSafe-2.1.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:47d4f1c5f80fc62fdd7777d0d40a2e9dda0a05883ab11374334f6c4de38adffd"},
+ {file = "MarkupSafe-2.1.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1f67c7038d560d92149c060157d623c542173016c4babc0c1913cca0564b9939"},
+ {file = "MarkupSafe-2.1.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:9aad3c1755095ce347e26488214ef77e0485a3c34a50c5a5e2471dff60b9dd9c"},
+ {file = "MarkupSafe-2.1.3-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:14ff806850827afd6b07a5f32bd917fb7f45b046ba40c57abdb636674a8b559c"},
+ {file = "MarkupSafe-2.1.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8f9293864fe09b8149f0cc42ce56e3f0e54de883a9de90cd427f191c346eb2e1"},
+ {file = "MarkupSafe-2.1.3-cp312-cp312-win32.whl", hash = "sha256:715d3562f79d540f251b99ebd6d8baa547118974341db04f5ad06d5ea3eb8007"},
+ {file = "MarkupSafe-2.1.3-cp312-cp312-win_amd64.whl", hash = "sha256:1b8dd8c3fd14349433c79fa8abeb573a55fc0fdd769133baac1f5e07abf54aeb"},
{file = "MarkupSafe-2.1.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:8e254ae696c88d98da6555f5ace2279cf7cd5b3f52be2b5cf97feafe883b58d2"},
{file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cb0932dc158471523c9637e807d9bfb93e06a95cbf010f1a38b98623b929ef2b"},
{file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9402b03f1a1b4dc4c19845e5c749e3ab82d5078d16a2a4c2cd2df62d57bb0707"},
@@ -595,13 +605,13 @@ tests = ["check-manifest", "coverage", "defusedxml", "markdown2", "olefile", "pa
[[package]]
name = "platformdirs"
-version = "3.11.0"
+version = "4.0.0"
description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"."
optional = false
python-versions = ">=3.7"
files = [
- {file = "platformdirs-3.11.0-py3-none-any.whl", hash = "sha256:e9d171d00af68be50e9202731309c4e658fd8bc76f55c11c7dd760d023bda68e"},
- {file = "platformdirs-3.11.0.tar.gz", hash = "sha256:cf8ee52a3afdb965072dcc652433e0c7e3e40cf5ea1477cd4b3b1d2eb75495b3"},
+ {file = "platformdirs-4.0.0-py3-none-any.whl", hash = "sha256:118c954d7e949b35437270383a3f2531e99dd93cf7ce4dc8340d3356d30f173b"},
+ {file = "platformdirs-4.0.0.tar.gz", hash = "sha256:cb633b2bcf10c51af60beb0ab06d2f1d69064b43abf4c185ca6b28865f3f9731"},
]
[package.extras]
@@ -643,17 +653,18 @@ math = ["numpy (>=1.7)", "vectormath (>=0.1.4)"]
[[package]]
name = "pygments"
-version = "2.16.1"
+version = "2.17.2"
description = "Pygments is a syntax highlighting package written in Python."
optional = false
python-versions = ">=3.7"
files = [
- {file = "Pygments-2.16.1-py3-none-any.whl", hash = "sha256:13fc09fa63bc8d8671a6d247e1eb303c4b343eaee81d861f3404db2935653692"},
- {file = "Pygments-2.16.1.tar.gz", hash = "sha256:1daff0494820c69bc8941e407aa20f577374ee88364ee10a98fdbe0aece96e29"},
+ {file = "pygments-2.17.2-py3-none-any.whl", hash = "sha256:b27c2826c47d0f3219f29554824c30c5e8945175d888647acd804ddd04af846c"},
+ {file = "pygments-2.17.2.tar.gz", hash = "sha256:da46cec9fd2de5be3a8a784f434e4c4ab670b4ff54d605c4c2717e9d49c4c367"},
]
[package.extras]
plugins = ["importlib-metadata"]
+windows-terminal = ["colorama (>=0.4.6)"]
[[package]]
name = "pylint"
@@ -923,13 +934,13 @@ files = [
[[package]]
name = "tomlkit"
-version = "0.12.1"
+version = "0.12.3"
description = "Style preserving TOML library"
optional = false
python-versions = ">=3.7"
files = [
- {file = "tomlkit-0.12.1-py3-none-any.whl", hash = "sha256:712cbd236609acc6a3e2e97253dfc52d4c2082982a88f61b640ecf0817eab899"},
- {file = "tomlkit-0.12.1.tar.gz", hash = "sha256:38e1ff8edb991273ec9f6181244a6a391ac30e9f5098e7535640ea6be97a7c86"},
+ {file = "tomlkit-0.12.3-py3-none-any.whl", hash = "sha256:b0a645a9156dc7cb5d3a1f0d4bab66db287fcb8e0430bdd4664a095ea16414ba"},
+ {file = "tomlkit-0.12.3.tar.gz", hash = "sha256:75baf5012d06501f07bee5bf8e801b9f343e7aac5a92581f20f80ce632e6b5a4"},
]
[[package]]
@@ -945,18 +956,17 @@ files = [
[[package]]
name = "urllib3"
-version = "2.0.7"
+version = "2.1.0"
description = "HTTP library with thread-safe connection pooling, file post, and more."
optional = false
-python-versions = ">=3.7"
+python-versions = ">=3.8"
files = [
- {file = "urllib3-2.0.7-py3-none-any.whl", hash = "sha256:fdb6d215c776278489906c2f8916e6e7d4f5a9b602ccbcfdf7f016fc8da0596e"},
- {file = "urllib3-2.0.7.tar.gz", hash = "sha256:c97dfde1f7bd43a71c8d2a58e369e9b2bf692d1334ea9f9cae55add7d0dd0f84"},
+ {file = "urllib3-2.1.0-py3-none-any.whl", hash = "sha256:55901e917a5896a349ff771be919f8bd99aff50b79fe58fec595eb37bbc56bb3"},
+ {file = "urllib3-2.1.0.tar.gz", hash = "sha256:df7aa8afb0148fa78488e7899b2c59b5f4ffcfa82e6c54ccb9dd37c1d7b52d54"},
]
[package.extras]
brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"]
-secure = ["certifi", "cryptography (>=1.9)", "idna (>=2.0.0)", "pyopenssl (>=17.1.0)", "urllib3-secure-extra"]
socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"]
zstd = ["zstandard (>=0.18.0)"]
@@ -991,4 +1001,4 @@ testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "p
[metadata]
lock-version = "2.0"
python-versions = "^3.8,<3.11"
-content-hash = "f183aa7aab16694b5bb76ce9d5989d8cc6c9caf3f98aeb6c94ba8832ebd2c79c"
+content-hash = "87a10ddaad4b980211b0798181db95a95516ded6d4dcbd4832ccb990f75d6e7d"
diff --git a/pylintrc b/pylintrc
index 42357c43..7ec2fca1 100644
--- a/pylintrc
+++ b/pylintrc
@@ -374,8 +374,8 @@ min-public-methods=1
[EXCEPTIONS]
# Exceptions that will emit a warning when caught.
-overgeneral-exceptions=BaseException,
- Exception
+overgeneral-exceptions=builtins.BaseException,
+ builtins.Exception
[FORMAT]
diff --git a/pyproject.toml b/pyproject.toml
index f1a354b0..49a77693 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -33,7 +33,7 @@ omf_to_geoh5 = 'omf.scripts.omf_to_geoh5:run'
[tool.poetry.dependencies]
python = "^3.8,<3.11"
-geoh5py = "~0.8.0rc2"
+geoh5py = {version = "~0.8.0rc2", allow-prereleases = true}
# geoh5py = {url = "https://github.com/MiraGeoscience/geoh5py/archive/refs/heads/release/0.8.0.zip#sha256="}
numpy = "~1.23.5"
properties = "~0.6.0"
diff --git a/tests/convert_volume_test.py b/tests/convert_volume_test.py
index 81f2052d..61dcc754 100644
--- a/tests/convert_volume_test.py
+++ b/tests/convert_volume_test.py
@@ -49,6 +49,25 @@ def test_volume_to_geoh5(tmp_path):
),
],
),
+ omf.MappedData(
+ name="Reference Data 2",
+ location="cells",
+ array=np.random.randint(-1, 3, np.prod(dims))
+ .flatten()
+ .astype(np.int32),
+ legends=[
+ omf.Legend(
+ values=omf.ColorArray(
+ array=[
+ [255, 0, 255],
+ [255, 255, 0],
+ [255, 0, 0],
+ ]
+ )
+ ),
+ omf.Legend(values=omf.StringArray(array=["abc", "123", "@#$%"])),
+ ],
+ ),
],
)
@@ -65,6 +84,15 @@ def test_volume_to_geoh5(tmp_path):
converter = omf.fileio.geoh5.get_conversion_map(block_model, workspace)
converted_omf = converter.from_geoh5(block_model)
+ # Compare reference data created two ways
+ ref_a = block_model.get_entity("Reference Data")[0]
+ ref_b = block_model.get_entity("Reference Data 2")[0]
+
+ assert all(
+ key in ref_a.value_map.map and value == ref_a.value_map.map[key]
+ for key, value in ref_b.value_map.map.items()
+ )
+
omf.fileio.utils.compare_elements(converted_omf, vol)
project = omf.fileio.geoh5.GeoH5Reader(file)()
diff --git a/tests/script_geoh5_to_omf_test.py b/tests/script_geoh5_to_omf_test.py
new file mode 100644
index 00000000..d310887b
--- /dev/null
+++ b/tests/script_geoh5_to_omf_test.py
@@ -0,0 +1,90 @@
+from pathlib import Path
+from unittest.mock import patch
+
+import numpy as np
+import pytest
+
+import omf
+from omf.scripts import geoh5_to_omf
+
+# pylint: disable=duplicate-code
+
+
+@pytest.fixture(
+ scope="module",
+ name="geoh5_input_path",
+ params=["test file.geoh5", "test_file.geoh5"],
+)
+def geoh5_input_path_fixture(request, tmp_path_factory) -> Path:
+ points = omf.PointSetElement(
+ name="Random Points",
+ description="Just random points",
+ geometry=omf.PointSetGeometry(vertices=np.random.rand(100, 3)),
+ data=[
+ omf.ScalarData(
+ name="rand data", array=np.random.randn(100), location="vertices"
+ ),
+ ],
+ )
+
+ file_path = tmp_path_factory.mktemp("input") / request.param
+ omf.OMFWriter(points, str(file_path))
+ return file_path
+
+
+def test_geoh5_to_omf_without_output_name(geoh5_input_path: Path):
+ """Test the geoh5_to_omf script."""
+
+ with patch("sys.argv", ["geoh5_to_omf", str(geoh5_input_path)]):
+ geoh5_to_omf.run()
+
+ assert (geoh5_input_path.with_suffix(".omf")).exists()
+
+
+@pytest.mark.parametrize(
+ "output_name", ["my_output.omf", "my output.omf", "my_output", "my output"]
+)
+def test_geoh5_to_omf_with_output_name(
+ tmp_path, monkeypatch, geoh5_input_path: Path, output_name: str
+):
+ """Test the geoh5_to_omf script."""
+
+ working_dir = tmp_path / "output"
+ working_dir.mkdir()
+ monkeypatch.chdir(working_dir)
+ with patch(
+ "sys.argv", ["geoh5_to_omf", str(geoh5_input_path), "-o", f"{output_name}"]
+ ):
+ geoh5_to_omf.run()
+
+ expected_output = working_dir / output_name
+ if not expected_output.suffix:
+ expected_output = expected_output.with_suffix(".omf")
+ assert expected_output.exists()
+
+
+@pytest.mark.parametrize(
+ "output_name", ["my_output.omf", "my output.omf", "my_output", "my output"]
+)
+def test_geoh5_to_omf_with_absolute_output_path(
+ tmp_path, geoh5_input_path: Path, output_name: str
+):
+ """Test the geoh5_to_omf script."""
+
+ output_dir = tmp_path / "output"
+ output_dir.mkdir()
+ with patch(
+ "sys.argv",
+ [
+ "geoh5_to_omf",
+ str(geoh5_input_path),
+ "-o",
+ f"{(output_dir / output_name).absolute()}",
+ ],
+ ):
+ geoh5_to_omf.run()
+
+ expected_output = output_dir / output_name
+ if not expected_output.suffix:
+ expected_output = expected_output.with_suffix(".omf")
+ assert expected_output.exists()
diff --git a/tests/script_omf_to_geoh5_test.py b/tests/script_omf_to_geoh5_test.py
new file mode 100644
index 00000000..aa3ba026
--- /dev/null
+++ b/tests/script_omf_to_geoh5_test.py
@@ -0,0 +1,150 @@
+from pathlib import Path
+from unittest.mock import patch
+
+import numpy as np
+import pytest
+
+import omf
+from omf.scripts import omf_to_geoh5
+
+
+@pytest.fixture(
+ scope="module", name="omf_input_path", params=["test file.omf", "test_file.omf"]
+)
+def omf_input_path_fixture(request, tmp_path_factory) -> Path:
+ omf_path = tmp_path_factory.mktemp("input") / request.param
+ create_omf_file(omf_path)
+ return omf_path
+
+
+def create_omf_file(omf_file_path: Path) -> None:
+ """Create an OMF file with random data."""
+ points = omf.PointSetElement(
+ name="Random Points",
+ description="Just random points",
+ geometry=omf.PointSetGeometry(vertices=np.random.rand(100, 3)),
+ data=[
+ omf.ScalarData(
+ name="rand data", array=np.random.randn(100), location="vertices"
+ ),
+ ],
+ )
+
+ omf.OMFWriter(points, str(omf_file_path))
+ assert omf_file_path.exists()
+
+
+def test_omf_to_geoh5_without_output_name(omf_input_path: Path):
+ """Test the omf_to_geoh5 script."""
+
+ with patch("sys.argv", ["omf_to_geoh5", str(omf_input_path)]):
+ omf_to_geoh5.run()
+
+ assert (omf_input_path.with_suffix(".geoh5")).exists()
+
+
+@pytest.mark.parametrize(
+ "output_name", ["my_output.geoh5", "my output.geoh5", "my_output", "my output"]
+)
+def test_omf_to_geoh5_with_output_name(
+ tmp_path: Path, monkeypatch, omf_input_path: Path, output_name: str
+):
+ """Test the omf_to_geoh5 script."""
+
+ working_dir = tmp_path / "output"
+ working_dir.mkdir()
+ monkeypatch.chdir(working_dir)
+ with patch(
+ "sys.argv", ["omf_to_geoh5", str(omf_input_path), "-o", f"{output_name}"]
+ ):
+ omf_to_geoh5.run()
+
+ expected_output = working_dir / output_name
+ if not expected_output.suffix:
+ expected_output = expected_output.with_suffix(".geoh5")
+ assert expected_output.exists()
+
+
+@pytest.mark.parametrize(
+ "output_name", ["my_output.geoh5", "my output.geoh5", "my_output", "my output"]
+)
+def test_omf_to_geoh5_with_absolute_output_path(
+ tmp_path: Path, omf_input_path: Path, output_name: str
+):
+ """Test the omf_to_geoh5 script."""
+
+ output_dir = tmp_path / "output"
+ output_dir.mkdir()
+ with patch(
+ "sys.argv",
+ [
+ "omf_to_geoh5",
+ str(omf_input_path),
+ "-o",
+ f"{(output_dir / output_name).absolute()}",
+ ],
+ ):
+ omf_to_geoh5.run()
+
+ expected_output = output_dir / output_name
+ if not expected_output.suffix:
+ expected_output = expected_output.with_suffix(".geoh5")
+ assert expected_output.exists()
+
+
+@pytest.mark.parametrize("gzip_level", range(0, 10))
+def test_omf_to_geoh5_with_gzip_level(tmp_path: Path, gzip_level: int):
+ """Test the omf_to_geoh5 script."""
+
+ omf_path = tmp_path / "test_file.omf"
+ create_omf_file(omf_path)
+ output_name = f"{omf_path.stem}_{gzip_level}.geoh5"
+ output_dir = tmp_path / "output"
+ output_path = output_dir / output_name
+ output_dir.mkdir()
+ with patch(
+ "sys.argv",
+ [
+ "omf_to_geoh5",
+ str(omf_path),
+ "--gzip",
+ f"{gzip_level}",
+ "-o",
+ f"{output_path.absolute()}",
+ ],
+ ):
+ omf_to_geoh5.run()
+
+ assert output_path.exists()
+
+
+def test_omf_to_geoh5_with_gzip_level_too_high(capsys, tmp_path: Path):
+ """Test the omf_to_geoh5 script."""
+
+ omf_path = tmp_path / "test_file.omf"
+ create_omf_file(omf_path)
+ output_name = omf_path.with_suffix(".geoh5").name
+ output_dir = tmp_path / "output"
+ output_path = output_dir / output_name
+ output_dir.mkdir()
+ with pytest.raises(SystemExit) as captured_exception:
+ with patch(
+ "sys.argv",
+ [
+ "omf_to_geoh5",
+ str(omf_path),
+ "--gzip",
+ "10",
+ "-o",
+ f"{output_path.absolute()}",
+ ],
+ ):
+ omf_to_geoh5.run()
+
+ assert not output_path.exists()
+ assert captured_exception.value.code == 2
+ captured_err = capsys.readouterr().err
+ assert any(
+ "error: argument --gzip: invalid choice: 10" in line
+ for line in captured_err.splitlines()
+ )