
Commit

[pre-commit.ci] auto fixes from pre-commit.com hooks
for more information, see https://pre-commit.ci
pre-commit-ci[bot] committed Feb 14, 2025
1 parent 4c59420 commit b67b15d
Showing 2 changed files with 36 additions and 31 deletions.
src/virtualship/utils.py: 2 changes (1 addition & 1 deletion)
@@ -80,7 +80,7 @@ def mfp_to_yaml(excel_file_path: str, yaml_output_path: str):
f"Error: Found columns {list(actual_columns)}, but expected columns {list(expected_columns)}. "
"Are you sure that you're using the correct export from MFP?"
)

extra_columns = actual_columns - expected_columns
if extra_columns:
print(
Expand Down
tests/test_mfp_to_yaml.py: 65 changes (35 additions & 30 deletions)
@@ -1,28 +1,27 @@
import pytest
import pandas as pd
import yaml
from unittest.mock import patch
from virtualship.utils import mfp_to_yaml

import pandas as pd
import pytest

from virtualship.expedition.instrument_type import InstrumentType
from virtualship.expedition.schedule import Schedule
from pathlib import Path

from virtualship.utils import mfp_to_yaml

# Sample correct MFP data
VALID_MFP_DATA = pd.DataFrame({
    "Station Type": ["A", "B", "C"],
    "Name": ["Station1", "Station2", "Station3"],
    "Latitude": [30, 31, 32],
    "Longitude": [-44, -45, -46],
    "Instrument": ["CTD, DRIFTER", "ARGO_FLOAT", "XBT, CTD, DRIFTER"]
})
VALID_MFP_DATA = pd.DataFrame(
    {
        "Station Type": ["A", "B", "C"],
        "Name": ["Station1", "Station2", "Station3"],
        "Latitude": [30, 31, 32],
        "Longitude": [-44, -45, -46],
        "Instrument": ["CTD, DRIFTER", "ARGO_FLOAT", "XBT, CTD, DRIFTER"],
    }
)

# Missing required columns
MISSING_HEADERS_DATA = pd.DataFrame({
"Station Type": ["A"],
"Name": ["Station1"],
"Latitude": [10.5]
})
MISSING_HEADERS_DATA = pd.DataFrame(
{"Station Type": ["A"], "Name": ["Station1"], "Latitude": [10.5]}
)

# Extra unexpected columns
EXTRA_HEADERS_DATA = VALID_MFP_DATA.copy()
@@ -32,12 +31,11 @@
@patch("pandas.read_excel", return_value=VALID_MFP_DATA)
def test_mfp_to_yaml_success(mock_read_excel, tmp_path):
"""Test that mfp_to_yaml correctly processes a valid MFP Excel file."""

yaml_output_path = tmp_path / "schedule.yaml"

# Run function (No need to mock open() for YAML, real file is created)
mfp_to_yaml("mock_file.xlsx", yaml_output_path)

# Ensure the YAML file was written
assert yaml_output_path.exists()

@@ -47,26 +45,30 @@ def test_mfp_to_yaml_success(mock_read_excel, tmp_path):
    assert len(data.waypoints) == 3
    assert data.waypoints[0].instrument == [InstrumentType.CTD, InstrumentType.DRIFTER]
    assert data.waypoints[1].instrument == [InstrumentType.ARGO_FLOAT]
    assert data.waypoints[2].instrument == [InstrumentType.XBT, InstrumentType.CTD, InstrumentType.DRIFTER]
    assert data.waypoints[2].instrument == [
        InstrumentType.XBT,
        InstrumentType.CTD,
        InstrumentType.DRIFTER,
    ]


@patch("pandas.read_excel", return_value=MISSING_HEADERS_DATA)
def test_mfp_to_yaml_missing_headers(mock_read_excel, tmp_path):
"""Test that mfp_to_yaml raises an error when required columns are missing."""

yaml_output_path = tmp_path / "schedule.yaml"

with pytest.raises(ValueError, match="Error: Found columns .* but expected columns .*"):

with pytest.raises(
ValueError, match="Error: Found columns .* but expected columns .*"
):
mfp_to_yaml("mock_file.xlsx", yaml_output_path)


@patch("pandas.read_excel", return_value=EXTRA_HEADERS_DATA)
@patch("builtins.print") # Capture printed warnings
def test_mfp_to_yaml_extra_headers(mock_print, mock_read_excel, tmp_path):
"""Test that mfp_to_yaml prints a warning when extra columns are found."""

yaml_output_path = tmp_path / "schedule.yaml"

# Run function
mfp_to_yaml("mock_file.xlsx", yaml_output_path)

@@ -82,9 +84,8 @@ def test_mfp_to_yaml_extra_headers(mock_print, mock_read_excel, tmp_path):
@patch("pandas.read_excel", return_value=VALID_MFP_DATA)
def test_mfp_to_yaml_instrument_conversion(mock_read_excel, tmp_path):
"""Test that instruments are correctly converted into InstrumentType enums."""

yaml_output_path = tmp_path / "schedule.yaml"

# Run function
mfp_to_yaml("mock_file.xlsx", yaml_output_path)

Expand All @@ -94,4 +95,8 @@ def test_mfp_to_yaml_instrument_conversion(mock_read_excel, tmp_path):
    assert isinstance(data.waypoints[0].instrument, list)
    assert data.waypoints[0].instrument == [InstrumentType.CTD, InstrumentType.DRIFTER]
    assert data.waypoints[1].instrument == [InstrumentType.ARGO_FLOAT]
    assert data.waypoints[2].instrument == [InstrumentType.XBT, InstrumentType.CTD, InstrumentType.DRIFTER]
    assert data.waypoints[2].instrument == [
        InstrumentType.XBT,
        InstrumentType.CTD,
        InstrumentType.DRIFTER,
    ]
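
As a reference for what these tests exercise, here is a minimal usage sketch of mfp_to_yaml, assuming virtualship is installed and an MFP Excel export is available locally (the file names below are hypothetical); the generated YAML is inspected with plain yaml.safe_load rather than virtualship's Schedule loader.

import yaml

from virtualship.utils import mfp_to_yaml

# Hypothetical file names; substitute a real MFP export and desired output path.
excel_path = "mfp_export.xlsx"
yaml_path = "schedule.yaml"

# Convert the MFP export into a schedule YAML file.
mfp_to_yaml(excel_path, yaml_path)

# Inspect the raw YAML; its structure is defined by virtualship's Schedule model.
with open(yaml_path) as f:
    print(yaml.safe_load(f))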
