
Commit

Make unit tests more robust.
jdangerx committed Jun 10, 2024
1 parent a771a09 commit 964a14e
Showing 1 changed file with 36 additions and 54 deletions.
test/unit/workspace/datastore_test.py
@@ -338,36 +338,43 @@ def test_get_resource_with_nonexistent_resource_fails(self):
         self.assertRaises(KeyError, self.fetcher.get_resource, res)
 
 
-def test_get_zipfile_resource_retry(mocker):
+def test_get_zipfile_resource_failure(mocker):
     ds = datastore.Datastore()
-    not_a_zipfile = b"aaa"
-    ds.get_unique_resource = mocker.MagicMock(return_value=not_a_zipfile)
+    ds.get_unique_resource = mocker.MagicMock(return_value=b"")
     sleep_mock = mocker.MagicMock()
-    with mocker.patch("time.sleep", sleep_mock), pytest.raises(zipfile.BadZipFile):
+    with (
+        mocker.patch("time.sleep", sleep_mock),
+        mocker.patch("zipfile.ZipFile", side_effect=zipfile.BadZipFile),
+        pytest.raises(zipfile.BadZipFile),
+    ):
         ds.get_zipfile_resource("test_dataset")
-
-    sleep_mock.assert_has_calls([mocker.call(2**x) for x in range(5)])
-    assert sleep_mock.call_count == 5
 
 
-def test_get_zipfile_resource(mocker):
+def test_get_zipfile_resource_eventual_success(mocker):
     file_contents = "aaa"
     zipfile_bytes = io.BytesIO()
     with zipfile.ZipFile(zipfile_bytes, "w") as a_zipfile:
         a_zipfile.writestr("file_name", file_contents)
-    zipfile_bytes.seek(0)
-    zipfile_bytestring = zipfile_bytes.read()
 
     ds = datastore.Datastore()
-    ds.get_unique_resource = mocker.MagicMock(return_value=zipfile_bytestring)
+    ds.get_unique_resource = mocker.MagicMock(return_value=b"")
     with (
-        ds.get_zipfile_resource("test_dataset") as observed_zipfile,
-        observed_zipfile.open("file_name") as test_file,
+        mocker.patch("time.sleep"),
+        mocker.patch(
+            "zipfile.ZipFile",
+            side_effect=[
+                zipfile.BadZipFile,
+                zipfile.BadZipFile,
+                zipfile.ZipFile(zipfile_bytes),
+            ],
+        ),
     ):
+        observed_zipfile = ds.get_zipfile_resource("test_dataset")
+        test_file = observed_zipfile.open("file_name")
         assert test_file.read().decode(encoding="utf-8") == file_contents
 
 
-def test_get_zipfile_resources(mocker):
+def test_get_zipfile_resources_eventual_success(mocker):
     file_contents = "aaa"
     zipfile_bytes = io.BytesIO()
     with zipfile.ZipFile(zipfile_bytes, "w") as a_zipfile:
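
(Aside, not part of the commit: taken together, these tests pin down the contract of Datastore.get_zipfile_resource: fetch raw bytes, try to open them as a zip archive, sleep between failed attempts, and give up after a fixed retry budget. The assertions that time.sleep is called with 2**x for x in range(5) imply exponential backoff over five retries. Below is a minimal sketch of that behavior, assuming a hypothetical fetch_bytes callable and a retry budget of five; both are inferred from the tests, not taken from the PUDL source.)

import io
import time
import zipfile

def get_zipfile_resource_sketch(fetch_bytes, retries=5):
    """Hypothetical sketch, not the actual PUDL implementation.

    Open fetched bytes as a ZipFile, retrying on corruption with backoff.
    """
    for try_count in range(retries):
        try:
            return zipfile.ZipFile(io.BytesIO(fetch_bytes()))
        except zipfile.BadZipFile:
            time.sleep(2**try_count)  # exponential backoff: 1, 2, 4, 8, 16 s
    # Retry budget exhausted: the final attempt propagates BadZipFile.
    return zipfile.ZipFile(io.BytesIO(fetch_bytes()))

(Under this shape, the always-failing test sees five sleeps before the final BadZipFile escapes, while the eventual-success tests consume two scripted failures before the real archive opens.)
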
@@ -388,49 +395,24 @@ def test_get_zipfile_resources(mocker):
             ]
         )
     )
-
-    observed_zipfiles = ds.get_zipfile_resources("test_dataset")
-    for _key, observed_zipfile in observed_zipfiles:
-        with observed_zipfile.open("file_name") as test_file:
-            assert test_file.read().decode(encoding="utf-8") == file_contents
-
-
-def test_get_zipfile_resources_retry(mocker):
-    not_a_zipfile = io.BytesIO(b"aaa")
-    file_contents = "aaa"
-
-    zipfile_bytes = io.BytesIO()
-    with zipfile.ZipFile(zipfile_bytes, "w") as a_zipfile:
-        a_zipfile.writestr("file_name", file_contents)
-
-    ds = datastore.Datastore()
-    ds.get_resources = mocker.MagicMock(
-        return_value=iter(
-            [
-                (
-                    PudlResourceKey("test_dataset_0", "test_doi", "test_name_0"),
-                    zipfile_bytes,
-                ),
-                (
-                    PudlResourceKey("test_dataset_1", "test_doi", "test_name_1"),
-                    not_a_zipfile,
-                ),
-            ]
-        )
-    )
-
-    sleep_mock = mocker.MagicMock()
     with (
-        mocker.patch("time.sleep", sleep_mock),
+        mocker.patch(
+            "zipfile.ZipFile",
+            side_effect=[
+                zipfile.BadZipFile,
+                zipfile.BadZipFile,
+                zipfile.ZipFile(zipfile_bytes),
+                zipfile.BadZipFile,
+                zipfile.BadZipFile,
+                zipfile.ZipFile(zipfile_bytes),
+            ],
+        ),
+        mocker.patch("time.sleep"),
     ):
         observed_zipfiles = ds.get_zipfile_resources("test_dataset")
-        _key, test_file = next(observed_zipfiles)
-        assert test_file.read("file_name").decode(encoding="utf-8") == file_contents
-        with pytest.raises(zipfile.BadZipFile):
-            next(observed_zipfiles)
-
-    sleep_mock.assert_has_calls([mocker.call(2**x) for x in range(5)])
-    assert sleep_mock.call_count == 5
+        for _key, observed_zipfile in observed_zipfiles:
+            with observed_zipfile.open("file_name") as test_file:
+                assert test_file.read().decode(encoding="utf-8") == file_contents
 
 
 # TODO(rousik): add unit tests for Datasource class as well

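(A second aside, also not from the commit: the rewritten tests lean on a documented unittest.mock behavior that pytest-mock's mocker.patch exposes. When side_effect is an iterable, each call to the mock consumes the next element; an element that is an exception class or instance is raised, and any other value is returned. A self-contained illustration follows; names like opener and "archive.zip" are illustrative only.)

import zipfile
from unittest import mock

# Scripted outcomes: raise twice, then return a value on the third call.
opener = mock.MagicMock(
    side_effect=[zipfile.BadZipFile, zipfile.BadZipFile, "opened"]
)

for attempt in range(3):
    try:
        result = opener("archive.zip")  # raises on the first two calls
        print(result)  # prints "opened" on the third call
        break
    except zipfile.BadZipFile:
        print(f"attempt {attempt} failed, retrying")

(Patching zipfile.ZipFile with a scripted side_effect is what lets these tests drive a fail-fail-succeed sequence deterministically, with time.sleep patched out so the backoff costs no wall-clock time.)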