From 02c4860d25b482f7396f488f9cdafb6eb1107d1d Mon Sep 17 00:00:00 2001
From: Harsha Nori
Date: Tue, 28 Jan 2025 16:28:02 -0800
Subject: [PATCH] 001101011100011100111

---
 setup.py                 |  1 +
 .../test_tokenizers.py   |  0
 tests/unit/test_utils.py | 41 ----------------------------------------
 3 files changed, 1 insertion(+), 41 deletions(-)
 rename tests/{unit => model_integration}/test_tokenizers.py (100%)
 delete mode 100644 tests/unit/test_utils.py

diff --git a/setup.py b/setup.py
index da91515b2..8202508f9 100644
--- a/setup.py
+++ b/setup.py
@@ -64,6 +64,7 @@
     "types-regex",
     "types-requests",
     "types-jsonschema",
+    "tokenizers",
    "requests",
 ]
 test_requires = [
diff --git a/tests/unit/test_tokenizers.py b/tests/model_integration/test_tokenizers.py
similarity index 100%
rename from tests/unit/test_tokenizers.py
rename to tests/model_integration/test_tokenizers.py
diff --git a/tests/unit/test_utils.py b/tests/unit/test_utils.py
deleted file mode 100644
index 3b337394d..000000000
--- a/tests/unit/test_utils.py
+++ /dev/null
@@ -1,41 +0,0 @@
-"""
-Unit tests guidance._utils
-"""
-
-from typing import Tuple
-
-import numpy as np
-import pytest
-
-from guidance import _utils
-
-
-@pytest.fixture(scope="module")
-def atol() -> float:
-    return 1e-6
-
-
-@pytest.mark.parametrize(
-    "size_and_axis",
-    [
-        ((32_000,), -1),  # very next token logits
-        ((10, 32_000), -1),  # many token's next-token logits
-        ((4, 10, 32_000), -1),  # batch of texts
-    ],
-)
-class TestLogitsTransforms:
-    def test_log_softmax(self, size_and_axis, atol: float):
-        import torch
-        size, axis = size_and_axis
-        logits: np.ndarray = -np.random.uniform(low=0, high=60, size=size)
-        log_probs = _utils.log_softmax(logits, axis=axis)
-        log_probs_correct = torch.tensor(logits).log_softmax(dim=axis).numpy()
-        assert np.allclose(log_probs, log_probs_correct, atol=atol)
-
-    def test_softmax(self, size_and_axis, atol: float):
-        import torch
-        size, axis = size_and_axis
-        logits: np.ndarray = -np.random.uniform(low=0, high=60, size=size)
-        probs = _utils.softmax(logits, axis=axis)
-        probs_correct = torch.tensor(logits).softmax(dim=axis).numpy()
-        assert np.allclose(probs, probs_correct, atol=atol)
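
For context: the deleted tests compared guidance._utils.log_softmax and
guidance._utils.softmax against torch's reference implementations over
logit arrays of shape (V,), (B, V), and (B, T, V). Below is a minimal
sketch of the standard max-shifted formulation such utilities typically
use; this numpy-only version is illustrative and is assumed, not taken
from the actual guidance._utils code.

import numpy as np

def log_softmax(logits: np.ndarray, axis: int = -1) -> np.ndarray:
    # Shift by the per-axis max so exp() cannot overflow; the shift
    # cancels out of the softmax ratio, so the result is unchanged.
    shifted = logits - np.max(logits, axis=axis, keepdims=True)
    # log(softmax(x)) = shifted - log(sum(exp(shifted)))
    return shifted - np.log(np.sum(np.exp(shifted), axis=axis, keepdims=True))

def softmax(logits: np.ndarray, axis: int = -1) -> np.ndarray:
    shifted = logits - np.max(logits, axis=axis, keepdims=True)
    exps = np.exp(shifted)
    return exps / np.sum(exps, axis=axis, keepdims=True)

A quick self-check mirroring the deleted assertions, assuming torch is
installed:

    import torch
    logits = -np.random.uniform(low=0, high=60, size=(10, 32_000))
    assert np.allclose(log_softmax(logits),
                       torch.tensor(logits).log_softmax(dim=-1).numpy(),
                       atol=1e-6)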