From 9a7a8e59de5bae90d1d2cecb97001d816b68bdca Mon Sep 17 00:00:00 2001 From: cszsolnai Date: Wed, 20 Nov 2024 09:55:34 +0100 Subject: [PATCH 01/22] Added app_utils tests --- swarm_copy_tests/app/test_app_utils.py | 75 +++++++++ swarm_copy_tests/app/test_config.py | 71 +++++++++ swarm_copy_tests/app/test_main.py | 0 swarm_copy_tests/conftest.py | 210 +++++++++++++++++++++++++ 4 files changed, 356 insertions(+) create mode 100644 swarm_copy_tests/app/test_app_utils.py create mode 100644 swarm_copy_tests/app/test_config.py create mode 100644 swarm_copy_tests/app/test_main.py create mode 100644 swarm_copy_tests/conftest.py diff --git a/swarm_copy_tests/app/test_app_utils.py b/swarm_copy_tests/app/test_app_utils.py new file mode 100644 index 0000000..70018f2 --- /dev/null +++ b/swarm_copy_tests/app/test_app_utils.py @@ -0,0 +1,75 @@ +"""Test app utils.""" + +from unittest.mock import AsyncMock, patch + +import pytest +from fastapi.exceptions import HTTPException +from httpx import AsyncClient + +from swarm_copy.app.app_utils import setup_engine, validate_project +from swarm_copy.app.config import Settings + + +@pytest.mark.asyncio +async def test_validate_project(patch_required_env, httpx_mock, monkeypatch): + monkeypatch.setenv("NEUROAGENT_KEYCLOAK__VALIDATE_TOKEN", "true") + httpx_client = AsyncClient() + token = "fake_token" + test_vp = {"vlab_id": "test_vlab_DB", "project_id": "project_id_DB"} + vlab_url = "https://openbluebrain.com/api/virtual-lab-manager/virtual-labs" + + # test with bad config + httpx_mock.add_response( + url=f'{vlab_url}/{test_vp["vlab_id"]}/projects/{test_vp["project_id"]}', + status_code=404, + ) + with pytest.raises(HTTPException) as error: + await validate_project( + httpx_client=httpx_client, + vlab_id=test_vp["vlab_id"], + project_id=test_vp["project_id"], + token=token, + vlab_project_url=vlab_url, + ) + assert error.value.status_code == 401 + + # test with good config + httpx_mock.add_response( + url=f'{vlab_url}/{test_vp["vlab_id"]}/projects/{test_vp["project_id"]}', + json="test_project_ID", + ) + await validate_project( + httpx_client=httpx_client, + vlab_id=test_vp["vlab_id"], + project_id=test_vp["project_id"], + token=token, + vlab_project_url=vlab_url, + ) + # we just want to assert that the httpx_mock was called.
+ + +@patch("neuroagent.app.app_utils.create_async_engine") +def test_setup_engine(create_engine_mock, monkeypatch, patch_required_env): + create_engine_mock.return_value = AsyncMock() + + monkeypatch.setenv("NEUROAGENT_DB__PREFIX", "prefix") + + settings = Settings() + + connection_string = "postgresql+asyncpg://user:password@localhost/dbname" + retval = setup_engine(settings=settings, connection_string=connection_string) + assert retval is not None + + +@patch("neuroagent.app.app_utils.create_async_engine") +def test_setup_engine_no_connection_string( + create_engine_mock, monkeypatch, patch_required_env +): + create_engine_mock.return_value = AsyncMock() + + monkeypatch.setenv("NEUROAGENT_DB__PREFIX", "prefix") + + settings = Settings() + + retval = setup_engine(settings=settings, connection_string=None) + assert retval is None diff --git a/swarm_copy_tests/app/test_config.py b/swarm_copy_tests/app/test_config.py new file mode 100644 index 0000000..5274b9c --- /dev/null +++ b/swarm_copy_tests/app/test_config.py @@ -0,0 +1,71 @@ +"""Test config""" + +import pytest +from pydantic import ValidationError + +from swarm_copy.app.config import Settings + + +def test_required(monkeypatch, patch_required_env): + settings = Settings() + + assert settings.tools.literature.url == "https://fake_url" + assert settings.knowledge_graph.base_url == "https://fake_url/api/nexus/v1" + assert settings.openai.token.get_secret_value() == "dummy" + + # make sure not case sensitive + monkeypatch.delenv("NEUROAGENT_TOOLS__LITERATURE__URL") + monkeypatch.setenv("neuroagent_tools__literature__URL", "https://new_fake_url") + + settings = Settings() + assert settings.tools.literature.url == "https://new_fake_url" + + +def test_no_settings(): + # We get an error when no custom variables provided + with pytest.raises(ValidationError): + Settings() + + +def test_setup_tools(monkeypatch, patch_required_env): + monkeypatch.setenv("NEUROAGENT_TOOLS__TRACE__SEARCH_SIZE", "20") + monkeypatch.setenv("NEUROAGENT_TOOLS__MORPHO__SEARCH_SIZE", "20") + monkeypatch.setenv("NEUROAGENT_TOOLS__KG_MORPHO_FEATURES__SEARCH_SIZE", "20") + + monkeypatch.setenv("NEUROAGENT_KEYCLOAK__USERNAME", "user") + monkeypatch.setenv("NEUROAGENT_KEYCLOAK__PASSWORD", "pass") + + settings = Settings() + + assert settings.tools.morpho.search_size == 20 + assert settings.tools.trace.search_size == 20 + assert settings.tools.kg_morpho_features.search_size == 20 + assert settings.keycloak.username == "user" + assert settings.keycloak.password.get_secret_value() == "pass" + + +def test_check_consistency(monkeypatch): + # We get an error when no custom variables provided + url = "https://fake_url" + monkeypatch.setenv("NEUROAGENT_TOOLS__LITERATURE__URL", url) + monkeypatch.setenv("NEUROAGENT_KNOWLEDGE_GRAPH__URL", url) + + with pytest.raises(ValueError): + Settings() + + monkeypatch.setenv("NEUROAGENT_GENERATIVE__OPENAI__TOKEN", "dummy") + monkeypatch.setenv("NEUROAGENT_KEYCLOAK__VALIDATE_TOKEN", "true") + + with pytest.raises(ValueError): + Settings() + + monkeypatch.setenv("NEUROAGENT_KEYCLOAK__VALIDATE_TOKEN", "false") + + with pytest.raises(ValueError): + Settings() + + monkeypatch.setenv("NEUROAGENT_KNOWLEDGE_GRAPH__BASE_URL", "http://fake_nexus.com") + monkeypatch.setenv("NEUROAGENT_KEYCLOAK__VALIDATE_TOKEN", "true") + monkeypatch.setenv("NEUROAGENT_KEYCLOAK__PASSWORD", "Hello") + + Settings() diff --git a/swarm_copy_tests/app/test_main.py b/swarm_copy_tests/app/test_main.py new file mode 100644 index 0000000..e69de29 diff --git 
a/swarm_copy_tests/conftest.py b/swarm_copy_tests/conftest.py new file mode 100644 index 0000000..8acd252 --- /dev/null +++ b/swarm_copy_tests/conftest.py @@ -0,0 +1,210 @@ +"""Test configuration.""" + +import json +from pathlib import Path + +import pytest +import pytest_asyncio +from fastapi.testclient import TestClient +from httpx import AsyncClient +from langchain_core.language_models.fake_chat_models import GenericFakeChatModel +from langchain_core.messages import AIMessage +from sqlalchemy import MetaData +from sqlalchemy.ext.asyncio import AsyncSession, create_async_engine + +from neuroagent.app.config import Settings +from neuroagent.app.dependencies import get_kg_token, get_settings +from neuroagent.app.main import app +from neuroagent.tools import GetMorphoTool + + +@pytest.fixture(name="settings") +def settings(): + return Settings( + tools={ + "literature": { + "url": "fake_literature_url", + }, + }, + knowledge_graph={ + "base_url": "https://fake_url/api/nexus/v1", + }, + openai={ + "token": "fake_token", + }, + keycloak={ + "username": "fake_username", + "password": "fake_password", + }, + ) + + +@pytest.fixture(name="app_client") +def client_fixture(): + """Get client and clear app dependency_overrides.""" + app_client = TestClient(app) + test_settings = Settings( + tools={ + "literature": { + "url": "fake_literature_url", + }, + }, + knowledge_graph={ + "base_url": "https://fake_url/api/nexus/v1", + }, + openai={ + "token": "fake_token", + }, + keycloak={ + "username": "fake_username", + "password": "fake_password", + }, + ) + app.dependency_overrides[get_settings] = lambda: test_settings + # mock keycloak authentication + app.dependency_overrides[get_kg_token] = lambda: "fake_token" + yield app_client + app.dependency_overrides.clear() + + +@pytest.fixture(autouse=True, scope="session") +def dont_look_at_env_file(): + """Never look inside of the .env when running unit tests.""" + Settings.model_config["env_file"] = None + + +@pytest.fixture() +def patch_required_env(monkeypatch): + monkeypatch.setenv("NEUROAGENT_TOOLS__LITERATURE__URL", "https://fake_url") + monkeypatch.setenv( + "NEUROAGENT_KNOWLEDGE_GRAPH__BASE_URL", "https://fake_url/api/nexus/v1" + ) + monkeypatch.setenv("NEUROAGENT_OPENAI__TOKEN", "dummy") + monkeypatch.setenv("NEUROAGENT_KEYCLOAK__VALIDATE_TOKEN", "False") + monkeypatch.setenv("NEUROAGENT_KEYCLOAK__PASSWORD", "password") + + +@pytest_asyncio.fixture(params=["sqlite", "postgresql"], name="db_connection") +async def setup_sql_db(request, tmp_path): + db_type = request.param + + # To start the postgresql database: + # docker run -it --rm -p 5432:5432 -e POSTGRES_USER=test -e POSTGRES_PASSWORD=password postgres:latest + path = ( + f"sqlite+aiosqlite:///{tmp_path / 'test_db.db'}" + if db_type == "sqlite" + else "postgresql+asyncpg://test:password@localhost:5432" + ) + if db_type == "postgresql": + try: + async with create_async_engine(path).connect() as conn: + pass + except Exception: + pytest.skip("Postgres database not connected") + yield path + if db_type == "postgresql": + metadata = MetaData() + engine = create_async_engine(path) + session = AsyncSession(bind=engine) + async with engine.begin() as conn: + await conn.run_sync(metadata.reflect) + await conn.run_sync(metadata.drop_all) + + await session.commit() + await engine.dispose() + await session.aclose() + + +@pytest.fixture +def get_resolve_query_output(): + with open("tests/data/resolve_query.json") as f: + outputs = json.loads(f.read()) + return outputs + + +@pytest.fixture +def 
brain_region_json_path(): + br_path = Path(__file__).parent / "data" / "brainregion_hierarchy.json" + return br_path + + +@pytest.fixture +async def fake_llm_with_tools(brain_region_json_path): + class FakeFuntionChatModel(GenericFakeChatModel): + def bind_tools(self, functions: list): + return self + + def bind_functions(self, **kwargs): + return self + + # If you need another fake response to use different tools, + # you can do in your test + # ```python + # llm, _ = await anext(fake_llm_with_tools) + # llm.responses = my_fake_responses + # ``` + # and simply bind the corresponding tools + fake_responses = [ + AIMessage( + content="", + additional_kwargs={ + "tool_calls": [ + { + "index": 0, + "id": "call_zHhwfNLSvGGHXMoILdIYtDVI", + "function": { + "arguments": '{"brain_region_id":"http://api.brain-map.org/api/v2/data/Structure/549"}', + "name": "get-morpho-tool", + }, + "type": "function", + } + ] + }, + response_metadata={"finish_reason": "tool_calls"}, + id="run-3828644d-197b-401b-8634-e6ecf01c2e7c-0", + tool_calls=[ + { + "name": "get-morpho-tool", + "args": { + "brain_region_id": ( + "http://api.brain-map.org/api/v2/data/Structure/549" + ) + }, + "id": "call_zHhwfNLSvGGHXMoILdIYtDVI", + } + ], + ), + AIMessage( + content="Great answer", + response_metadata={"finish_reason": "stop"}, + id="run-42768b30-044a-4263-8c5c-da61429aa9da-0", + ), + ] + + # If you use this tool in your test, DO NOT FORGET to mock the url response with the following snippet: + # + # ```python + # json_path = Path(__file__).resolve().parent.parent / "data" / "knowledge_graph.json" + # with open(json_path) as f: + # knowledge_graph_response = json.load(f) + + # httpx_mock.add_response( + # url="http://fake_url", + # json=knowledge_graph_response, + # ) + # ``` + # The http call is not mocked here because one might want to change the responses + # and the tools used. 
+ async_client = AsyncClient() + tool = GetMorphoTool( + metadata={ + "url": "http://fake_url", + "search_size": 2, + "httpx_client": async_client, + "token": "fake_token", + "brainregion_path": brain_region_json_path, + } + ) + + yield FakeFuntionChatModel(messages=iter(fake_responses)), [tool], fake_responses + await async_client.aclose() From cdd67c8801885975a4f0aa5f5b037e66fb6035ba Mon Sep 17 00:00:00 2001 From: cszsolnai Date: Wed, 20 Nov 2024 09:57:50 +0100 Subject: [PATCH 02/22] Added test_dependencies --- swarm_copy_tests/app/test_main.py | 22 ++++++++++++++++++++++ tests/app/test_dependencies.py | 27 +++++++++++++++++++++++++++ 2 files changed, 49 insertions(+) diff --git a/swarm_copy_tests/app/test_main.py b/swarm_copy_tests/app/test_main.py index e69de29..54f3def 100644 --- a/swarm_copy_tests/app/test_main.py +++ b/swarm_copy_tests/app/test_main.py @@ -0,0 +1,22 @@ +from swarm_copy.app.dependencies import get_settings +from swarm_copy.app.main import app + + +def test_settings_endpoint(app_client, dont_look_at_env_file): + settings = app.dependency_overrides[get_settings]() + response = app_client.get("/settings") + + replace_secretstr = settings.model_dump() + replace_secretstr["keycloak"]["password"] = "**********" + replace_secretstr["openai"]["token"] = "**********" + assert response.json() == replace_secretstr + + +def test_readyz(app_client): + response = app_client.get( + "/", + ) + + body = response.json() + assert isinstance(body, dict) + assert body["status"] == "ok" diff --git a/tests/app/test_dependencies.py b/tests/app/test_dependencies.py index b79a0e9..e3a23b6 100644 --- a/tests/app/test_dependencies.py +++ b/tests/app/test_dependencies.py @@ -574,6 +574,33 @@ def test_get_connection_string_no_prefix(monkeypatch, patch_required_env): assert result is None, "should return None when prefix is not set" +<<<<<<< Updated upstream +======= +def test_get_engine(monkeypatch, patch_required_env): + monkeypatch.setenv("NEUROAGENT_DB__PREFIX", "prefix") + + settings = Settings() + + connection_string = "https://localhost" + retval = get_engine(settings=settings, connection_string=connection_string) + assert retval is not None + + +@patch("neuroagent.app.dependencies.create_engine") +def test_get_engine_no_connection_string( + create_engine_mock, monkeypatch, patch_required_env +): + create_engine_mock.return_value = Mock() + + monkeypatch.setenv("NEUROAGENT_DB__PREFIX", "prefix") + + settings = Settings() + + retval = get_engine(settings=settings, connection_string=None) + assert retval is None + + +>>>>>>> Stashed changes @patch("sqlalchemy.orm.Session") @pytest.mark.asyncio async def test_get_session_success(_): From 102cbcbd6227764aafb4341fcf41b15a79cf6f89 Mon Sep 17 00:00:00 2001 From: cszsol Date: Wed, 20 Nov 2024 10:28:46 +0100 Subject: [PATCH 03/22] Update test_dependencies.py --- tests/app/test_dependencies.py | 27 --------------------------- 1 file changed, 27 deletions(-) diff --git a/tests/app/test_dependencies.py b/tests/app/test_dependencies.py index e3a23b6..b79a0e9 100644 --- a/tests/app/test_dependencies.py +++ b/tests/app/test_dependencies.py @@ -574,33 +574,6 @@ def test_get_connection_string_no_prefix(monkeypatch, patch_required_env): assert result is None, "should return None when prefix is not set" -<<<<<<< Updated upstream -======= -def test_get_engine(monkeypatch, patch_required_env): - monkeypatch.setenv("NEUROAGENT_DB__PREFIX", "prefix") - - settings = Settings() - - connection_string = "https://localhost" - retval = get_engine(settings=settings, 
connection_string=connection_string) - assert retval is not None - - -@patch("neuroagent.app.dependencies.create_engine") -def test_get_engine_no_connection_string( - create_engine_mock, monkeypatch, patch_required_env -): - create_engine_mock.return_value = Mock() - - monkeypatch.setenv("NEUROAGENT_DB__PREFIX", "prefix") - - settings = Settings() - - retval = get_engine(settings=settings, connection_string=None) - assert retval is None - - ->>>>>>> Stashed changes @patch("sqlalchemy.orm.Session") @pytest.mark.asyncio async def test_get_session_success(_): From fa56ea6aee597d853acdea69ab513e9bf86d90b9 Mon Sep 17 00:00:00 2001 From: cszsolnai Date: Wed, 20 Nov 2024 10:31:15 +0100 Subject: [PATCH 04/22] Conflict resolution --- tests/app/test_dependencies.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/app/test_dependencies.py b/tests/app/test_dependencies.py index b79a0e9..a16be34 100644 --- a/tests/app/test_dependencies.py +++ b/tests/app/test_dependencies.py @@ -41,7 +41,7 @@ get_update_kg_hierarchy, get_user_id, get_vlab_and_project, - validate_project, + validate_project ) from neuroagent.app.routers.database.schemas import Base, Threads from neuroagent.tools import ( From 630132516073c8c0ed885133151c72b3d5db7750 Mon Sep 17 00:00:00 2001 From: cszsol Date: Wed, 20 Nov 2024 10:31:44 +0100 Subject: [PATCH 05/22] Update test_dependencies.py --- tests/app/test_dependencies.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/app/test_dependencies.py b/tests/app/test_dependencies.py index a16be34..b79a0e9 100644 --- a/tests/app/test_dependencies.py +++ b/tests/app/test_dependencies.py @@ -41,7 +41,7 @@ get_update_kg_hierarchy, get_user_id, get_vlab_and_project, - validate_project + validate_project, ) from neuroagent.app.routers.database.schemas import Base, Threads from neuroagent.tools import ( From 0c7c416165c5f2c52b564369e1dec42ffe419efd Mon Sep 17 00:00:00 2001 From: cszsolnai Date: Wed, 20 Nov 2024 10:37:21 +0100 Subject: [PATCH 06/22] run swarm copy tests --- .github/workflows/ci.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index d8becc5..0347c9b 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -90,4 +90,4 @@ jobs: mypy src/ swarm_copy/ # Include src/ directory in Python path to prioritize local files in pytest export PYTHONPATH=$(pwd)/src:$PYTHONPATH - pytest --color=yes + pytest --color=yes tests/ swarm_copy_tests/ From 3416a0f54f4da18cf5b50023a6ede0fbe9e7df9b Mon Sep 17 00:00:00 2001 From: cszsolnai Date: Wed, 20 Nov 2024 10:38:43 +0100 Subject: [PATCH 07/22] Added test_dependencies --- swarm_copy_tests/app/test_dependencies.py | 171 ++++++++++++++++++++++ 1 file changed, 171 insertions(+) create mode 100644 swarm_copy_tests/app/test_dependencies.py diff --git a/swarm_copy_tests/app/test_dependencies.py b/swarm_copy_tests/app/test_dependencies.py new file mode 100644 index 0000000..20bfb7d --- /dev/null +++ b/swarm_copy_tests/app/test_dependencies.py @@ -0,0 +1,171 @@ +"""Test dependencies.""" + +import json +import os +from pathlib import Path +from typing import AsyncIterator +from unittest.mock import Mock + +import pytest +from httpx import AsyncClient + +from swarm_copy.app.dependencies import ( + Settings, + get_cell_types_kg_hierarchy, + get_connection_string, + get_httpx_client, + get_settings, + get_update_kg_hierarchy, + get_user_id, +) + + +def test_get_settings(patch_required_env): + settings = get_settings() + assert 
settings.tools.literature.url == "https://fake_url" + assert settings.knowledge_graph.url == "https://fake_url/api/nexus/v1/search/query/" + + +@pytest.mark.asyncio +async def test_get_httpx_client(): + request = Mock() + request.headers = {"x-request-id": "greatid"} + httpx_client_iterator = get_httpx_client(request=request) + assert isinstance(httpx_client_iterator, AsyncIterator) + async for httpx_client in httpx_client_iterator: + assert isinstance(httpx_client, AsyncClient) + assert httpx_client.headers["x-request-id"] == "greatid" + + +@pytest.mark.asyncio +async def test_get_user(httpx_mock, monkeypatch, patch_required_env): + monkeypatch.setenv("NEUROAGENT_KEYCLOAK__USERNAME", "fake_username") + monkeypatch.setenv("NEUROAGENT_KEYCLOAK__PASSWORD", "fake_password") + monkeypatch.setenv("NEUROAGENT_KEYCLOAK__ISSUER", "https://great_issuer.com") + monkeypatch.setenv("NEUROAGENT_KEYCLOAK__VALIDATE_TOKEN", "true") + + fake_response = { + "sub": "12345", + "email_verified": False, + "name": "Machine Learning Test User", + "groups": [], + "preferred_username": "sbo-ml", + "given_name": "Machine Learning", + "family_name": "Test User", + "email": "email@epfl.ch", + } + httpx_mock.add_response( + url="https://great_issuer.com/protocol/openid-connect/userinfo", + json=fake_response, + ) + + settings = Settings() + client = AsyncClient() + token = "eyJgreattoken" + user_id = await get_user_id(token=token, settings=settings, httpx_client=client) + + assert user_id == fake_response["sub"] + + +@pytest.mark.asyncio +async def test_get_update_kg_hierarchy( + tmp_path, httpx_mock, monkeypatch, patch_required_env +): + token = "fake_token" + file_name = "fake_file" + client = AsyncClient() + + file_url = "https://fake_file_url" + + monkeypatch.setenv( + "NEUROAGENT_KNOWLEDGE_GRAPH__HIERARCHY_URL", "http://fake_hierarchy_url.com" + ) + + settings = Settings( + knowledge_graph={"br_saving_path": tmp_path / "test_brain_region.json"} + ) + + json_response_url = { + "head": {"vars": ["file_url"]}, + "results": {"bindings": [{"file_url": {"type": "uri", "value": file_url}}]}, + } + with open( + Path(__file__).parent.parent.parent + / "tests" + / "data" + / "KG_brain_regions_hierarchy_test.json" + ) as fh: + json_response_file = json.load(fh) + + httpx_mock.add_response( + url=settings.knowledge_graph.sparql_url, json=json_response_url + ) + httpx_mock.add_response(url=file_url, json=json_response_file) + + await get_update_kg_hierarchy( + token, + client, + settings, + file_name, + ) + + assert os.path.exists(settings.knowledge_graph.br_saving_path) + + +@pytest.mark.asyncio +async def test_get_cell_types_kg_hierarchy( + tmp_path, httpx_mock, monkeypatch, patch_required_env +): + token = "fake_token" + file_name = "fake_file" + client = AsyncClient() + + file_url = "https://fake_file_url" + monkeypatch.setenv( + "NEUROAGENT_KNOWLEDGE_GRAPH__HIERARCHY_URL", "http://fake_hierarchy_url.com" + ) + + settings = Settings( + knowledge_graph={"ct_saving_path": tmp_path / "test_cell_types_region.json"} + ) + + json_response_url = { + "head": {"vars": ["file_url"]}, + "results": {"bindings": [{"file_url": {"type": "uri", "value": file_url}}]}, + } + with open( + Path(__file__).parent.parent.parent + / "tests" + / "data" + / "kg_cell_types_hierarchy_test.json" + ) as fh: + json_response_file = json.load(fh) + + httpx_mock.add_response( + url=settings.knowledge_graph.sparql_url, json=json_response_url + ) + httpx_mock.add_response(url=file_url, json=json_response_file) + + await get_cell_types_kg_hierarchy( + token, 
+ client, + settings, + file_name, + ) + + assert os.path.exists(settings.knowledge_graph.ct_saving_path) + + +def test_get_connection_string_full(monkeypatch, patch_required_env): + monkeypatch.setenv("NEUROAGENT_DB__PREFIX", "http://") + monkeypatch.setenv("NEUROAGENT_DB__USER", "John") + monkeypatch.setenv("NEUROAGENT_DB__PASSWORD", "Doe") + monkeypatch.setenv("NEUROAGENT_DB__HOST", "localhost") + monkeypatch.setenv("NEUROAGENT_DB__PORT", "5000") + monkeypatch.setenv("NEUROAGENT_DB__NAME", "test") + + settings = Settings() + result = get_connection_string(settings) + assert ( + result == "http://John:Doe@localhost:5000/test" + ), "must return fully formed connection string" From 8a0bc33ad7374cf80e32fa498d7f05765bbf3fc9 Mon Sep 17 00:00:00 2001 From: cszsolnai Date: Wed, 20 Nov 2024 13:04:42 +0100 Subject: [PATCH 08/22] Fixed breaking changes --- swarm_copy/tools/bluenaas_memodel_getall.py | 4 ++-- swarm_copy/tools/bluenaas_memodel_getone.py | 4 ++-- swarm_copy/tools/traces_tool.py | 3 ++- 3 files changed, 6 insertions(+), 5 deletions(-) diff --git a/swarm_copy/tools/bluenaas_memodel_getall.py b/swarm_copy/tools/bluenaas_memodel_getall.py index 8bda00e..cdf55ff 100644 --- a/swarm_copy/tools/bluenaas_memodel_getall.py +++ b/swarm_copy/tools/bluenaas_memodel_getall.py @@ -29,7 +29,7 @@ class InputMEModelGetAll(BaseModel): page_size: int = Field( default=20, description="Number of results returned by the API." ) - model_type: Literal["single-neuron-simulation", "synaptome-simulation"] = Field( + simulation_type: Literal["single-neuron-simulation", "synaptome-simulation"] = Field( default="single-neuron-simulation", description="Type of simulation to retrieve.", ) @@ -55,7 +55,7 @@ async def arun(self) -> PaginatedResponseUnionMEModelResponseSynaptomeModelRespo response = await self.metadata.httpx_client.get( url=f"{self.metadata.bluenaas_url}/neuron-model/{self.metadata.vlab_id}/{self.metadata.project_id}/me-models", params={ - "simulation_type": self.input_schema.model_type, + "simulation_type": self.input_schema.simulation_type, "offset": self.input_schema.offset, "page_size": self.input_schema.page_size, }, diff --git a/swarm_copy/tools/bluenaas_memodel_getone.py b/swarm_copy/tools/bluenaas_memodel_getone.py index 4f4a3b3..70774b0 100644 --- a/swarm_copy/tools/bluenaas_memodel_getone.py +++ b/swarm_copy/tools/bluenaas_memodel_getone.py @@ -24,7 +24,7 @@ class MEModelGetOneMetadata(BaseMetadata): class InputMEModelGetOne(BaseModel): """Inputs for the BlueNaaS single-neuron simulation.""" - model_id: str = Field( + simulation_id: str = Field( description="ID of the model to retrieve. Should be an https link." 
) @@ -45,7 +45,7 @@ async def arun(self) -> MEModelResponse: ) response = await self.metadata.httpx_client.get( - url=f"{self.metadata.bluenaas_url}/neuron-model/{self.metadata.vlab_id}/{self.metadata.project_id}/{quote_plus(self.input_schema.model_id)}", + url=f"{self.metadata.bluenaas_url}/neuron-model/{self.metadata.vlab_id}/{self.metadata.project_id}/{quote_plus(self.input_schema.simulation_id)}", headers={"Authorization": f"Bearer {self.metadata.token}"}, ) diff --git a/swarm_copy/tools/traces_tool.py b/swarm_copy/tools/traces_tool.py index 41028b2..0434013 100644 --- a/swarm_copy/tools/traces_tool.py +++ b/swarm_copy/tools/traces_tool.py @@ -1,6 +1,7 @@ """Traces tool.""" import logging +from pathlib import Path from typing import Any, ClassVar from pydantic import BaseModel, Field @@ -46,7 +47,7 @@ class GetTracesMetadata(BaseMetadata): knowledge_graph_url: str token: str trace_search_size: int - brainregion_path: str + brainregion_path: str | Path class GetTracesTool(BaseTool): From 66b48bc46f616e712b8a0bc486690126c4acd689 Mon Sep 17 00:00:00 2001 From: cszsolnai Date: Wed, 20 Nov 2024 13:22:56 +0100 Subject: [PATCH 09/22] Fixed settings test --- swarm_copy_tests/app/test_main.py | 7 +------ 1 file changed, 1 insertion(+), 6 deletions(-) diff --git a/swarm_copy_tests/app/test_main.py b/swarm_copy_tests/app/test_main.py index 54f3def..2af6526 100644 --- a/swarm_copy_tests/app/test_main.py +++ b/swarm_copy_tests/app/test_main.py @@ -1,9 +1,4 @@ -from swarm_copy.app.dependencies import get_settings -from swarm_copy.app.main import app - - -def test_settings_endpoint(app_client, dont_look_at_env_file): - settings = app.dependency_overrides[get_settings]() +def test_settings_endpoint(app_client, dont_look_at_env_file, settings): response = app_client.get("/settings") replace_secretstr = settings.model_dump() From 5153c007b60e7852e395769cc1e0104e83c444be Mon Sep 17 00:00:00 2001 From: cszsolnai Date: Wed, 20 Nov 2024 13:25:15 +0100 Subject: [PATCH 10/22] lint --- CHANGELOG.md | 1 + swarm_copy/tools/bluenaas_memodel_getall.py | 8 +++++--- 2 files changed, 6 insertions(+), 3 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 52955bc..eb933fa 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -13,6 +13,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 - Tool implementations without langchain or langgraph dependencies - CRUDs. - BlueNaas CRUD tools +- app unit tests ### Fixed - Migrate LLM Evaluation logic to scripts and add tests diff --git a/swarm_copy/tools/bluenaas_memodel_getall.py b/swarm_copy/tools/bluenaas_memodel_getall.py index cdf55ff..db2501e 100644 --- a/swarm_copy/tools/bluenaas_memodel_getall.py +++ b/swarm_copy/tools/bluenaas_memodel_getall.py @@ -29,9 +29,11 @@ class InputMEModelGetAll(BaseModel): page_size: int = Field( default=20, description="Number of results returned by the API." 
) - simulation_type: Literal["single-neuron-simulation", "synaptome-simulation"] = Field( - default="single-neuron-simulation", - description="Type of simulation to retrieve.", + simulation_type: Literal["single-neuron-simulation", "synaptome-simulation"] = ( + Field( + default="single-neuron-simulation", + description="Type of simulation to retrieve.", + ) ) From 9a77d377a4d5e903d0e745625e9747306ec533b5 Mon Sep 17 00:00:00 2001 From: kanesoban Date: Thu, 5 Dec 2024 10:31:53 +0100 Subject: [PATCH 11/22] Remove unnecessary dependencies --- swarm_copy/tools/bluenaas_memodel_getall.py | 2 +- swarm_copy/tools/bluenaas_memodel_getone.py | 4 +- tests/conftest.py | 92 +-------------------- 3 files changed, 7 insertions(+), 91 deletions(-) diff --git a/swarm_copy/tools/bluenaas_memodel_getall.py b/swarm_copy/tools/bluenaas_memodel_getall.py index db2501e..bc42146 100644 --- a/swarm_copy/tools/bluenaas_memodel_getall.py +++ b/swarm_copy/tools/bluenaas_memodel_getall.py @@ -29,7 +29,7 @@ class InputMEModelGetAll(BaseModel): page_size: int = Field( default=20, description="Number of results returned by the API." ) - simulation_type: Literal["single-neuron-simulation", "synaptome-simulation"] = ( + memodel_type: Literal["single-neuron-simulation", "synaptome-simulation"] = ( Field( default="single-neuron-simulation", description="Type of simulation to retrieve.", diff --git a/swarm_copy/tools/bluenaas_memodel_getone.py b/swarm_copy/tools/bluenaas_memodel_getone.py index 70774b0..f84acfa 100644 --- a/swarm_copy/tools/bluenaas_memodel_getone.py +++ b/swarm_copy/tools/bluenaas_memodel_getone.py @@ -24,7 +24,7 @@ class MEModelGetOneMetadata(BaseMetadata): class InputMEModelGetOne(BaseModel): """Inputs for the BlueNaaS single-neuron simulation.""" - simulation_id: str = Field( + memodel_id: str = Field( description="ID of the model to retrieve. Should be an https link." 
) @@ -45,7 +45,7 @@ async def arun(self) -> MEModelResponse: ) response = await self.metadata.httpx_client.get( - url=f"{self.metadata.bluenaas_url}/neuron-model/{self.metadata.vlab_id}/{self.metadata.project_id}/{quote_plus(self.input_schema.simulation_id)}", + url=f"{self.metadata.bluenaas_url}/neuron-model/{self.metadata.vlab_id}/{self.metadata.project_id}/{quote_plus(self.input_schema.memodel_id)}", headers={"Authorization": f"Bearer {self.metadata.token}"}, ) diff --git a/tests/conftest.py b/tests/conftest.py index 88ee7ea..dc7e6e2 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -7,15 +7,13 @@ import pytest_asyncio from fastapi.testclient import TestClient from httpx import AsyncClient -from langchain_core.language_models.fake_chat_models import GenericFakeChatModel -from langchain_core.messages import AIMessage from sqlalchemy import MetaData from sqlalchemy.ext.asyncio import AsyncSession, create_async_engine -from neuroagent.app.config import Settings -from neuroagent.app.dependencies import get_kg_token, get_settings -from neuroagent.app.main import app -from neuroagent.tools import GetMorphoTool +from swarm_copy.app.config import Settings +from swarm_copy.app.dependencies import get_kg_token, get_settings +from swarm_copy.app.main import app +from swarm_copy.tools import GetMorphoTool @pytest.fixture(name="app_client") @@ -105,85 +103,3 @@ def get_resolve_query_output(): def brain_region_json_path(): br_path = Path(__file__).parent / "data" / "brainregion_hierarchy.json" return br_path - - -@pytest.fixture -async def fake_llm_with_tools(brain_region_json_path): - class FakeFuntionChatModel(GenericFakeChatModel): - def bind_tools(self, functions: list): - return self - - def bind_functions(self, **kwargs): - return self - - # If you need another fake response to use different tools, - # you can do in your test - # ```python - # llm, _ = await anext(fake_llm_with_tools) - # llm.responses = my_fake_responses - # ``` - # and simply bind the corresponding tools - fake_responses = [ - AIMessage( - content="", - additional_kwargs={ - "tool_calls": [ - { - "index": 0, - "id": "call_zHhwfNLSvGGHXMoILdIYtDVI", - "function": { - "arguments": '{"brain_region_id":"http://api.brain-map.org/api/v2/data/Structure/549"}', - "name": "get-morpho-tool", - }, - "type": "function", - } - ] - }, - response_metadata={"finish_reason": "tool_calls"}, - id="run-3828644d-197b-401b-8634-e6ecf01c2e7c-0", - tool_calls=[ - { - "name": "get-morpho-tool", - "args": { - "brain_region_id": ( - "http://api.brain-map.org/api/v2/data/Structure/549" - ) - }, - "id": "call_zHhwfNLSvGGHXMoILdIYtDVI", - } - ], - ), - AIMessage( - content="Great answer", - response_metadata={"finish_reason": "stop"}, - id="run-42768b30-044a-4263-8c5c-da61429aa9da-0", - ), - ] - - # If you use this tool in your test, DO NOT FORGET to mock the url response with the following snippet: - # - # ```python - # json_path = Path(__file__).resolve().parent.parent / "data" / "knowledge_graph.json" - # with open(json_path) as f: - # knowledge_graph_response = json.load(f) - - # httpx_mock.add_response( - # url="http://fake_url", - # json=knowledge_graph_response, - # ) - # ``` - # The http call is not mocked here because one might want to change the responses - # and the tools used. 
- async_client = AsyncClient() - tool = GetMorphoTool( - metadata={ - "url": "http://fake_url", - "search_size": 2, - "httpx_client": async_client, - "token": "fake_token", - "brainregion_path": brain_region_json_path, - } - ) - - yield FakeFuntionChatModel(messages=iter(fake_responses)), [tool], fake_responses - await async_client.aclose() From 6ab7fd897997d7640f87b8173191909730b03ed9 Mon Sep 17 00:00:00 2001 From: kanesoban Date: Mon, 9 Dec 2024 10:21:25 +0100 Subject: [PATCH 12/22] Added get_vlab_and_project tests --- swarm_copy/app/dependencies.py | 5 +- swarm_copy_tests/app/test_dependencies.py | 198 +++++++++++++++++++++- 2 files changed, 199 insertions(+), 4 deletions(-) diff --git a/swarm_copy/app/dependencies.py b/swarm_copy/app/dependencies.py index f021c3e..27e7452 100644 --- a/swarm_copy/app/dependencies.py +++ b/swarm_copy/app/dependencies.py @@ -195,6 +195,7 @@ async def get_vlab_and_project( } else: thread_id = request.path_params.get("thread_id") + logging.error(f"*** Thread id: {thread_id}, User id: {user_id} ***") thread_result = await session.execute( select(Threads).where( Threads.user_id == user_id, Threads.thread_id == thread_id @@ -204,9 +205,7 @@ async def get_vlab_and_project( if not thread: raise HTTPException( status_code=404, - detail={ - "detail": "Thread not found.", - }, + detail="Thread not found.", ) if thread and thread.vlab_id and thread.project_id: vlab_and_project = { diff --git a/swarm_copy_tests/app/test_dependencies.py b/swarm_copy_tests/app/test_dependencies.py index 20bfb7d..3a87480 100644 --- a/swarm_copy_tests/app/test_dependencies.py +++ b/swarm_copy_tests/app/test_dependencies.py @@ -8,7 +8,10 @@ import pytest from httpx import AsyncClient +from fastapi import Request, HTTPException +from swarm_copy.app.app_utils import setup_engine +from swarm_copy.app.database.sql_schemas import Base, Threads from swarm_copy.app.dependencies import ( Settings, get_cell_types_kg_hierarchy, @@ -16,7 +19,7 @@ get_httpx_client, get_settings, get_update_kg_hierarchy, - get_user_id, + get_user_id, get_session, get_vlab_and_project, ) @@ -169,3 +172,196 @@ def test_get_connection_string_full(monkeypatch, patch_required_env): assert ( result == "http://John:Doe@localhost:5000/test" ), "must return fully formed connection string" + + + +@pytest.mark.asyncio +@pytest.mark.httpx_mock(can_send_already_matched_responses=True) +async def test_get_vlab_and_project( + patch_required_env, httpx_mock, db_connection, monkeypatch +): + # Setup DB with one thread to do the tests + monkeypatch.setenv("NEUROAGENT_KEYCLOAK__VALIDATE_TOKEN", "true") + test_settings = Settings( + db={"prefix": db_connection}, + ) + engine = setup_engine(test_settings, db_connection) + session = await anext(get_session(engine)) + user_id = "Super_user" + token = "fake_token" + httpx_client = AsyncClient() + httpx_mock.add_response( + url=f"{test_settings.virtual_lab.get_project_url}/test_vlab/projects/test_project", + json="test_project_ID", + ) + + # create test thread table + async with engine.begin() as conn: + await conn.run_sync(Base.metadata.create_all) + + new_thread = Threads( + user_id=user_id, + vlab_id="test_vlab_DB", + project_id="project_id_DB", + title="test_title", + ) + session.add(new_thread) + await session.commit() + await session.refresh(new_thread) + + try: + # Test with info in headers. 
+ good_request_headers = Request( + scope={ + "type": "http", + "method": "Get", + "url": "http://fake_url/thread_id", + "headers": [ + (b"x-virtual-lab-id", b"test_vlab"), + (b"x-project-id", b"test_project"), + ], + }, + ) + ids = await get_vlab_and_project( + user_id=user_id, + session=session, + request=good_request_headers, + settings=test_settings, + token=token, + httpx_client=httpx_client, + ) + assert ids == {"vlab_id": "test_vlab", "project_id": "test_project"} + finally: + # don't forget to close the session, otherwise the tests hangs. + await session.close() + await engine.dispose() + + +@pytest.mark.asyncio +@pytest.mark.httpx_mock(can_send_already_matched_responses=True) +async def test_get_vlab_and_project_no_info_in_headers( + patch_required_env, httpx_mock, db_connection, monkeypatch +): + # Setup DB with one thread to do the tests + monkeypatch.setenv("NEUROAGENT_KEYCLOAK__VALIDATE_TOKEN", "true") + test_settings = Settings( + db={"prefix": db_connection}, + ) + engine = setup_engine(test_settings, db_connection) + session = await anext(get_session(engine)) + user_id = "Super_user" + token = "fake_token" + httpx_client = AsyncClient() + + # create test thread table + async with engine.begin() as conn: + await conn.run_sync(Base.metadata.create_all) + + new_thread = Threads( + user_id=user_id, + vlab_id="test_vlab_DB", + project_id="project_id_DB", + title="test_title", + ) + session.add(new_thread) + await session.commit() + await session.refresh(new_thread) + + try: + # Test with no infos in headers. + bad_request = Request( + scope={ + "type": "http", + "method": "GET", + "scheme": "http", + "server": ("example.com", 80), + "path_params": {"dummy_patram": "fake_thread_id"}, + "headers": [ + (b"wong_header", b"wrong value"), + ], + } + ) + with pytest.raises(HTTPException) as error: + await get_vlab_and_project( + user_id=user_id, + session=session, + request=bad_request, + settings=test_settings, + token=token, + httpx_client=httpx_client, + ) + assert ( + error.value.detail == "Thread not found." + ) + finally: + # don't forget to close the session, otherwise the tests hangs. + await session.close() + await engine.dispose() + + +@pytest.mark.asyncio +@pytest.mark.httpx_mock(can_send_already_matched_responses=True) +async def test_get_vlab_and_project_valid_thread_id( + patch_required_env, httpx_mock, db_connection, monkeypatch +): + # Setup DB with one thread to do the tests + monkeypatch.setenv("NEUROAGENT_KEYCLOAK__VALIDATE_TOKEN", "true") + test_settings = Settings( + db={"prefix": db_connection}, + ) + engine = setup_engine(test_settings, db_connection) + session = await anext(get_session(engine)) + user_id = "Super_user" + token = "fake_token" + httpx_client = AsyncClient() + httpx_mock.add_response( + url=f"{test_settings.virtual_lab.get_project_url}/test_vlab/projects/test_project", + json="test_project_ID", + ) + httpx_mock.add_response( + url=f"{test_settings.virtual_lab.get_project_url}/test_vlab_DB/projects/project_id_DB", + json="test_project_ID", + ) + + # create test thread table + async with engine.begin() as conn: + await conn.run_sync(Base.metadata.create_all) + + new_thread = Threads( + user_id=user_id, + vlab_id="test_vlab_DB", + project_id="project_id_DB", + title="test_title", + ) + session.add(new_thread) + await session.commit() + await session.refresh(new_thread) + + try: + # Test with no infos in headers, but valid thread_ID. 
+ good_request_DB = Request( + scope={ + "type": "http", + "method": "GET", + "scheme": "http", + "server": ("example.com", 80), + "path_params": {"thread_id": new_thread.thread_id}, + "headers": [ + (b"wong_header", b"wrong value"), + ], + } + ) + ids_from_DB = await get_vlab_and_project( + user_id=user_id, + session=session, + request=good_request_DB, + settings=test_settings, + token=token, + httpx_client=httpx_client, + ) + assert ids_from_DB == {"vlab_id": "test_vlab_DB", "project_id": "project_id_DB"} + + finally: + # don't forget to close the session, otherwise the tests hangs. + await session.close() + await engine.dispose() From fa9c4a397292855f983a217440f086415b5de693 Mon Sep 17 00:00:00 2001 From: kanesoban Date: Mon, 9 Dec 2024 10:37:34 +0100 Subject: [PATCH 13/22] Added test for get starting agent --- swarm_copy_tests/app/test_dependencies.py | 11 +++++++++-- 1 file changed, 9 insertions(+), 2 deletions(-) diff --git a/swarm_copy_tests/app/test_dependencies.py b/swarm_copy_tests/app/test_dependencies.py index 3a87480..5dcdc13 100644 --- a/swarm_copy_tests/app/test_dependencies.py +++ b/swarm_copy_tests/app/test_dependencies.py @@ -19,8 +19,9 @@ get_httpx_client, get_settings, get_update_kg_hierarchy, - get_user_id, get_session, get_vlab_and_project, + get_user_id, get_session, get_vlab_and_project, get_starting_agent, ) +from swarm_copy.new_types import Agent def test_get_settings(patch_required_env): @@ -238,7 +239,6 @@ async def test_get_vlab_and_project( @pytest.mark.asyncio -@pytest.mark.httpx_mock(can_send_already_matched_responses=True) async def test_get_vlab_and_project_no_info_in_headers( patch_required_env, httpx_mock, db_connection, monkeypatch ): @@ -365,3 +365,10 @@ async def test_get_vlab_and_project_valid_thread_id( # don't forget to close the session, otherwise the tests hangs. 
await session.close() await engine.dispose() + + +def test_get_starting_agent(patch_required_env, monkeypatch): + settings = Settings() + agent = get_starting_agent(None, settings) + + assert isinstance(agent, Agent) From 93cb650c178d7683c2f6ab16ff5bcda87c0fdf8e Mon Sep 17 00:00:00 2001 From: kanesoban Date: Mon, 9 Dec 2024 11:41:36 +0100 Subject: [PATCH 14/22] Added test for get_kg_token --- swarm_copy_tests/app/test_dependencies.py | 26 +++++++++++++++++++---- 1 file changed, 22 insertions(+), 4 deletions(-) diff --git a/swarm_copy_tests/app/test_dependencies.py b/swarm_copy_tests/app/test_dependencies.py index 5dcdc13..36390b0 100644 --- a/swarm_copy_tests/app/test_dependencies.py +++ b/swarm_copy_tests/app/test_dependencies.py @@ -4,7 +4,7 @@ import os from pathlib import Path from typing import AsyncIterator -from unittest.mock import Mock +from unittest.mock import Mock, patch import pytest from httpx import AsyncClient @@ -19,7 +19,7 @@ get_httpx_client, get_settings, get_update_kg_hierarchy, - get_user_id, get_session, get_vlab_and_project, get_starting_agent, + get_user_id, get_session, get_vlab_and_project, get_starting_agent, get_kg_token, ) from swarm_copy.new_types import Agent @@ -240,7 +240,7 @@ async def test_get_vlab_and_project( @pytest.mark.asyncio async def test_get_vlab_and_project_no_info_in_headers( - patch_required_env, httpx_mock, db_connection, monkeypatch + patch_required_env, db_connection, monkeypatch ): # Setup DB with one thread to do the tests monkeypatch.setenv("NEUROAGENT_KEYCLOAK__VALIDATE_TOKEN", "true") @@ -367,8 +367,26 @@ async def test_get_vlab_and_project_valid_thread_id( await engine.dispose() -def test_get_starting_agent(patch_required_env, monkeypatch): +def test_get_starting_agent(patch_required_env): settings = Settings() agent = get_starting_agent(None, settings) assert isinstance(agent, Agent) + + +@pytest.mark.parametrize( + "input_token, expected_token", + [ + ("existing_token", "existing_token"), + (None, "new_token"), + ], +) +def test_get_kg_token(patch_required_env, input_token, expected_token): + settings = Settings() + mock = Mock() + mock.token.return_value = {"access_token": expected_token} + with ( + patch("swarm_copy.app.dependencies.KeycloakOpenID", return_value=mock), + ): + result = get_kg_token(settings, input_token) + assert result == expected_token From 7301d4e3fe63c4e08530b3a6c4c9f2fc7b577d6d Mon Sep 17 00:00:00 2001 From: kanesoban Date: Mon, 9 Dec 2024 11:53:06 +0100 Subject: [PATCH 15/22] Added test lifespan --- swarm_copy_tests/app/test_main.py | 58 +++++++++++++++++++++++++++++++ 1 file changed, 58 insertions(+) diff --git a/swarm_copy_tests/app/test_main.py b/swarm_copy_tests/app/test_main.py index 2af6526..f573757 100644 --- a/swarm_copy_tests/app/test_main.py +++ b/swarm_copy_tests/app/test_main.py @@ -1,3 +1,12 @@ +import logging +from unittest.mock import patch + +from fastapi.testclient import TestClient + +from neuroagent.app.dependencies import get_settings +from neuroagent.app.main import app + + def test_settings_endpoint(app_client, dont_look_at_env_file, settings): response = app_client.get("/settings") @@ -15,3 +24,52 @@ def test_readyz(app_client): body = response.json() assert isinstance(body, dict) assert body["status"] == "ok" + + +def test_lifespan(caplog, monkeypatch, tmp_path, patch_required_env, db_connection): + get_settings.cache_clear() + caplog.set_level(logging.INFO) + + monkeypatch.setenv("NEUROAGENT_LOGGING__LEVEL", "info") + monkeypatch.setenv("NEUROAGENT_LOGGING__EXTERNAL_PACKAGES", 
"warning") + monkeypatch.setenv("NEUROAGENT_KNOWLEDGE_GRAPH__DOWNLOAD_HIERARCHY", "true") + monkeypatch.setenv("NEUROAGENT_DB__PREFIX", db_connection) + + save_path_brainregion = tmp_path / "fake.json" + + async def save_dummy(*args, **kwargs): + with open(save_path_brainregion, "w") as f: + f.write("test_text") + + with ( + patch("neuroagent.app.main.get_update_kg_hierarchy", new=save_dummy), + patch("neuroagent.app.main.get_cell_types_kg_hierarchy", new=save_dummy), + patch("neuroagent.app.main.get_kg_token", new=lambda *args, **kwargs: "dev"), + ): + # The with statement triggers the startup. + with TestClient(app) as test_client: + test_client.get("/healthz") + # check if the brain region dummy file was created. + assert save_path_brainregion.exists() + + assert caplog.record_tuples[0][::2] == ( + "neuroagent.app.dependencies", + "Reading the environment and instantiating settings", + ) + + assert ( + logging.getLevelName(logging.getLogger("neuroagent").getEffectiveLevel()) + == "INFO" + ) + assert ( + logging.getLevelName(logging.getLogger("httpx").getEffectiveLevel()) + == "WARNING" + ) + assert ( + logging.getLevelName(logging.getLogger("fastapi").getEffectiveLevel()) + == "WARNING" + ) + assert ( + logging.getLevelName(logging.getLogger("bluepyefe").getEffectiveLevel()) + == "CRITICAL" + ) From d01e07c961974e1379c9c31575c21dcb770fe219 Mon Sep 17 00:00:00 2001 From: kanesoban Date: Mon, 9 Dec 2024 11:59:57 +0100 Subject: [PATCH 16/22] lint --- swarm_copy/tools/bluenaas_memodel_getall.py | 8 +++----- tests/conftest.py | 2 -- 2 files changed, 3 insertions(+), 7 deletions(-) diff --git a/swarm_copy/tools/bluenaas_memodel_getall.py b/swarm_copy/tools/bluenaas_memodel_getall.py index bc42146..4de23a5 100644 --- a/swarm_copy/tools/bluenaas_memodel_getall.py +++ b/swarm_copy/tools/bluenaas_memodel_getall.py @@ -29,11 +29,9 @@ class InputMEModelGetAll(BaseModel): page_size: int = Field( default=20, description="Number of results returned by the API." 
) - memodel_type: Literal["single-neuron-simulation", "synaptome-simulation"] = ( - Field( - default="single-neuron-simulation", - description="Type of simulation to retrieve.", - ) + memodel_type: Literal["single-neuron-simulation", "synaptome-simulation"] = Field( + default="single-neuron-simulation", + description="Type of simulation to retrieve.", ) diff --git a/tests/conftest.py b/tests/conftest.py index dc7e6e2..f30bed9 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -6,14 +6,12 @@ import pytest import pytest_asyncio from fastapi.testclient import TestClient -from httpx import AsyncClient from sqlalchemy import MetaData from sqlalchemy.ext.asyncio import AsyncSession, create_async_engine from swarm_copy.app.config import Settings from swarm_copy.app.dependencies import get_kg_token, get_settings from swarm_copy.app.main import app -from swarm_copy.tools import GetMorphoTool @pytest.fixture(name="app_client") From 2a2a8580b631c7f8019ce9d7cbb373e0c858ec4c Mon Sep 17 00:00:00 2001 From: kanesoban Date: Mon, 9 Dec 2024 12:06:55 +0100 Subject: [PATCH 17/22] unit tests --- swarm_copy/tools/bluenaas_memodel_getall.py | 2 +- swarm_copy_tests/app/test_dependencies.py | 5 +---- 2 files changed, 2 insertions(+), 5 deletions(-) diff --git a/swarm_copy/tools/bluenaas_memodel_getall.py b/swarm_copy/tools/bluenaas_memodel_getall.py index 4de23a5..a7cee19 100644 --- a/swarm_copy/tools/bluenaas_memodel_getall.py +++ b/swarm_copy/tools/bluenaas_memodel_getall.py @@ -55,7 +55,7 @@ async def arun(self) -> PaginatedResponseUnionMEModelResponseSynaptomeModelRespo response = await self.metadata.httpx_client.get( url=f"{self.metadata.bluenaas_url}/neuron-model/{self.metadata.vlab_id}/{self.metadata.project_id}/me-models", params={ - "simulation_type": self.input_schema.simulation_type, + "simulation_type": self.input_schema.memodel_type, "offset": self.input_schema.offset, "page_size": self.input_schema.page_size, }, diff --git a/swarm_copy_tests/app/test_dependencies.py b/swarm_copy_tests/app/test_dependencies.py index 36390b0..cadd08c 100644 --- a/swarm_copy_tests/app/test_dependencies.py +++ b/swarm_copy_tests/app/test_dependencies.py @@ -314,15 +314,12 @@ async def test_get_vlab_and_project_valid_thread_id( user_id = "Super_user" token = "fake_token" httpx_client = AsyncClient() - httpx_mock.add_response( - url=f"{test_settings.virtual_lab.get_project_url}/test_vlab/projects/test_project", - json="test_project_ID", - ) httpx_mock.add_response( url=f"{test_settings.virtual_lab.get_project_url}/test_vlab_DB/projects/project_id_DB", json="test_project_ID", ) + # create test thread table async with engine.begin() as conn: await conn.run_sync(Base.metadata.create_all) From d7c13c8f66b889052c2e3c7149ce91890b736ee6 Mon Sep 17 00:00:00 2001 From: kanesoban Date: Mon, 9 Dec 2024 12:18:11 +0100 Subject: [PATCH 18/22] Revert conftest.py --- tests/conftest.py | 92 +++++++++++++++++++++++++++++++++++++++++++++-- 1 file changed, 89 insertions(+), 3 deletions(-) diff --git a/tests/conftest.py b/tests/conftest.py index f30bed9..88ee7ea 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -6,12 +6,16 @@ import pytest import pytest_asyncio from fastapi.testclient import TestClient +from httpx import AsyncClient +from langchain_core.language_models.fake_chat_models import GenericFakeChatModel +from langchain_core.messages import AIMessage from sqlalchemy import MetaData from sqlalchemy.ext.asyncio import AsyncSession, create_async_engine -from swarm_copy.app.config import Settings -from 
swarm_copy.app.dependencies import get_kg_token, get_settings -from swarm_copy.app.main import app +from neuroagent.app.config import Settings +from neuroagent.app.dependencies import get_kg_token, get_settings +from neuroagent.app.main import app +from neuroagent.tools import GetMorphoTool @pytest.fixture(name="app_client") @@ -101,3 +105,85 @@ def get_resolve_query_output(): def brain_region_json_path(): br_path = Path(__file__).parent / "data" / "brainregion_hierarchy.json" return br_path + + +@pytest.fixture +async def fake_llm_with_tools(brain_region_json_path): + class FakeFuntionChatModel(GenericFakeChatModel): + def bind_tools(self, functions: list): + return self + + def bind_functions(self, **kwargs): + return self + + # If you need another fake response to use different tools, + # you can do in your test + # ```python + # llm, _ = await anext(fake_llm_with_tools) + # llm.responses = my_fake_responses + # ``` + # and simply bind the corresponding tools + fake_responses = [ + AIMessage( + content="", + additional_kwargs={ + "tool_calls": [ + { + "index": 0, + "id": "call_zHhwfNLSvGGHXMoILdIYtDVI", + "function": { + "arguments": '{"brain_region_id":"http://api.brain-map.org/api/v2/data/Structure/549"}', + "name": "get-morpho-tool", + }, + "type": "function", + } + ] + }, + response_metadata={"finish_reason": "tool_calls"}, + id="run-3828644d-197b-401b-8634-e6ecf01c2e7c-0", + tool_calls=[ + { + "name": "get-morpho-tool", + "args": { + "brain_region_id": ( + "http://api.brain-map.org/api/v2/data/Structure/549" + ) + }, + "id": "call_zHhwfNLSvGGHXMoILdIYtDVI", + } + ], + ), + AIMessage( + content="Great answer", + response_metadata={"finish_reason": "stop"}, + id="run-42768b30-044a-4263-8c5c-da61429aa9da-0", + ), + ] + + # If you use this tool in your test, DO NOT FORGET to mock the url response with the following snippet: + # + # ```python + # json_path = Path(__file__).resolve().parent.parent / "data" / "knowledge_graph.json" + # with open(json_path) as f: + # knowledge_graph_response = json.load(f) + + # httpx_mock.add_response( + # url="http://fake_url", + # json=knowledge_graph_response, + # ) + # ``` + # The http call is not mocked here because one might want to change the responses + # and the tools used. 
+ async_client = AsyncClient() + tool = GetMorphoTool( + metadata={ + "url": "http://fake_url", + "search_size": 2, + "httpx_client": async_client, + "token": "fake_token", + "brainregion_path": brain_region_json_path, + } + ) + + yield FakeFuntionChatModel(messages=iter(fake_responses)), [tool], fake_responses + await async_client.aclose() From 8d8e93e67f00299542fa6f1cdb478df87f55c127 Mon Sep 17 00:00:00 2001 From: kanesoban Date: Wed, 11 Dec 2024 08:02:06 +0100 Subject: [PATCH 19/22] Review comments --- swarm_copy/app/dependencies.py | 1 - swarm_copy/tools/bluenaas_memodel_getall.py | 2 +- swarm_copy_tests/app/test_dependencies.py | 2 - swarm_copy_tests/app/test_main.py | 4 +- swarm_copy_tests/conftest.py | 92 +-------------------- 5 files changed, 7 insertions(+), 94 deletions(-) diff --git a/swarm_copy/app/dependencies.py b/swarm_copy/app/dependencies.py index 27e7452..0a1dd74 100644 --- a/swarm_copy/app/dependencies.py +++ b/swarm_copy/app/dependencies.py @@ -195,7 +195,6 @@ async def get_vlab_and_project( } else: thread_id = request.path_params.get("thread_id") - logging.error(f"*** Thread id: {thread_id}, User id: {user_id} ***") thread_result = await session.execute( select(Threads).where( Threads.user_id == user_id, Threads.thread_id == thread_id diff --git a/swarm_copy/tools/bluenaas_memodel_getall.py b/swarm_copy/tools/bluenaas_memodel_getall.py index a7cee19..1f77bc8 100644 --- a/swarm_copy/tools/bluenaas_memodel_getall.py +++ b/swarm_copy/tools/bluenaas_memodel_getall.py @@ -55,7 +55,7 @@ async def arun(self) -> PaginatedResponseUnionMEModelResponseSynaptomeModelRespo response = await self.metadata.httpx_client.get( url=f"{self.metadata.bluenaas_url}/neuron-model/{self.metadata.vlab_id}/{self.metadata.project_id}/me-models", params={ - "simulation_type": self.input_schema.memodel_type, + "model_type": self.input_schema.memodel_type, "offset": self.input_schema.offset, "page_size": self.input_schema.page_size, }, diff --git a/swarm_copy_tests/app/test_dependencies.py b/swarm_copy_tests/app/test_dependencies.py index cadd08c..ad8b8f2 100644 --- a/swarm_copy_tests/app/test_dependencies.py +++ b/swarm_copy_tests/app/test_dependencies.py @@ -208,7 +208,6 @@ async def test_get_vlab_and_project( ) session.add(new_thread) await session.commit() - await session.refresh(new_thread) try: # Test with info in headers. @@ -265,7 +264,6 @@ async def test_get_vlab_and_project_no_info_in_headers( ) session.add(new_thread) await session.commit() - await session.refresh(new_thread) try: # Test with no infos in headers. 
diff --git a/swarm_copy_tests/app/test_main.py b/swarm_copy_tests/app/test_main.py index f573757..e286646 100644 --- a/swarm_copy_tests/app/test_main.py +++ b/swarm_copy_tests/app/test_main.py @@ -3,8 +3,8 @@ from fastapi.testclient import TestClient -from neuroagent.app.dependencies import get_settings -from neuroagent.app.main import app +from swarm_copy.app.dependencies import get_settings +from swarm_copy.app.main import app def test_settings_endpoint(app_client, dont_look_at_env_file, settings): diff --git a/swarm_copy_tests/conftest.py b/swarm_copy_tests/conftest.py index 8acd252..aeba3fc 100644 --- a/swarm_copy_tests/conftest.py +++ b/swarm_copy_tests/conftest.py @@ -7,15 +7,13 @@ import pytest_asyncio from fastapi.testclient import TestClient from httpx import AsyncClient -from langchain_core.language_models.fake_chat_models import GenericFakeChatModel -from langchain_core.messages import AIMessage from sqlalchemy import MetaData from sqlalchemy.ext.asyncio import AsyncSession, create_async_engine -from neuroagent.app.config import Settings -from neuroagent.app.dependencies import get_kg_token, get_settings -from neuroagent.app.main import app -from neuroagent.tools import GetMorphoTool +from swarm_copy.app.config import Settings +from swarm_copy.app.dependencies import get_kg_token, get_settings +from swarm_copy.app.main import app +from swarm_copy.tools import GetMorphoTool @pytest.fixture(name="settings") @@ -126,85 +124,3 @@ def get_resolve_query_output(): def brain_region_json_path(): br_path = Path(__file__).parent / "data" / "brainregion_hierarchy.json" return br_path - - -@pytest.fixture -async def fake_llm_with_tools(brain_region_json_path): - class FakeFuntionChatModel(GenericFakeChatModel): - def bind_tools(self, functions: list): - return self - - def bind_functions(self, **kwargs): - return self - - # If you need another fake response to use different tools, - # you can do in your test - # ```python - # llm, _ = await anext(fake_llm_with_tools) - # llm.responses = my_fake_responses - # ``` - # and simply bind the corresponding tools - fake_responses = [ - AIMessage( - content="", - additional_kwargs={ - "tool_calls": [ - { - "index": 0, - "id": "call_zHhwfNLSvGGHXMoILdIYtDVI", - "function": { - "arguments": '{"brain_region_id":"http://api.brain-map.org/api/v2/data/Structure/549"}', - "name": "get-morpho-tool", - }, - "type": "function", - } - ] - }, - response_metadata={"finish_reason": "tool_calls"}, - id="run-3828644d-197b-401b-8634-e6ecf01c2e7c-0", - tool_calls=[ - { - "name": "get-morpho-tool", - "args": { - "brain_region_id": ( - "http://api.brain-map.org/api/v2/data/Structure/549" - ) - }, - "id": "call_zHhwfNLSvGGHXMoILdIYtDVI", - } - ], - ), - AIMessage( - content="Great answer", - response_metadata={"finish_reason": "stop"}, - id="run-42768b30-044a-4263-8c5c-da61429aa9da-0", - ), - ] - - # If you use this tool in your test, DO NOT FORGET to mock the url response with the following snippet: - # - # ```python - # json_path = Path(__file__).resolve().parent.parent / "data" / "knowledge_graph.json" - # with open(json_path) as f: - # knowledge_graph_response = json.load(f) - - # httpx_mock.add_response( - # url="http://fake_url", - # json=knowledge_graph_response, - # ) - # ``` - # The http call is not mocked here because one might want to change the responses - # and the tools used. 
- async_client = AsyncClient() - tool = GetMorphoTool( - metadata={ - "url": "http://fake_url", - "search_size": 2, - "httpx_client": async_client, - "token": "fake_token", - "brainregion_path": brain_region_json_path, - } - ) - - yield FakeFuntionChatModel(messages=iter(fake_responses)), [tool], fake_responses - await async_client.aclose() From 3207b856c4f2a21296f01eacfd10d6062b17cb37 Mon Sep 17 00:00:00 2001 From: kanesoban Date: Wed, 11 Dec 2024 08:14:27 +0100 Subject: [PATCH 20/22] Fixed lifespan test --- swarm_copy_tests/app/test_main.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/swarm_copy_tests/app/test_main.py b/swarm_copy_tests/app/test_main.py index e286646..23f4299 100644 --- a/swarm_copy_tests/app/test_main.py +++ b/swarm_copy_tests/app/test_main.py @@ -42,9 +42,9 @@ async def save_dummy(*args, **kwargs): f.write("test_text") with ( - patch("neuroagent.app.main.get_update_kg_hierarchy", new=save_dummy), - patch("neuroagent.app.main.get_cell_types_kg_hierarchy", new=save_dummy), - patch("neuroagent.app.main.get_kg_token", new=lambda *args, **kwargs: "dev"), + patch("swarm_copy.app.main.get_update_kg_hierarchy", new=save_dummy), + patch("swarm_copy.app.main.get_cell_types_kg_hierarchy", new=save_dummy), + patch("swarm_copy.app.main.get_kg_token", new=lambda *args, **kwargs: "dev"), ): # The with statement triggers the startup. with TestClient(app) as test_client: @@ -53,12 +53,12 @@ async def save_dummy(*args, **kwargs): assert save_path_brainregion.exists() assert caplog.record_tuples[0][::2] == ( - "neuroagent.app.dependencies", + "swarm_copy.app.dependencies", "Reading the environment and instantiating settings", ) assert ( - logging.getLevelName(logging.getLogger("neuroagent").getEffectiveLevel()) + logging.getLevelName(logging.getLogger("swarm_copy").getEffectiveLevel()) == "INFO" ) assert ( From 0e6e11414336eca38f41d92fff8f37e94a76b8c2 Mon Sep 17 00:00:00 2001 From: kanesoban Date: Tue, 17 Dec 2024 14:02:59 +0100 Subject: [PATCH 21/22] Fixed fixture --- tests/conftest.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/conftest.py b/tests/conftest.py index 88ee7ea..65c4ac7 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -107,7 +107,7 @@ def brain_region_json_path(): return br_path -@pytest.fixture +@pytest_asyncio.fixture async def fake_llm_with_tools(brain_region_json_path): class FakeFuntionChatModel(GenericFakeChatModel): def bind_tools(self, functions: list): From 78ba3c98e20cea3fbc731498704460592aa9f069 Mon Sep 17 00:00:00 2001 From: kanesoban Date: Tue, 17 Dec 2024 16:57:38 +0100 Subject: [PATCH 22/22] Fixed test --- swarm_copy_tests/conftest.py | 21 +++++++++++++++++++++ 1 file changed, 21 insertions(+) diff --git a/swarm_copy_tests/conftest.py b/swarm_copy_tests/conftest.py index 31deefb..48c5a59 100644 --- a/swarm_copy_tests/conftest.py +++ b/swarm_copy_tests/conftest.py @@ -167,3 +167,24 @@ def get_resolve_query_output(): def brain_region_json_path(): br_path = Path(__file__).parent / "data" / "brainregion_hierarchy.json" return br_path + + +@pytest.fixture(name="settings") +def settings(): + return Settings( + tools={ + "literature": { + "url": "fake_literature_url", + }, + }, + knowledge_graph={ + "base_url": "https://fake_url/api/nexus/v1", + }, + openai={ + "token": "fake_token", + }, + keycloak={ + "username": "fake_username", + "password": "fake_password", + }, + )