diff --git a/llama_stack/apis/datasets/datasets.py b/llama_stack/apis/datasets/datasets.py index 9160e1e1..e2b764d7 100644 --- a/llama_stack/apis/datasets/datasets.py +++ b/llama_stack/apis/datasets/datasets.py @@ -50,11 +50,5 @@ async def get_dataset( dataset_identifier: str, ) -> Optional[DatasetDefWithProvider]: ... - @webmethod(route="/datasets/delete") - async def delete_dataset( - self, - dataset_identifier: str, - ) -> None: ... - @webmethod(route="/datasets/list", method="GET") async def list_datasets(self) -> List[DatasetDefWithProvider]: ... diff --git a/llama_stack/distribution/datatypes.py b/llama_stack/distribution/datatypes.py index 0044de09..10f78b78 100644 --- a/llama_stack/distribution/datatypes.py +++ b/llama_stack/distribution/datatypes.py @@ -14,11 +14,12 @@ from llama_stack.apis.models import * # noqa: F403 from llama_stack.apis.shields import * # noqa: F403 from llama_stack.apis.memory_banks import * # noqa: F403 +from llama_stack.apis.datasets import * # noqa: F403 +from llama_stack.apis.datasetio import DatasetIO from llama_stack.apis.inference import Inference from llama_stack.apis.memory import Memory from llama_stack.apis.safety import Safety - LLAMA_STACK_BUILD_CONFIG_VERSION = "2" LLAMA_STACK_RUN_CONFIG_VERSION = "2" @@ -30,18 +31,21 @@ ModelDef, ShieldDef, MemoryBankDef, + DatasetDef, ] RoutableObjectWithProvider = Union[ ModelDefWithProvider, ShieldDefWithProvider, MemoryBankDefWithProvider, + DatasetDefWithProvider, ] RoutedProtocol = Union[ Inference, Safety, Memory, + DatasetIO, ] diff --git a/llama_stack/distribution/distribution.py b/llama_stack/distribution/distribution.py index 999646cc..53d54447 100644 --- a/llama_stack/distribution/distribution.py +++ b/llama_stack/distribution/distribution.py @@ -35,6 +35,10 @@ def builtin_automatically_routed_apis() -> List[AutoRoutedApiInfo]: routing_table_api=Api.memory_banks, router_api=Api.memory, ), + AutoRoutedApiInfo( + routing_table_api=Api.datasets, + router_api=Api.datasetio, + ), 
] diff --git a/llama_stack/distribution/resolver.py b/llama_stack/distribution/resolver.py index 78d76e97..2e6b64a5 100644 --- a/llama_stack/distribution/resolver.py +++ b/llama_stack/distribution/resolver.py @@ -12,6 +12,8 @@ from llama_stack.distribution.datatypes import * # noqa: F403 from llama_stack.apis.agents import Agents +from llama_stack.apis.datasetio import DatasetIO +from llama_stack.apis.datasets import Datasets from llama_stack.apis.inference import Inference from llama_stack.apis.inspect import Inspect from llama_stack.apis.memory import Memory @@ -38,6 +40,8 @@ def api_protocol_map() -> Dict[Api, Any]: Api.safety: Safety, Api.shields: Shields, Api.telemetry: Telemetry, + Api.datasets: Datasets, + Api.datasetio: DatasetIO, } diff --git a/llama_stack/distribution/routers/__init__.py b/llama_stack/distribution/routers/__init__.py index 28851390..4970e93e 100644 --- a/llama_stack/distribution/routers/__init__.py +++ b/llama_stack/distribution/routers/__init__.py @@ -8,6 +8,7 @@ from llama_stack.distribution.datatypes import * # noqa: F403 from .routing_tables import ( + DatasetsRoutingTable, MemoryBanksRoutingTable, ModelsRoutingTable, ShieldsRoutingTable, @@ -23,6 +24,7 @@ async def get_routing_table_impl( "memory_banks": MemoryBanksRoutingTable, "models": ModelsRoutingTable, "shields": ShieldsRoutingTable, + "datasets": DatasetsRoutingTable, } if api.value not in api_to_tables: raise ValueError(f"API {api.value} not found in router map") @@ -33,12 +35,13 @@ async def get_routing_table_impl( async def get_auto_router_impl(api: Api, routing_table: RoutingTable, _deps) -> Any: - from .routers import InferenceRouter, MemoryRouter, SafetyRouter + from .routers import DatasetIORouter, InferenceRouter, MemoryRouter, SafetyRouter api_to_routers = { "memory": MemoryRouter, "inference": InferenceRouter, "safety": SafetyRouter, + "datasetio": DatasetIORouter, } if api.value not in api_to_routers: raise ValueError(f"API {api.value} not found in router map") diff 
--git a/llama_stack/distribution/routers/routers.py b/llama_stack/distribution/routers/routers.py index 26a0988e..31b8efa4 100644 --- a/llama_stack/distribution/routers/routers.py +++ b/llama_stack/distribution/routers/routers.py @@ -6,11 +6,13 @@ from typing import Any, AsyncGenerator, Dict, List +from llama_stack.apis.datasetio.datasetio import DatasetIO from llama_stack.distribution.datatypes import RoutingTable from llama_stack.apis.memory import * # noqa: F403 from llama_stack.apis.inference import * # noqa: F403 from llama_stack.apis.safety import * # noqa: F403 +from llama_stack.apis.datasetio import * # noqa: F403 class MemoryRouter(Memory): @@ -160,3 +162,33 @@ async def run_shield( messages=messages, params=params, ) + + +class DatasetIORouter(DatasetIO): + def __init__( + self, + routing_table: RoutingTable, + ) -> None: + self.routing_table = routing_table + + async def initialize(self) -> None: + pass + + async def shutdown(self) -> None: + pass + + async def get_rows_paginated( + self, + dataset_id: str, + rows_in_page: int, + page_token: Optional[str] = None, + filter_condition: Optional[str] = None, + ) -> PaginatedRowsResult: + return await self.routing_table.get_provider_impl( + dataset_id + ).get_rows_paginated( + dataset_id=dataset_id, + rows_in_page=rows_in_page, + page_token=page_token, + filter_condition=filter_condition, + ) diff --git a/llama_stack/distribution/routers/routing_tables.py b/llama_stack/distribution/routers/routing_tables.py index 597dbed0..db0946d8 100644 --- a/llama_stack/distribution/routers/routing_tables.py +++ b/llama_stack/distribution/routers/routing_tables.py @@ -11,6 +11,7 @@ from llama_stack.apis.models import * # noqa: F403 from llama_stack.apis.shields import * # noqa: F403 from llama_stack.apis.memory_banks import * # noqa: F403 +from llama_stack.apis.datasets import * # noqa: F403 from llama_stack.distribution.datatypes import * # noqa: F403 @@ -27,6 +28,10 @@ async def register_object_with_provider(obj: 
RoutableObject, p: Any) -> None: await p.register_shield(obj) elif api == Api.memory: await p.register_memory_bank(obj) + elif api == Api.datasetio: + await p.register_dataset(obj) + else: + raise ValueError(f"Unknown API {api} for registering object with provider") Registry = Dict[str, List[RoutableObjectWithProvider]] @@ -80,6 +85,16 @@ def add_objects(objs: List[RoutableObjectWithProvider]) -> None: add_objects(memory_banks) + elif api == Api.datasetio: + p.dataset_store = self + datasets = await p.list_datasets() + + # do in-memory updates due to pesky Annotated unions + for d in datasets: + d.provider_id = pid + + add_objects(datasets) + async def shutdown(self) -> None: for p in self.impls_by_provider_id.values(): await p.shutdown() @@ -137,6 +152,7 @@ async def register_object(self, obj: RoutableObjectWithProvider): raise ValueError(f"Provider `{obj.provider_id}` not found") p = self.impls_by_provider_id[obj.provider_id] + await register_object_with_provider(obj, p) if obj.identifier not in self.registry: @@ -190,3 +206,19 @@ async def register_memory_bank( self, memory_bank: MemoryBankDefWithProvider ) -> None: await self.register_object(memory_bank) + + +class DatasetsRoutingTable(CommonRoutingTableImpl, Datasets): + async def list_datasets(self) -> List[DatasetDefWithProvider]: + objects = [] + for objs in self.registry.values(): + objects.extend(objs) + return objects + + async def get_dataset( + self, dataset_identifier: str + ) -> Optional[DatasetDefWithProvider]: + return self.get_object_by_identifier(dataset_identifier) + + async def register_dataset(self, dataset_def: DatasetDefWithProvider) -> None: + await self.register_object(dataset_def) diff --git a/llama_stack/providers/datatypes.py b/llama_stack/providers/datatypes.py index 777cd855..d7e2d4d0 100644 --- a/llama_stack/providers/datatypes.py +++ b/llama_stack/providers/datatypes.py @@ -10,6 +10,8 @@ from llama_models.schema_utils import json_schema_type from pydantic import BaseModel, Field +from 
llama_stack.apis.datasets import DatasetDef + from llama_stack.apis.memory_banks import MemoryBankDef from llama_stack.apis.models import ModelDef @@ -22,12 +24,14 @@ class Api(Enum): safety = "safety" agents = "agents" memory = "memory" + datasetio = "datasetio" telemetry = "telemetry" models = "models" shields = "shields" memory_banks = "memory_banks" + datasets = "datasets" # built-in API inspect = "inspect" @@ -51,6 +55,12 @@ async def list_memory_banks(self) -> List[MemoryBankDef]: ... async def register_memory_bank(self, memory_bank: MemoryBankDef) -> None: ... +class DatasetsProtocolPrivate(Protocol): + async def list_datasets(self) -> List[DatasetDef]: ... + + async def register_dataset(self, dataset_def: DatasetDef) -> None: ... + + @json_schema_type class ProviderSpec(BaseModel): api: Api diff --git a/llama_stack/providers/impls/meta_reference/datasetio/__init__.py b/llama_stack/providers/impls/meta_reference/datasetio/__init__.py new file mode 100644 index 00000000..9a65f5c3 --- /dev/null +++ b/llama_stack/providers/impls/meta_reference/datasetio/__init__.py @@ -0,0 +1,18 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# All rights reserved. +# +# This source code is licensed under the terms described in the LICENSE file in +# the root directory of this source tree. + +from .config import MetaReferenceDatasetIOConfig + + +async def get_provider_impl( + config: MetaReferenceDatasetIOConfig, + _deps, +): + from .datasetio import MetaReferenceDatasetIOImpl + + impl = MetaReferenceDatasetIOImpl(config) + await impl.initialize() + return impl diff --git a/llama_stack/providers/impls/meta_reference/datasetio/config.py b/llama_stack/providers/impls/meta_reference/datasetio/config.py new file mode 100644 index 00000000..e667e325 --- /dev/null +++ b/llama_stack/providers/impls/meta_reference/datasetio/config.py @@ -0,0 +1,9 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# All rights reserved. 
+# +# This source code is licensed under the terms described in the LICENSE file in +# the root directory of this source tree. +from llama_stack.apis.datasetio import * # noqa: F401, F403 + + +class MetaReferenceDatasetIOConfig(BaseModel): ... diff --git a/llama_stack/providers/impls/meta_reference/datasetio/datasetio.py b/llama_stack/providers/impls/meta_reference/datasetio/datasetio.py new file mode 100644 index 00000000..a8e648e4 --- /dev/null +++ b/llama_stack/providers/impls/meta_reference/datasetio/datasetio.py @@ -0,0 +1,142 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# All rights reserved. +# +# This source code is licensed under the terms described in the LICENSE file in +# the root directory of this source tree. +from typing import List, Optional + +import pandas + +from llama_models.llama3.api.datatypes import * # noqa: F403 + +from llama_stack.apis.datasetio import * # noqa: F403 +from abc import ABC, abstractmethod +from dataclasses import dataclass + +from llama_stack.providers.datatypes import DatasetsProtocolPrivate + +from .config import MetaReferenceDatasetIOConfig + + +class BaseDataset(ABC): + def __init__(self, *args, **kwargs) -> None: + super().__init__(*args, **kwargs) + + @abstractmethod + def __len__(self) -> int: + raise NotImplementedError() + + @abstractmethod + def __getitem__(self, idx): + raise NotImplementedError() + + @abstractmethod + def load(self): + raise NotImplementedError() + + +@dataclass +class DatasetInfo: + dataset_def: DatasetDef + dataset_impl: BaseDataset + + +class PandasDataframeDataset(BaseDataset): + def __init__(self, dataset_def: DatasetDef, *args, **kwargs) -> None: + super().__init__(*args, **kwargs) + self.dataset_def = dataset_def + self.df = None + + def __len__(self) -> int: + assert self.df is not None, "Dataset not loaded. 
Please call .load() first" + return len(self.df) + + def __getitem__(self, idx): + if isinstance(idx, slice): + return self.df.iloc[idx].to_dict(orient="records") + else: + return self.df.iloc[idx].to_dict() + + def load(self) -> None: + if self.df is not None: + return + + # TODO: more robust support w/ data url + if self.dataset_def.url.uri.endswith(".csv"): + df = pandas.read_csv(self.dataset_def.url.uri) + elif self.dataset_def.url.uri.endswith(".xlsx"): + df = pandas.read_excel(self.dataset_def.url.uri) + elif self.dataset_def.url.uri.startswith("data:"): + parts = parse_data_url(self.dataset_def.url.uri) + data = parts["data"] + if parts["is_base64"]: + data = base64.b64decode(data) + else: + data = unquote(data) + encoding = parts["encoding"] or "utf-8" + data = data.encode(encoding) + + mime_type = parts["mimetype"] + mime_category = mime_type.split("/")[0] + data_bytes = io.BytesIO(data) + + if mime_category == "text": + df = pandas.read_csv(data_bytes) + else: + df = pandas.read_excel(data_bytes) + else: + raise ValueError(f"Unsupported file type: {self.dataset_def.url}") + + self.df = df + + +class MetaReferenceDatasetIOImpl(DatasetIO, DatasetsProtocolPrivate): + def __init__(self, config: MetaReferenceDatasetIOConfig) -> None: + self.config = config + # local registry for keeping track of datasets within the provider + self.dataset_infos = {} + + async def initialize(self) -> None: ... + + async def shutdown(self) -> None: ... 
+ + async def register_dataset( + self, + dataset_def: DatasetDef, + ) -> None: + dataset_impl = PandasDataframeDataset(dataset_def) + self.dataset_infos[dataset_def.identifier] = DatasetInfo( + dataset_def=dataset_def, + dataset_impl=dataset_impl, + ) + + async def list_datasets(self) -> List[DatasetDef]: + return [i.dataset_def for i in self.dataset_infos.values()] + + async def get_rows_paginated( + self, + dataset_id: str, + rows_in_page: int, + page_token: Optional[str] = None, + filter_condition: Optional[str] = None, + ) -> PaginatedRowsResult: + dataset_info = self.dataset_infos.get(dataset_id) + dataset_info.dataset_impl.load() + + if page_token is None: + next_page_token = 0 + else: + next_page_token = int(page_token) + + start = next_page_token + if rows_in_page == -1: + end = len(dataset_info.dataset_impl) + else: + end = min(start + rows_in_page, len(dataset_info.dataset_impl)) + rows = dataset_info.dataset_impl[start:end] + + return PaginatedRowsResult( + rows=rows, + total_count=len(rows), + next_page_token=str(end), + ) diff --git a/llama_stack/providers/registry/datasetio.py b/llama_stack/providers/registry/datasetio.py new file mode 100644 index 00000000..27e80ff5 --- /dev/null +++ b/llama_stack/providers/registry/datasetio.py @@ -0,0 +1,22 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# All rights reserved. +# +# This source code is licensed under the terms described in the LICENSE file in +# the root directory of this source tree. 
+ +from typing import List + +from llama_stack.distribution.datatypes import * # noqa: F403 + + +def available_providers() -> List[ProviderSpec]: + return [ + InlineProviderSpec( + api=Api.datasetio, + provider_type="meta-reference", + pip_packages=["pandas"], + module="llama_stack.providers.impls.meta_reference.datasetio", + config_class="llama_stack.providers.impls.meta_reference.datasetio.MetaReferenceDatasetIOConfig", + api_dependencies=[], + ), + ] diff --git a/llama_stack/providers/tests/datasetio/__init__.py b/llama_stack/providers/tests/datasetio/__init__.py new file mode 100644 index 00000000..756f351d --- /dev/null +++ b/llama_stack/providers/tests/datasetio/__init__.py @@ -0,0 +1,5 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# All rights reserved. +# +# This source code is licensed under the terms described in the LICENSE file in +# the root directory of this source tree. diff --git a/llama_stack/providers/tests/datasetio/provider_config_example.yaml b/llama_stack/providers/tests/datasetio/provider_config_example.yaml new file mode 100644 index 00000000..c0565a39 --- /dev/null +++ b/llama_stack/providers/tests/datasetio/provider_config_example.yaml @@ -0,0 +1,4 @@ +providers: + - provider_id: test-meta + provider_type: meta-reference + config: {} diff --git a/llama_stack/providers/tests/datasetio/test_datasetio.py b/llama_stack/providers/tests/datasetio/test_datasetio.py new file mode 100644 index 00000000..85235a64 --- /dev/null +++ b/llama_stack/providers/tests/datasetio/test_datasetio.py @@ -0,0 +1,109 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# All rights reserved. +# +# This source code is licensed under the terms described in the LICENSE file in +# the root directory of this source tree. 
+import os + +import pytest +import pytest_asyncio + +from llama_stack.apis.datasetio import * # noqa: F403 +from llama_stack.distribution.datatypes import * # noqa: F403 +from llama_stack.providers.tests.resolver import resolve_impls_for_test + +# How to run this test: +# +# 1. Ensure you have a conda with the right dependencies installed. This is a bit tricky +# since it depends on the provider you are testing. On top of that you need +# `pytest` and `pytest-asyncio` installed. +# +# 2. Copy and modify the provider_config_example.yaml depending on the provider you are testing. +# +# 3. Run: +# +# ```bash +# PROVIDER_ID= \ +# PROVIDER_CONFIG=provider_config.yaml \ +# pytest -s llama_stack/providers/tests/datasetio/test_datasetio.py \ +# --tb=short --disable-warnings +# ``` + + +@pytest_asyncio.fixture(scope="session") +async def datasetio_settings(): + impls = await resolve_impls_for_test( + Api.datasetio, + ) + return { + "datasetio_impl": impls[Api.datasetio], + "datasets_impl": impls[Api.datasets], + } + + +async def register_dataset(datasets_impl: Datasets): + dataset = DatasetDefWithProvider( + identifier="test_dataset", + provider_id=os.environ["PROVIDER_ID"], + url=URL( + uri="https://openaipublic.blob.core.windows.net/simple-evals/mmlu.csv", + ), + columns_schema={}, + ) + await datasets_impl.register_dataset(dataset) + + +@pytest.mark.asyncio +async def test_datasets_list(datasetio_settings): + # NOTE: this needs you to ensure that you are starting from a clean state + # but so far we don't have an unregister API unfortunately, so be careful + datasets_impl = datasetio_settings["datasets_impl"] + response = await datasets_impl.list_datasets() + assert isinstance(response, list) + assert len(response) == 0 + + +@pytest.mark.asyncio +async def test_datasets_register(datasetio_settings): + # NOTE: this needs you to ensure that you are starting from a clean state + # but so far we don't have an unregister API unfortunately, so be careful + datasets_impl = 
datasetio_settings["datasets_impl"] + await register_dataset(datasets_impl) + + response = await datasets_impl.list_datasets() + assert isinstance(response, list) + assert len(response) == 1 + + # register same dataset with same id again will fail + await register_dataset(datasets_impl) + response = await datasets_impl.list_datasets() + assert isinstance(response, list) + assert len(response) == 1 + assert response[0].identifier == "test_dataset" + + +@pytest.mark.asyncio +async def test_get_rows_paginated(datasetio_settings): + datasetio_impl = datasetio_settings["datasetio_impl"] + datasets_impl = datasetio_settings["datasets_impl"] + await register_dataset(datasets_impl) + + response = await datasetio_impl.get_rows_paginated( + dataset_id="test_dataset", + rows_in_page=3, + ) + + assert isinstance(response.rows, list) + assert len(response.rows) == 3 + assert response.next_page_token == "3" + + # iterate over all rows + response = await datasetio_impl.get_rows_paginated( + dataset_id="test_dataset", + rows_in_page=10, + page_token=response.next_page_token, + ) + + assert isinstance(response.rows, list) + assert len(response.rows) == 10 + assert response.next_page_token == "13" diff --git a/tests/examples/evals-tgi-run.yaml b/tests/examples/evals-tgi-run.yaml new file mode 100644 index 00000000..8edb050c --- /dev/null +++ b/tests/examples/evals-tgi-run.yaml @@ -0,0 +1,52 @@ +version: '2' +built_at: '2024-10-08T17:40:45.325529' +image_name: local +docker_image: null +conda_env: local +apis: +- shields +- safety +- agents +- models +- memory +- memory_banks +- inference +- datasets +- datasetio +providers: + datasetio: + - provider_id: meta0 + provider_type: meta-reference + config: {} + inference: + - provider_id: tgi0 + provider_type: remote::tgi + config: + url: http://127.0.0.1:5009 + memory: + - provider_id: meta-reference + provider_type: meta-reference + config: {} + agents: + - provider_id: meta-reference + provider_type: meta-reference + config: + 
persistence_store: + namespace: null + type: sqlite + db_path: ~/.llama/runtime/kvstore.db + telemetry: + - provider_id: meta-reference + provider_type: meta-reference + config: {} + safety: + - provider_id: meta-reference + provider_type: meta-reference + config: + llama_guard_shield: + model: Llama-Guard-3-1B + excluded_categories: [] + disable_input_check: false + disable_output_check: false + prompt_guard_shield: + model: Prompt-Guard-86M