Initial openapi impl
vblagoje committed May 28, 2024
1 parent 80d80cb commit cd297c1
Showing 37 changed files with 4,042 additions and 1 deletion.
6 changes: 6 additions & 0 deletions haystack_experimental/components/connectors/__init__.py
@@ -0,0 +1,6 @@
# SPDX-FileCopyrightText: 2022-present deepset GmbH <info@deepset.ai>
#
# SPDX-License-Identifier: Apache-2.0
from haystack_experimental.components.connectors.openapi import OpenAPIServiceConnector

__all__ = ["OpenAPIServiceConnector"]
125 changes: 125 additions & 0 deletions haystack_experimental/components/connectors/openapi.py
@@ -0,0 +1,125 @@
# SPDX-FileCopyrightText: 2022-present deepset GmbH <info@deepset.ai>
#
# SPDX-License-Identifier: Apache-2.0

import json
from typing import Any, Dict, List, Optional, Union

from haystack import component, logging
from haystack.dataclasses import ChatMessage, ChatRole

from haystack_experimental.util.openapi import ClientConfigurationBuilder, OpenAPIServiceClient, validate_provider

logger = logging.getLogger(__name__)


@component
class OpenAPIServiceConnector:
"""
The `OpenAPIServiceConnector` component connects the Haystack framework to OpenAPI services.
    It integrates with the `ChatMessage` dataclass, where the payload in messages is used to determine the method to
    be called and the parameters to be passed. The response from the service is returned as a `ChatMessage`.
    Function calling payloads from OpenAI, Anthropic, and Cohere LLMs are supported.
    Before using this component, users usually resolve function calling definitions with the help of the
    `OpenAPIServiceToFunctions` component.
    The example below demonstrates how to use the `OpenAPIServiceConnector` to invoke a method on the
    https://serper.dev/ service specified via an OpenAPI specification.
    Note, however, that `OpenAPIServiceConnector` is usually not meant to be used directly, but rather as part of a
    pipeline that includes the `OpenAPIServiceToFunctions` component and an `OpenAIChatGenerator` component using an
    LLM with function calling capabilities. In the example below we use the function calling payload directly, but in
    a real-world scenario the payload would usually be generated by the `OpenAIChatGenerator` component.
Usage example:
```python
import json
import requests
from haystack_experimental.components.connectors import OpenAPIServiceConnector
from haystack.dataclasses import ChatMessage
fc_payload = [{'function': {'arguments': '{"q": "Why was Sam Altman ousted from OpenAI?"}', 'name': 'search'},
'id': 'call_PmEBYvZ7mGrQP5PUASA5m9wO', 'type': 'function'}]
    serper_token = "<your_serper_dev_token>"
serperdev_openapi_spec = json.loads(requests.get("https://bit.ly/serper_dev_spec").text)
service_connector = OpenAPIServiceConnector()
result = service_connector.run(messages=[ChatMessage.from_assistant(json.dumps(fc_payload))],
service_openapi_spec=serperdev_openapi_spec, service_credentials=serper_token)
print(result)
>> {'service_response': [ChatMessage(content='{"searchParameters": {"q": "Why was Sam Altman ousted from OpenAI?",
>> "type": "search", "engine": "google"}, "answerBox": {"snippet": "Concerns over AI safety and OpenAI\'s role
>> in protecting were at the center of Altman\'s brief ouster from the company."...
```
"""

def __init__(self, provider: Optional[str] = None):
"""
Initializes the OpenAPIServiceConnector instance.
"""
self.llm_provider = validate_provider(provider or "openai")

@component.output_types(service_response=Dict[str, Any])
def run(
self,
messages: List[ChatMessage],
service_openapi_spec: Dict[str, Any],
service_credentials: Optional[Union[dict, str]] = None,
) -> Dict[str, List[ChatMessage]]:
"""
Processes a list of chat messages to invoke a method on an OpenAPI service.
It parses the last message in the list, expecting it to contain an OpenAI function calling descriptor
(name & parameters) in JSON format.
:param messages: A list of `ChatMessage` objects containing the messages to be processed. The last message
should contain the function invocation payload in OpenAI function calling format. See the example in the class
docstring for the expected format.
:param service_openapi_spec: The OpenAPI JSON specification object of the service to be invoked.
:param service_credentials: The credentials to be used for authentication with the service.
Currently, only the http and apiKey OpenAPI security schemes are supported.
:return: A dictionary with the following keys:
- `service_response`: a list of `ChatMessage` objects, each containing the response from the service. The
response is in JSON format, and the `content` attribute of the `ChatMessage`
contains the JSON string.
:raises ValueError: If the last message is not from the assistant or if it does not contain the correct
payload to invoke a method on the service.
"""

last_message = messages[-1]
if not last_message.is_from(ChatRole.ASSISTANT):
raise ValueError(f"{last_message} is not from the assistant.")
if not last_message.content:
raise ValueError("Function calling message content is empty.")

builder = ClientConfigurationBuilder()
config_openapi = (
builder.with_openapi_spec(service_openapi_spec)
.with_credentials(service_credentials or {})
.with_provider(self.llm_provider)
.build()
)
logger.debug(f"Invoking service {config_openapi.get_openapi_spec().get_name()} with {last_message.content}")
openapi_service = OpenAPIServiceClient(config_openapi)
try:
payload = (
json.loads(last_message.content) if isinstance(last_message.content, str) else last_message.content
)
service_response = openapi_service.invoke(payload)
except Exception as e: # pylint: disable=broad-exception-caught
logger.error(f"Error invoking OpenAPI endpoint. Error: {e}")
service_response = {"error": str(e)}
response_messages = [ChatMessage.from_user(json.dumps(service_response))]

return {"service_response": response_messages}
6 changes: 6 additions & 0 deletions haystack_experimental/components/converters/__init__.py
@@ -0,0 +1,6 @@
# SPDX-FileCopyrightText: 2022-present deepset GmbH <info@deepset.ai>
#
# SPDX-License-Identifier: Apache-2.0
from haystack_experimental.components.converters.openapi import OpenAPIServiceToFunctions

__all__ = ["OpenAPIServiceToFunctions"]
83 changes: 83 additions & 0 deletions haystack_experimental/components/converters/openapi.py
@@ -0,0 +1,83 @@
# SPDX-FileCopyrightText: 2022-present deepset GmbH <info@deepset.ai>
#
# SPDX-License-Identifier: Apache-2.0

from pathlib import Path
from typing import Any, Dict, List, Optional, Union

from haystack import component, logging
from haystack.dataclasses.byte_stream import ByteStream

from haystack_experimental.util.openapi import ClientConfigurationBuilder, validate_provider

logger = logging.getLogger(__name__)


@component
class OpenAPIServiceToFunctions:
"""
Converts OpenAPI service schemas to a format suitable for OpenAI, Anthropic, or Cohere function calling.
The definition must respect OpenAPI specification 3.0.0 or higher.
It can be specified in JSON or YAML format.
    Each function must have:
    - a unique operationId
    - a description
    - a requestBody and/or parameters
    - a schema for the requestBody and/or parameters
    For more details on the OpenAPI specification, see the
    [official documentation](https://github.com/OAI/OpenAPI-Specification).
Usage example:
```python
from haystack_experimental.components.converters import OpenAPIServiceToFunctions
converter = OpenAPIServiceToFunctions()
result = converter.run(sources=["path/to/openapi_definition.yaml"])
assert result["functions"]
```
"""

MIN_REQUIRED_OPENAPI_SPEC_VERSION = 3

def __init__(self, provider: Optional[str] = None):
"""
Create an OpenAPIServiceToFunctions component.
:param provider: The LLM provider to use, defaults to "openai".
"""
self.llm_provider = validate_provider(provider or "openai")

@component.output_types(functions=List[Dict[str, Any]], openapi_specs=List[Dict[str, Any]])
def run(self, sources: List[Union[str, Path, ByteStream]]) -> Dict[str, Any]:
"""
        Converts OpenAPI definitions into an LLM-specific function calling format.
:param sources:
File paths or ByteStream objects of OpenAPI definitions (in JSON or YAML format).
:returns:
A dictionary with the following keys:
- functions: Function definitions in JSON object format
- openapi_specs: OpenAPI specs in JSON/YAML object format with resolved references
:raises RuntimeError:
If the OpenAPI definitions cannot be downloaded or processed.
:raises ValueError:
If the source type is not recognized or no functions are found in the OpenAPI definitions.
"""
all_extracted_fc_definitions: List[Dict[str, Any]] = []
all_openapi_specs = []

builder = ClientConfigurationBuilder()
for source in sources:
source = source.to_string() if isinstance(source, ByteStream) else source
            # To get tool definitions, all we need is the OpenAPI spec
config_openapi = builder.with_openapi_spec(source).with_provider(self.llm_provider).build()

all_extracted_fc_definitions.extend(config_openapi.get_tools_definitions())
all_openapi_specs.append(config_openapi.get_openapi_spec().to_dict(resolve_references=True))
if not all_extracted_fc_definitions:
logger.warning("No OpenAI function definitions extracted from the provided OpenAPI specification sources.")

return {"functions": all_extracted_fc_definitions, "openapi_specs": all_openapi_specs}