
Commit

Fix _process_params method when body is present (#429)
rjambrecic authored Oct 18, 2024
1 parent 7d0763b commit d1f2566
Showing 2 changed files with 80 additions and 9 deletions.
19 changes: 10 additions & 9 deletions fastagency/api/openapi/client.py
@@ -98,15 +98,16 @@ def _process_params(
 
         url = self.servers[0]["url"] + expanded_path
 
-        body_dict = (
-            {
-                "json": kwargs[body].model_dump()
-                if hasattr(kwargs[body], "model_dump")
-                else kwargs[body].dict()
-            }
-            if body and body in kwargs
-            else {}
-        )
+        body_dict = {}
+        if body and body in kwargs:
+            body_value = kwargs[body]
+            if isinstance(body_value, dict):
+                body_dict = {"json": body_value}
+            elif hasattr(body_value, "model_dump"):
+                body_dict = {"json": body_value.model_dump()}
+            else:
+                body_dict = {"json": body_value.dict()}
+
         body_dict["headers"] = {"Content-Type": "application/json"}
         if security:
             q_params, body_dict = kwargs["security"].add_security(q_params, body_dict)
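
Why the change: the old ternary always called a Pydantic method on the body (model_dump() for v2, falling back to .dict()), so a plain-dict body raised AttributeError. The new code adds an explicit isinstance(body_value, dict) branch before the Pydantic checks. Below is a minimal standalone sketch of that branching, for illustration only; serialize_body is a hypothetical helper name, not part of fastagency's API.

# Standalone sketch of the fixed branching; `serialize_body` is a hypothetical
# name used only for illustration, not part of fastagency's API.
from pydantic import BaseModel


class Item(BaseModel):
    name: str
    price: float


def serialize_body(body_value):
    if isinstance(body_value, dict):
        # Plain dicts pass through unchanged (the case the old ternary broke on).
        return {"json": body_value}
    if hasattr(body_value, "model_dump"):
        # Pydantic v2 models.
        return {"json": body_value.model_dump()}
    # Pydantic v1 models fall back to .dict().
    return {"json": body_value.dict()}


print(serialize_body({"name": "apple", "price": 1.0}))  # {'json': {'name': 'apple', 'price': 1.0}}
print(serialize_body(Item(name="apple", price=1.0)))    # {'json': {'name': 'apple', 'price': 1.0}}
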
70 changes: 70 additions & 0 deletions tests/api/openapi/test_endpoint_with_body.py
@@ -0,0 +1,70 @@
from typing import Any

import pytest
from autogen import ConversableAgent, UserProxyAgent
from fastapi import FastAPI
from pydantic import BaseModel

from fastagency.api.openapi.client import OpenAPI


def create_fastapi_app_with_body(host: str, port: int) -> FastAPI:
    class Item(BaseModel):
        name: str
        price: float

    app = FastAPI(
        servers=[
            {"url": f"http://{host}:{port}", "description": "Local development server"}
        ]
    )

    @app.post("/items")
    async def create_item(item: Item) -> str:
        return "Item created"

    return app


@pytest.mark.azure_oai
@pytest.mark.parametrize(
    "fastapi_openapi_url",
    [(create_fastapi_app_with_body)],
    indirect=["fastapi_openapi_url"],
)
def test_end2end(
    fastapi_openapi_url: str,
    azure_gpt35_turbo_16k_llm_config: dict[str, Any],
) -> None:
    api = OpenAPI.create(openapi_url=fastapi_openapi_url)

    agent = ConversableAgent(name="agent", llm_config=azure_gpt35_turbo_16k_llm_config)
    user_proxy = UserProxyAgent(
        name="user_proxy",
        llm_config=azure_gpt35_turbo_16k_llm_config,
        human_input_mode="NEVER",
    )

    api._register_for_llm(agent)
    api._register_for_execution(user_proxy)

    message = "Add item with name 'apple', price 1.0"
    user_proxy.initiate_chat(
        agent,
        message=message,
        summary_method="reflection_with_llm",
        max_turns=3,
    )

    message_existed = False
    expected_message = "Item created"
    for message in agent.chat_messages[user_proxy]:
        if (
            isinstance(message, dict)
            and "content" in message
            and isinstance(message["content"], str)
            and message["content"] == expected_message
        ):
            message_existed = True
            break
    assert message_existed, f"Expected message '{expected_message}' not found"
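
For a lighter-weight check of the same endpoint that does not need the Azure LLM config, a hypothetical companion test (not part of this commit) could hit the app above directly with FastAPI's TestClient:

# Hypothetical extra test, not part of this commit: exercise the /items
# endpoint of the app defined above directly, so the JSON-body path can be
# verified without any LLM configuration. Assumes create_fastapi_app_with_body
# from the test file above is in scope.
from fastapi.testclient import TestClient


def test_items_endpoint_accepts_json_body() -> None:
    app = create_fastapi_app_with_body("127.0.0.1", 8000)
    client = TestClient(app)

    response = client.post("/items", json={"name": "apple", "price": 1.0})

    assert response.status_code == 200
    assert response.json() == "Item created"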
