Skip to content

Commit

Permalink
[simulation] refactor talks_to to use generative agent llm (#69)
Browse files Browse the repository at this point in the history
* feat: modify 'talks_to' to use generative agent LLM

* fix: disable verbose

* fix: grammar
  • Loading branch information
tonyz0x0 authored May 4, 2023
1 parent 45b6986 commit 5bee64b
Show file tree
Hide file tree
Showing 2 changed files with 64 additions and 40 deletions.
63 changes: 63 additions & 0 deletions skyagi/src/skyagi/simulation/simulation.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,8 +2,15 @@
from typing import List

import faiss
from langchain import LLMChain
from langchain.docstore import InMemoryDocstore
from langchain.embeddings import OpenAIEmbeddings
from langchain.prompts.chat import (
AIMessagePromptTemplate,
ChatPromptTemplate,
HumanMessagePromptTemplate,
SystemMessagePromptTemplate,
)
from langchain.retrievers import TimeWeightedVectorStoreRetriever
from langchain.vectorstores import FAISS

Expand Down Expand Up @@ -77,3 +84,59 @@ def interview_agent(agent: GenerativeAgent, message: str, username: str) -> str:
"""Help the notebook user interact with the agent."""
new_message = f"{username} says {message}"
return agent.generate_dialogue_response(new_message)[1]


# whether initiator wants to talk to recipient based on the observations
def talks_to(
    initiator: GenerativeAgent, recipient: GenerativeAgent, observations: List[str]
) -> str:
    """Ask the initiator's LLM whether it wants to talk to the recipient.

    Runs two LLM rounds: first the initiator role-plays an opening message
    (or outputs NOTHING to decline), then a follow-up asks the model to
    confirm yes/no that a conversation actually started.

    Returns the initiator's opening message, or "" when the initiator
    declines or the confirmation round answers no.
    """
    import re  # local: only needed for the yes/no answer check below

    instruct = "Here is the timeline of events that happened for these NPC characters:\n{observation}\n"
    instruct += "I want you to behave as {initiator_name} and talk to me as I am {recipient_name}.\n"
    instruct += (
        "If you do not want to or can not talk to {recipient_name}, just output NOTHING"
    )
    messages = [
        SystemMessagePromptTemplate.from_template(
            "You are the AI behind a NPC character called {initiator_name}"
        ),
        HumanMessagePromptTemplate.from_template(instruct),
    ]
    observation = "\n".join(observations)

    message = (
        LLMChain(
            llm=initiator.llm,
            prompt=ChatPromptTemplate.from_messages(messages),
        )
        .run(
            observation=observation,
            initiator_name=initiator.name,
            recipient_name=recipient.name,
        )
        .strip()
    )
    if "NOTHING" in message:
        return ""

    # Escape braces before templating: the LLM reply is free-form text, and
    # any literal "{" / "}" in it would otherwise be parsed by
    # from_template() as a template variable and break formatting.
    messages.append(
        AIMessagePromptTemplate.from_template(
            message.replace("{", "{{").replace("}", "}}")
        )
    )
    messages.append(
        HumanMessagePromptTemplate.from_template(
            "Did {initiator_name} talk to {recipient_name}, please answer yes or no"
        )
    )
    resp = (
        LLMChain(
            llm=initiator.llm,
            prompt=ChatPromptTemplate.from_messages(messages),
        )
        .run(
            observation=observation,
            initiator_name=initiator.name,
            recipient_name=recipient.name,
        )
        .strip()
    )
    # Match "no" as a whole word, case-insensitively: a plain substring test
    # would miss "No" and false-positive on words like "know" or "nothing".
    if re.search(r"\bno\b", resp, re.IGNORECASE):
        return ""

    return message
41 changes: 1 addition & 40 deletions skyagi/src/skyagi/skyagi.py
Original file line number Diff line number Diff line change
@@ -1,7 +1,6 @@
import os
from typing import List

import openai
from langchain.chat_models import ChatOpenAI
from rich.console import Console
from rich.prompt import Prompt
Expand All @@ -12,48 +11,10 @@
create_new_memory_retriever,
interview_agent,
run_conversation,
talks_to,
)


# whether amy wants to talk to bob based on the observations
def talks_to(
    amy: GenerativeAgent, bob: GenerativeAgent, observations: List[str]
) -> str:
    """Ask the OpenAI chat model whether `amy` wants to talk to `bob`.

    Runs two chat-completion rounds: first amy role-plays an opening message
    (or outputs NOTHING to decline), then a follow-up asks the model to
    confirm yes/no that a conversation actually started.

    Returns amy's opening message, or "" when she declines or the
    confirmation round answers no.
    """
    import re  # local: only needed for the yes/no answer check below

    instruct = "Here are the timeline of events happened for these NPC characters:\n"
    instruct += "\n".join(observations)
    instruct += "\n"
    # "I want you to" — fixes the original's garbled "I want to you to".
    instruct += (
        f"I want you to behave as {amy.name} and talk to me as I am {bob.name}.\n"
    )
    instruct += (
        f"If you do not want to or can not talk to {bob.name}, just output NOTHING"
    )

    prompts = [
        {
            "role": "system",
            "content": f"You are the AI behind a NPC character called {amy.name}",
        },
        {"role": "user", "content": instruct},
    ]
    resp = openai.ChatCompletion.create(model="gpt-3.5-turbo", messages=prompts)
    message = resp["choices"][0]["message"]["content"]
    if "NOTHING" in message:
        return ""

    # Keep the assistant turn in the transcript so the follow-up question
    # has the full conversation as context.
    prompts.append(resp["choices"][0]["message"])
    prompts.append(
        {
            "role": "user",
            "content": f"Did {amy.name} talk to {bob.name}, please answer yes or no",
        }
    )
    resp = openai.ChatCompletion.create(model="gpt-3.5-turbo", messages=prompts)
    # Match "no" as a whole word, case-insensitively: a plain substring test
    # would miss "No" and false-positive on words like "know" or "nothing".
    if re.search(r"\bno\b", resp["choices"][0]["message"]["content"], re.IGNORECASE):
        return ""
    return message


def user_robot_conversation(agent_to_interview: GenerativeAgent, ctx: Context):
ctx.console.print(
f"Interview with {agent_to_interview.name} start, input empty line to exit",
Expand Down

1 comment on commit 5bee64b

@vercel
Copy link

@vercel vercel bot commented on 5bee64b May 4, 2023

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Successfully deployed to the following URLs:

skyagi – ./

skyagi-sky-agi.vercel.app
skyagi.vercel.app
app.skyagi.ai
skyagi-git-main-sky-agi.vercel.app

Please sign in to comment.