Forked from langchain-ai/langsmith-cookbook
-
Notifications
You must be signed in to change notification settings - Fork 0
/
expression_chain.py
45 lines (40 loc) · 1.38 KB
/
expression_chain.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
from datetime import datetime
from langchain.chat_models import ChatOpenAI
from langchain.prompts import ChatPromptTemplate, MessagesPlaceholder
from langchain.schema.runnable import Runnable, RunnableMap
from langchain.memory import ConversationBufferMemory
def get_expression_chain(
    system_prompt: str,
    memory: ConversationBufferMemory,
    temperature: float = 0.7,
) -> Runnable:
    """Return a chain defined primarily in LangChain Expression Language.

    Args:
        system_prompt: Text of the system message; a line announcing the
            current time (``{time}``) is appended to it.
        memory: Conversation memory whose ``"chat_history"`` variable is
            injected into the prompt on every invocation. It should be
            configured with ``return_messages=True`` so the placeholder
            receives message objects — TODO confirm against caller.
        temperature: Sampling temperature for the chat model. Defaults to
            0.7 (the previously hard-coded value), so existing callers
            are unaffected.

    Returns:
        A Runnable mapping ``{"input": str}`` to a chat-model response.
    """
    # Assemble the prompt variables lazily on each call: reading memory
    # inside the lambda means turns saved to `memory` after chain
    # construction are still picked up on later invocations.
    ingress = RunnableMap(
        {
            "input": lambda x: x["input"],
            "chat_history": lambda x: memory.load_memory_variables(x)["chat_history"],
            "time": lambda _: str(datetime.now()),
        }
    )
    prompt = ChatPromptTemplate.from_messages(
        [
            (
                "system",
                system_prompt + "\nIt's currently {time}.",
            ),
            MessagesPlaceholder(variable_name="chat_history"),
            ("human", "{input}"),
        ]
    )
    llm = ChatOpenAI(temperature=temperature)
    # LCEL pipe syntax: ingress feeds the prompt, which feeds the model.
    chain = ingress | prompt | llm
    return chain
if __name__ == "__main__":
    # BUG FIX: the original demo did `chain, _ = get_expression_chain()`,
    # which fails twice over — the function requires two positional
    # arguments, and it returns a single Runnable, not a tuple.
    # Build the memory explicitly and pass both arguments.
    # return_messages=True is required so MessagesPlaceholder receives
    # message objects rather than a flat string.
    memory = ConversationBufferMemory(
        memory_key="chat_history", return_messages=True
    )
    chain = get_expression_chain("You are a helpful assistant.", memory)

    in_ = "Hi there, I'm a human!"
    print(in_)
    out = ""
    for chunk in chain.stream({"input": in_}):
        print(chunk.content, end="", flush=True)
        out += chunk.content
    # Persist the first turn so the second question actually sees it in
    # chat_history (the original never saved anything to memory).
    memory.save_context({"input": in_}, {"output": out})

    in_ = "What's your name?"
    print()
    print(in_)
    for chunk in chain.stream({"input": in_}):
        print(chunk.content, end="", flush=True)