-
Notifications
You must be signed in to change notification settings - Fork 0
/
Copy pathmain.py
55 lines (43 loc) · 1.69 KB
/
main.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
import streamlit as st
import ollama
# Set page title.
st.session_state.app_name = "Private Chat"
st.title(f"{st.session_state.app_name}")

# Seed the conversation with an assistant greeting on first load only.
if "messages" not in st.session_state:
    st.session_state["messages"] = [{"role": "assistant", "content": "Hi There, how can I help you?"}]

# Replay the full message history on every rerun (Streamlit re-executes the
# whole script per interaction).
# FIX: the original branched on msg["role"] == "user" but both branches were
# byte-identical; collapsed the dead if/else into a single call.
for msg in st.session_state.messages:
    st.chat_message(msg["role"]).write(msg["content"])

# Ollama generation options, tuned for focused/deterministic answers.
options = {
    'num_ctx': 4096,     # context window size; default is 2048
    'temperature': 0.2,  # lower = less random; default is 0.8
    'top_k': 15,         # default is 40
    'top_p': 0.5,        # default is 0.9
}
# Generate a response (streamed) using an LLM.
def generate_response():
    """Stream tokens from the local Ollama model.

    Sends the current chat history (st.session_state.messages) to the
    'llama3.2' model with the module-level `options`, accumulates the reply
    into st.session_state["full_message"], and yields each token so
    st.write_stream can render it incrementally.

    Yields:
        str: the next token of the assistant's reply.
    """
    response = ollama.chat(model='llama3.2', stream=True, messages=st.session_state.messages, options=options)
    for partial_resp in response:
        try:
            token = partial_resp["message"]["content"]
            st.session_state["full_message"] += token
            yield token
        except Exception as inst:
            # BUG FIX: the original did `print("---> " + inst)`, which raises
            # TypeError (cannot concatenate str and Exception) and so crashed
            # the handler itself. Format the exception instead.
            print(f"---> {inst}")
def main():
    """Handle one chat turn: read user input, stream the reply, persist both."""
    prompt = st.chat_input("Enter your message here...")
    if not prompt:
        # No input this rerun — nothing to do.
        return
    # Record and echo the user's message.
    st.session_state.messages.append({"role": "user", "content": prompt})
    st.chat_message("user").write(prompt)
    # Reset the accumulator that generate_response fills token by token.
    st.session_state["full_message"] = ""
    # Stream the assistant's reply into the chat UI as it is generated.
    st.chat_message("assistant").write_stream(generate_response)
    # Persist the completed reply in the history.
    st.session_state.messages.append({"role": "assistant", "content": st.session_state["full_message"]})


if __name__ == "__main__":
    main()