import json
import os

import pandas as pd
import streamlit as st
from dotenv import load_dotenv

import utils
from agent import query_agent, create_agent
from htmlchattemplate import css


def configure():
    load_dotenv()
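

# configure() above only loads environment variables; the agent and embedding
# helpers in utils.py and agent.py presumably read their credentials from that
# environment. A minimal .env sketch (assuming an OpenAI-backed model; the
# exact variable names depend on those modules):
#
#     OPENAI_API_KEY=...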


def decode_response(response: str) -> dict:
    """This function converts the string response from the model to a dictionary object.

    Args:
        response (str): response from the model

    Returns:
        dict: dictionary with response data
    """
    return json.loads(response)
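

# Sketch of the response shapes this app expects the agent to return (inferred
# from write_response below; the actual prompt contract lives in agent.py, and
# the example values are purely illustrative):
#
#     {"answer": "The dataset has 150 rows."}
#     {"bar":   {"columns": ["product", "sales"], "data": [["A", 10], ["B", 7]]}}
#     {"line":  {"columns": ["month", "revenue"], "data": [["Jan", 3], ["Feb", 5]]}}
#     {"table": {"columns": ["name", "age"], "data": [["Alice", 30], ["Bob", 25]]}}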


def write_response(response_dict: dict):
    """
    Write a response from an agent to a Streamlit app.

    Args:
        response_dict: The response from the agent.

    Returns:
        None.
    """
    # Check if the response is a plain answer.
    if "answer" in response_dict:
        st.write(response_dict["answer"])

    # Check if the response is a bar chart.
    if "bar" in response_dict:
        data = response_dict["bar"]
        df = pd.DataFrame(data["data"], columns=data["columns"])
        df.set_index(df.columns[0], inplace=True)
        st.bar_chart(df)

    # Check if the response is a line chart.
    if "line" in response_dict:
        data = response_dict["line"]
        # Build the frame from the "data"/"columns" payload, same as the bar chart.
        df = pd.DataFrame(data["data"], columns=data["columns"])
        df.set_index(df.columns[0], inplace=True)
        st.line_chart(df)

    # Check if the response is a table.
    if "table" in response_dict:
        data = response_dict["table"]
        df = pd.DataFrame(data["data"], columns=data["columns"])
        st.table(df)


def process_pdf():
    st.subheader("Ask questions to your PDFs :books:")
    pdfs = st.file_uploader(
        "Upload your PDFs and click on 'Process'", accept_multiple_files=True
    )
    try:
        if st.button("Process"):
            with st.spinner("Processing your PDFs"):
                # Get the raw text from the uploaded PDF files.
                raw_text = utils.get_text_from_file(pdfs)
                # Split the text into chunks.
                chunks = utils.get_chunks_from_text(raw_text)
                # Create the embeddings and the vector store.
                vectorstore = utils.get_vectors_from_chunks(chunks)
                # Create the conversation chain.
                st.session_state.conversation = utils.create_conversation_chain(
                    vectorstore
                )
        user_question = st.text_input("Ask a question about your PDFs:")
        if user_question is not None and user_question != "":
            utils.handle_user_question(user_question)
    except Exception:
        st.error(
            "Oops, there was an error :( Please try again with a different "
            "question or upload a different file."
        )


def process_csv():
    st.subheader("Ask your data 📈")
    csv_file = st.file_uploader("Upload a CSV file", type="csv")
    user_question = st.text_input("Ask a question about your CSV:")
    if st.button("Submit Query", type="primary"):
        if csv_file is not None and user_question != "":
            with st.spinner(text="In progress..."):
                try:
                    # Build a CSV-aware agent and query it with the user's question.
                    agent = create_agent(csv_file)
                    response = query_agent(agent=agent, query=user_question)
                    # The agent answers with a JSON string; decode and render it.
                    decoded_response = decode_response(response)
                    write_response(decoded_response)
                except Exception:
                    st.error(
                        "Oops, there was an error :( Please try again with a "
                        "different question."
                    )


def main():
    configure()
    # set_page_config must run before any other Streamlit command.
    st.set_page_config(page_title="Personal Copilot", page_icon=":robot:")
    st.write(css, unsafe_allow_html=True)

    PAGES = {"PDF Chat": process_pdf, "CSV Chat": process_csv}

    # Initializing the global session variables.
    if "conversation" not in st.session_state:
        st.session_state.conversation = None
    if "chat_history" not in st.session_state:
        st.session_state.chat_history = None

    st.header("Pragnesh's _:red[Copilot]_ 🤖")

    with st.sidebar:
        st.title("Navigation")
        selected_page = st.radio("Select a page", list(PAGES.keys()))

    page = PAGES[selected_page]
    page()


if __name__ == "__main__":
    main()
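
# To launch the UI locally, run the app through the Streamlit CLI:
#
#     streamlit run app.py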