Skip to content

Commit

Permalink
Add GPT-3 and Llama 2 7B chatbot functionality
Browse files Browse the repository at this point in the history
  • Loading branch information
Mark Chen authored and Mark Chen committed Feb 7, 2024
1 parent e2be12d commit 4e1ab80
Show file tree
Hide file tree
Showing 2 changed files with 40 additions and 9 deletions.
44 changes: 37 additions & 7 deletions src/app/main.py
Original file line number Diff line number Diff line change
@@ -1,14 +1,44 @@
import gradio as gr
import requests

def echo(message, history):
is_gpt3 = False


def echo_gptchatbot(message, history):
    """Send the user's message to the local GPT-3 endpoint and return the reply.

    Parameters
    ----------
    message : str
        The user's chat message, as passed in by ``gr.ChatInterface``.
    history : list
        Prior conversation turns supplied by Gradio (not forwarded to the
        backend by this handler).

    Returns
    -------
    str
        The assistant's reply text extracted from the JSON response.
    """
    # Post the message to the server.  A timeout keeps the Gradio UI from
    # hanging indefinitely if the backend is down or stalled.
    response = requests.post(
        "http://127.0.0.1:8000/api/chat/gpt3",
        data={"user_request": message},
        timeout=60,
    )
    # Fail loudly on HTTP errors instead of raising an opaque KeyError
    # when indexing into an error payload below.
    response.raise_for_status()

    # Return the response; the server wraps the reply as
    # {"result": {"content": ...}}.
    llm_output = response.json()["result"]["content"]

    return llm_output


def echo_llamachatbot(message, history):
    """Send the user's message to the local Llama 2 endpoint and return the reply.

    Parameters
    ----------
    message : str
        The user's chat message, as passed in by ``gr.ChatInterface``.
    history : list
        Prior conversation turns supplied by Gradio (not forwarded to the
        backend by this handler).

    Returns
    -------
    str
        The generated reply text extracted from the JSON response.
    """
    # Post the message to the server.  A timeout keeps the Gradio UI from
    # hanging indefinitely if the backend is down or stalled.
    response = requests.post(
        "http://127.0.0.1:8000/api/chat/llama",
        data={"user_request": message},
        timeout=60,
    )
    # Fail loudly on HTTP errors instead of raising an opaque KeyError
    # when indexing into an error payload below.
    response.raise_for_status()

    # Return the response; "result" holds a list whose first element carries
    # "generated_text" — presumably a Hugging Face text-generation pipeline
    # payload; verify against the server implementation.
    llm_output = response.json()["result"][0]["generated_text"]

    return llm_output

# Create a Gradio interface with the chatbot.  The backend is selected once
# at startup via the module-level `is_gpt3` flag; flip it to switch between
# the GPT-3 and Llama 2 handlers.
if is_gpt3:
    demo = gr.ChatInterface(
        fn=echo_gptchatbot,
        examples=["What is OpenAI?", "What is GPT-3?"],
        title="GPT-3 Chatbot",
    )
else:
    demo = gr.ChatInterface(
        fn=echo_llamachatbot,
        examples=["What is OpenAI?", "What is LLM?"],
        title="LLM Chatbot - Llama 2 7B",
    )

demo.launch()
5 changes: 3 additions & 2 deletions src/index/main.py
Original file line number Diff line number Diff line change
Expand Up @@ -9,8 +9,8 @@
if isProduction:
    # In production, hide the interactive API documentation endpoints.
    app = FastAPI(
        title="LLM API Endpoints",
        docs_url=None,  # Disable docs (Swagger UI)
        redoc_url=None,  # Disable redoc
    )
else:
    app = FastAPI(title="LLM API Endpoints")
Expand Down Expand Up @@ -40,6 +40,7 @@ async def gpt_chat(user_request: str = Form(...)):

return {"result": result}


@app.post("/api/chat/llama", tags=["Llama 2 7B Chat"])
async def llama_chat(user_request: str = Form(...)):
"""
Expand Down

0 comments on commit 4e1ab80

Please sign in to comment.