Remove unnecessary system prompt
This can be set in the Ollama LLM but is not used by Llama-Index.
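
For context, a system prompt typically takes effect where LlamaIndex consumes it directly, such as on a chat engine, rather than via the `system_prompt` kwarg on the `Ollama` constructor that this commit drops. A minimal sketch of that pattern, assuming a local Ollama server; the model names and document text are illustrative placeholders, not code from this repository:

```python
from llama_index.core import Document, Settings, VectorStoreIndex
from llama_index.embeddings.ollama import OllamaEmbedding
from llama_index.llms.ollama import Ollama

# Assumes a local Ollama server; model names are placeholders.
Settings.llm = Ollama(model="llama3", base_url="http://localhost:11434", request_timeout=120)
Settings.embed_model = OllamaEmbedding(model_name="nomic-embed-text", base_url="http://localhost:11434")

index = VectorStoreIndex.from_documents(
    [Document(text="Local RAG keeps its settings in Streamlit session state.")]
)

# A system prompt passed here, on the chat engine, is applied to the chat flow,
# unlike the constructor kwarg this commit removes.
chat_engine = index.as_chat_engine(
    chat_mode="context",
    system_prompt="Answer using only the indexed documents.",
)
print(chat_engine.chat("Where are the settings kept?"))
```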
jonfairbanks committed Jul 5, 2024
1 parent c831b6a commit 16c226e
Showing 2 changed files with 8 additions and 6 deletions.
10 changes: 5 additions & 5 deletions components/tabs/settings.py
@@ -39,11 +39,11 @@ def settings():
        value=st.session_state["top_k"],
        key="top_k",
    )
-    st.text_area(
-        "System Prompt",
-        value=st.session_state["system_prompt"],
-        key="system_prompt",
-    )
+    # st.text_area(
+    #     "System Prompt",
+    #     value=st.session_state["system_prompt"],
+    #     key="system_prompt",
+    # )
    st.selectbox(
        "Chat Mode",
        (
4 changes: 3 additions & 1 deletion utils/ollama.py
@@ -90,6 +90,7 @@ def get_models():
        logs.log.error(f"Failed to retrieve Ollama model list: {err}")
        return []

+
###################################
#
# Create Ollama LLM instance
@@ -111,7 +112,8 @@ def create_ollama_llm(model: str, base_url: str, system_prompt: str = None, requ
        - llm: An instance of the Ollama language model with the specified configuration.
    """
    try:
-        Settings.llm = Ollama(model=model, base_url=base_url, system_prompt=system_prompt, request_timeout=request_timeout)
+        # Settings.llm = Ollama(model=model, base_url=base_url, system_prompt=system_prompt, request_timeout=request_timeout)
+        Settings.llm = Ollama(model=model, base_url=base_url, request_timeout=request_timeout)
        logs.log.info("Ollama LLM instance created successfully")
        return Settings.llm
    except Exception as e:
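
For reference, a self-contained sketch of `create_ollama_llm` as it reads after this change. The standard `logging` module stands in for the repo's `logs` helper, and the `request_timeout` default and the `except` branch body are assumptions, since they fall outside this hunk:

```python
import logging

from llama_index.core import Settings
from llama_index.llms.ollama import Ollama

log = logging.getLogger(__name__)  # stand-in for the repo's `logs.log` helper (assumption)


def create_ollama_llm(model: str, base_url: str, system_prompt: str = None, request_timeout: int = 60) -> Ollama:
    """Create an Ollama LLM instance and register it as the global LlamaIndex LLM.

    Note: system_prompt is still accepted but no longer forwarded to Ollama, per this
    commit. The request_timeout default and the error handling below are assumptions,
    as they are not visible in this hunk.
    """
    try:
        Settings.llm = Ollama(model=model, base_url=base_url, request_timeout=request_timeout)
        log.info("Ollama LLM instance created successfully")
        return Settings.llm
    except Exception as e:
        log.error(f"Failed to create Ollama LLM instance: {e}")
        return None
```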
