Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Add support for ollama, and oh-my-zsh plugin loading #5

Open
wants to merge 59 commits into
base: master
Choose a base branch
from
Open
Changes from 1 commit
Commits
Show all changes
59 commits
Select commit Hold shift + click to select a range
1a0fa98
Create zsh-llm-suggestions-ollama.py
p1r473 May 3, 2024
d362374
Create zsh-llm-suggestions.plugin.zsh
p1r473 May 3, 2024
ce2fbe3
Update zsh-llm-suggestions-ollama.py
p1r473 May 3, 2024
6baaa99
Update zsh-llm-suggestions.zsh
p1r473 May 3, 2024
6fc92ea
Update zsh-llm-suggestions-ollama.py
p1r473 May 3, 2024
8334f6a
Update zsh-llm-suggestions-ollama.py
p1r473 May 3, 2024
50b1259
Update zsh-llm-suggestions.zsh
p1r473 May 3, 2024
3b09a3a
Update README.md
p1r473 May 3, 2024
038b71e
Update zsh-llm-suggestions.zsh
p1r473 May 3, 2024
eeeffdc
Update zsh-llm-suggestions-ollama.py
p1r473 May 3, 2024
fc521ff
Update zsh-llm-suggestions-ollama.py
p1r473 May 3, 2024
413b299
Update zsh-llm-suggestions-ollama.py
p1r473 May 4, 2024
2b44956
Update zsh-llm-suggestions-ollama.py
p1r473 May 4, 2024
21070ca
Update zsh-llm-suggestions-ollama.py
p1r473 May 4, 2024
03d9c37
Update zsh-llm-suggestions-ollama.py
p1r473 May 4, 2024
f710601
Update zsh-llm-suggestions-ollama.py
p1r473 May 4, 2024
bf65c6d
Update zsh-llm-suggestions-ollama.py
p1r473 May 4, 2024
a4b087c
Update zsh-llm-suggestions-ollama.py
p1r473 May 4, 2024
ce82fc0
Update zsh-llm-suggestions.zsh
p1r473 May 4, 2024
2ab16a0
Update zsh-llm-suggestions.zsh
p1r473 May 4, 2024
86fb8d6
Update zsh-llm-suggestions.zsh
p1r473 May 4, 2024
1e01157
Update zsh-llm-suggestions.zsh
p1r473 May 4, 2024
e43f89c
Update zsh-llm-suggestions.zsh
p1r473 May 4, 2024
d292c66
Update zsh-llm-suggestions.zsh
p1r473 May 4, 2024
efd18ef
Update zsh-llm-suggestions.zsh
p1r473 May 4, 2024
8d65db6
Update zsh-llm-suggestions.zsh
p1r473 May 6, 2024
8b3f688
Update zsh-llm-suggestions-ollama.py
p1r473 May 12, 2024
be1ce72
Update zsh-llm-suggestions.plugin.zsh
p1r473 May 12, 2024
fe60191
Update zsh-llm-suggestions.zsh
p1r473 May 12, 2024
3366262
Update zsh-llm-suggestions.zsh
p1r473 May 13, 2024
cafd668
Update zsh-llm-suggestions.zsh
p1r473 May 13, 2024
cfac04c
Update zsh-llm-suggestions.zsh
p1r473 May 15, 2024
2f8607f
hi
p1r473 May 15, 2024
e4def8e
Update zsh-llm-suggestions.zsh
p1r473 May 15, 2024
2e12c99
Update zsh-llm-suggestions.zsh
p1r473 May 15, 2024
c9d45d0
Update zsh-llm-suggestions.zsh
p1r473 May 15, 2024
55cef4c
Update zsh-llm-suggestions.zsh
p1r473 May 15, 2024
6bdac91
Update zsh-llm-suggestions.zsh
p1r473 May 15, 2024
4021da1
Update zsh-llm-suggestions.zsh
p1r473 May 17, 2024
3a4267f
Delete zsh-llm-suggestions-ollama.py
p1r473 May 17, 2024
0710612
Create zsh-llm-suggestions-ollama.py
p1r473 May 17, 2024
ca5e76c
Create zsh-llm-suggestions.plugin.zsh
p1r473 May 17, 2024
e0f015d
Update zsh-llm-suggestions.zsh
p1r473 May 17, 2024
3b3fc24
Update README.md
p1r473 May 17, 2024
35fb9fb
Merge branch 'master' into fix
p1r473 May 17, 2024
5af4ffd
Merge pull request #1 from p1r473/fix
p1r473 May 17, 2024
31730d3
Update zsh-llm-suggestions.zsh
p1r473 May 17, 2024
632c2fe
Add files via upload
p1r473 May 17, 2024
4d84bd8
Update zsh-llm-suggestions-ollama.py
p1r473 May 29, 2024
e8db1e3
Update zsh-llm-suggestions-ollama.py
p1r473 May 29, 2024
56ee2e6
Update zsh-llm-suggestions-ollama.py
p1r473 May 29, 2024
d8da3d7
Update zsh-llm-suggestions-ollama.py
p1r473 May 29, 2024
187618f
Update zsh-llm-suggestions-ollama.py
p1r473 May 29, 2024
60adfdb
Update zsh-llm-suggestions-ollama.py
p1r473 May 29, 2024
ff55241
Update zsh-llm-suggestions-ollama.py
p1r473 Dec 16, 2024
33d301e
Update zsh-llm-suggestions-ollama.py
p1r473 Dec 16, 2024
1f01599
Update zsh-llm-suggestions-ollama.py
p1r473 Dec 16, 2024
71941d8
Update zsh-llm-suggestions-ollama.py
p1r473 Dec 16, 2024
43e19ba
Update zsh-llm-suggestions-ollama.py
p1r473 Dec 16, 2024
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Prev Previous commit
Next Next commit
Update zsh-llm-suggestions-ollama.py
p1r473 authored Dec 16, 2024
commit 1f015992d668e0c7e0ef96fe43ab14b1dd512267
60 changes: 49 additions & 11 deletions zsh-llm-suggestions-ollama.py
Original file line number Diff line number Diff line change
@@ -86,20 +86,51 @@ def highlight_explanation(explanation):

def send_request(prompt, system_message=None, context=None):
server_address = os.environ.get('ZSH_LLM_SUGGESTION_SERVER', 'localhost:11434')
model = os.environ.get('ZSH_LLM_SUGGESTION_MODEL', 'tinyllama')
num_ctx = os.environ.get('ZSH_LLM_SUGGESTION_NUM_CTX', '2056')
model = os.environ.get('ZSH_LLM_SUGGESTION_MODEL', 'llama3.3') # Default model

# Base request data
data = {
"model": model,
"prompt": prompt,
"num_ctx": int(num_ctx),
"keep_alive": "30m",
"stream": False
}

if system_message:
data["system"] = system_message
if context:
# Optional parameters: Check and add if set
optional_params = [
"num_ctx", # Context length
"temperature", # Sampling randomness
"top_k", # Top-K sampling
"top_p", # Nucleus sampling
"repeat_penalty", # Penalizes repetition
"frequency_penalty", # Penalizes frequent tokens
"presence_penalty", # Penalizes new tokens based on presence
"mirostat", # Mirostat sampling
"mirostat_tau", # Mirostat parameter
"mirostat_eta", # Mirostat parameter
"stop" # Stop sequences
]

for param in optional_params:
value = os.environ.get(f"ZSH_LLM_SUGGESTION_{param.upper()}")
if value is not None:
# Convert numeric values appropriately
if param in ["temperature", "top_p", "repeat_penalty", "frequency_penalty", "presence_penalty", "mirostat_tau", "mirostat_eta"]:
data[param] = float(value)
elif param in ["top_k", "mirostat", "num_ctx"]:
data[param] = int(value)
elif param == "stop":
# Handle stop sequences as a JSON array if provided
try:
data[param] = json.loads(value) if value.startswith("[") else value
except json.JSONDecodeError:
print(f"Invalid JSON format for {param}, skipping.")
else:
data[param] = value

# Handle context if enabled
use_context = os.environ.get('ZSH_LLM_SUGGESTION_USE_CONTEXT', 'true').lower() == 'true'
if context and use_context:
data["context"] = context

try:
@@ -109,17 +140,24 @@ def send_request(prompt, system_message=None, context=None):
text=True,
timeout=60
)
if response.returncode != 0:
return f"Curl error: {response.stderr.strip()}", None

if response.stdout:
json_response = json.loads(response.stdout)
return json_response.get('response', 'No response received.'), json_response.get('context', None)
try:
json_response = json.loads(response.stdout)
if "error" in json_response:
return f"Error from server: {json_response['error']}", None
return json_response.get('response', 'No response received.'), json_response.get('context', None)
except json.JSONDecodeError:
return f"Invalid JSON response: {response.stdout.strip()}", None
else:
return "No response received.", None

except subprocess.TimeoutExpired:
return "Request timed out. Please try again.", None
except json.JSONDecodeError:
return "Failed to decode the response. Please check the API response format.", None
except Exception as e:
return f"Error: {str(e)}", None
return f"Unexpected error: {str(e)}", None

def zsh_llm_suggestions_ollama(prompt, system_message=None, context=None):
try: