Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Add support for ollama, and oh-my-zsh plugin loading #5

Open
wants to merge 59 commits into
base: master
Choose a base branch
from
Open
Changes from 1 commit
Commits
Show all changes
59 commits
Select commit Hold shift + click to select a range
1a0fa98
Create zsh-llm-suggestions-ollama.py
p1r473 May 3, 2024
d362374
Create zsh-llm-suggestions.plugin.zsh
p1r473 May 3, 2024
ce2fbe3
Update zsh-llm-suggestions-ollama.py
p1r473 May 3, 2024
6baaa99
Update zsh-llm-suggestions.zsh
p1r473 May 3, 2024
6fc92ea
Update zsh-llm-suggestions-ollama.py
p1r473 May 3, 2024
8334f6a
Update zsh-llm-suggestions-ollama.py
p1r473 May 3, 2024
50b1259
Update zsh-llm-suggestions.zsh
p1r473 May 3, 2024
3b09a3a
Update README.md
p1r473 May 3, 2024
038b71e
Update zsh-llm-suggestions.zsh
p1r473 May 3, 2024
eeeffdc
Update zsh-llm-suggestions-ollama.py
p1r473 May 3, 2024
fc521ff
Update zsh-llm-suggestions-ollama.py
p1r473 May 3, 2024
413b299
Update zsh-llm-suggestions-ollama.py
p1r473 May 4, 2024
2b44956
Update zsh-llm-suggestions-ollama.py
p1r473 May 4, 2024
21070ca
Update zsh-llm-suggestions-ollama.py
p1r473 May 4, 2024
03d9c37
Update zsh-llm-suggestions-ollama.py
p1r473 May 4, 2024
f710601
Update zsh-llm-suggestions-ollama.py
p1r473 May 4, 2024
bf65c6d
Update zsh-llm-suggestions-ollama.py
p1r473 May 4, 2024
a4b087c
Update zsh-llm-suggestions-ollama.py
p1r473 May 4, 2024
ce82fc0
Update zsh-llm-suggestions.zsh
p1r473 May 4, 2024
2ab16a0
Update zsh-llm-suggestions.zsh
p1r473 May 4, 2024
86fb8d6
Update zsh-llm-suggestions.zsh
p1r473 May 4, 2024
1e01157
Update zsh-llm-suggestions.zsh
p1r473 May 4, 2024
e43f89c
Update zsh-llm-suggestions.zsh
p1r473 May 4, 2024
d292c66
Update zsh-llm-suggestions.zsh
p1r473 May 4, 2024
efd18ef
Update zsh-llm-suggestions.zsh
p1r473 May 4, 2024
8d65db6
Update zsh-llm-suggestions.zsh
p1r473 May 6, 2024
8b3f688
Update zsh-llm-suggestions-ollama.py
p1r473 May 12, 2024
be1ce72
Update zsh-llm-suggestions.plugin.zsh
p1r473 May 12, 2024
fe60191
Update zsh-llm-suggestions.zsh
p1r473 May 12, 2024
3366262
Update zsh-llm-suggestions.zsh
p1r473 May 13, 2024
cafd668
Update zsh-llm-suggestions.zsh
p1r473 May 13, 2024
cfac04c
Update zsh-llm-suggestions.zsh
p1r473 May 15, 2024
2f8607f
hi
p1r473 May 15, 2024
e4def8e
Update zsh-llm-suggestions.zsh
p1r473 May 15, 2024
2e12c99
Update zsh-llm-suggestions.zsh
p1r473 May 15, 2024
c9d45d0
Update zsh-llm-suggestions.zsh
p1r473 May 15, 2024
55cef4c
Update zsh-llm-suggestions.zsh
p1r473 May 15, 2024
6bdac91
Update zsh-llm-suggestions.zsh
p1r473 May 15, 2024
4021da1
Update zsh-llm-suggestions.zsh
p1r473 May 17, 2024
3a4267f
Delete zsh-llm-suggestions-ollama.py
p1r473 May 17, 2024
0710612
Create zsh-llm-suggestions-ollama.py
p1r473 May 17, 2024
ca5e76c
Create zsh-llm-suggestions.plugin.zsh
p1r473 May 17, 2024
e0f015d
Update zsh-llm-suggestions.zsh
p1r473 May 17, 2024
3b3fc24
Update README.md
p1r473 May 17, 2024
35fb9fb
Merge branch 'master' into fix
p1r473 May 17, 2024
5af4ffd
Merge pull request #1 from p1r473/fix
p1r473 May 17, 2024
31730d3
Update zsh-llm-suggestions.zsh
p1r473 May 17, 2024
632c2fe
Add files via upload
p1r473 May 17, 2024
4d84bd8
Update zsh-llm-suggestions-ollama.py
p1r473 May 29, 2024
e8db1e3
Update zsh-llm-suggestions-ollama.py
p1r473 May 29, 2024
56ee2e6
Update zsh-llm-suggestions-ollama.py
p1r473 May 29, 2024
d8da3d7
Update zsh-llm-suggestions-ollama.py
p1r473 May 29, 2024
187618f
Update zsh-llm-suggestions-ollama.py
p1r473 May 29, 2024
60adfdb
Update zsh-llm-suggestions-ollama.py
p1r473 May 29, 2024
ff55241
Update zsh-llm-suggestions-ollama.py
p1r473 Dec 16, 2024
33d301e
Update zsh-llm-suggestions-ollama.py
p1r473 Dec 16, 2024
1f01599
Update zsh-llm-suggestions-ollama.py
p1r473 Dec 16, 2024
71941d8
Update zsh-llm-suggestions-ollama.py
p1r473 Dec 16, 2024
43e19ba
Update zsh-llm-suggestions-ollama.py
p1r473 Dec 16, 2024
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Prev Previous commit
Next Next commit
Create zsh-llm-suggestions-ollama.py
p1r473 authored May 17, 2024
commit 0710612e31630dfc92b491462266c803ef99c62e
174 changes: 174 additions & 0 deletions zsh-llm-suggestions-ollama.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,174 @@
#!/usr/bin/env python3
import sys
import subprocess
import json
import os
import platform
import distro
import subprocess
import os
import socket
import psutil

def get_system_load():
    """Return current (cpu_percent, memory_percent) utilization.

    NOTE(review): currently unused by main() — kept for future context
    enrichment of the LLM system message.
    """
    # interval=1 makes psutil sample over one second for a real reading.
    cpu = psutil.cpu_percent(interval=1)
    mem = psutil.virtual_memory().percent
    return cpu, mem

def get_shell_version():
    """Return the installed zsh version banner, e.g. 'zsh 5.9 (...)'."""
    proc = subprocess.run(
        ["zsh", "--version"],
        capture_output=True,
        text=True,
    )
    return proc.stdout.strip()

def is_user_root():
    """True when running with effective UID 0 (root)."""
    euid = os.geteuid()
    return euid == 0

def get_cpu_architecture():
    """Machine hardware name, e.g. 'x86_64' or 'arm64'."""
    arch = platform.machine()
    return arch

def get_network_info():
    """Resolve and return (hostname, ip_address) for this machine.

    NOTE(review): gethostbyname of the local hostname can fail or return
    127.0.0.1 depending on /etc/hosts — currently unused by main().
    """
    host = socket.gethostname()
    addr = socket.gethostbyname(host)
    return host, addr

def get_env_vars():
    """Return (PATH, HOME, LD_LIBRARY_PATH); each is None when unset."""
    env = os.environ
    return env.get('PATH'), env.get('HOME'), env.get('LD_LIBRARY_PATH')

def get_current_username():
    """Best-effort login name: $USER, then $USERNAME, else a placeholder."""
    for var in ('USER', 'USERNAME'):
        name = os.environ.get(var)
        if name is not None:
            return name
    return 'Unknown User'

def get_os_info():
    """Return a human-readable OS description string.

    Prefers the third-party ``distro`` module for Linux distribution
    details ("Name (id version)"); falls back to ``platform`` data when
    ``distro`` is unavailable.
    """
    try:
        # BUG FIX: the module must be imported *here* for the
        # ModuleNotFoundError fallback to be reachable — with a
        # top-level `import distro` the whole script would have died
        # before this function ever ran, making the except dead code.
        import distro
        return f"{distro.name()} ({distro.id()} {distro.version()})".strip()
    except ModuleNotFoundError:
        # Fallback for platforms without the distro package.
        system = platform.system()
        version = platform.version()
        return f"{system} {version}".strip()

def filter_non_ascii(text):
    """Strip every character outside the 7-bit ASCII range."""
    kept = [ch for ch in text if ord(ch) < 128]
    return ''.join(kept)

MISSING_PREREQUISITES = "zsh-llm-suggestions missing prerequisites:"

def highlight_explanation(explanation):
    """Colorize a Markdown explanation for terminal display via pygments.

    When pygments is missing, print a shell snippet advising the user to
    install it (the zsh side surfaces this) and return the text as-is.
    """
    try:
        import pygments
        from pygments.formatters import TerminalFormatter
        from pygments.lexers import MarkdownLexer
    except ImportError:
        print(f'echo "{MISSING_PREREQUISITES} Install pygments" && pip3 install pygments')
        return explanation  # Return unhighlighted text if pygments is not installed
    return pygments.highlight(explanation, MarkdownLexer(), TerminalFormatter(style='material'))

def send_request(prompt, system_message=None, context=None):
    """POST *prompt* to the Ollama ``/api/generate`` endpoint.

    Server address and model come from the ZSH_LLM_SUGGESTION_SERVER
    (default ``localhost:11434``) and ZSH_LLM_SUGGESTION_MODEL
    (default ``tinyllama``) environment variables.

    Returns a ``(response_text, context)`` tuple, where *context* is
    Ollama's conversation state for follow-up requests (or None).
    Never raises: failures are reported as message strings.
    """
    # Local imports keep the module importable without touching the
    # top-of-file import block.
    import urllib.error
    import urllib.request

    server_address = os.environ.get('ZSH_LLM_SUGGESTION_SERVER', 'localhost:11434')
    model = os.environ.get('ZSH_LLM_SUGGESTION_MODEL', 'tinyllama')
    data = {
        "model": model,
        "prompt": prompt,
        "keep_alive": "30m",  # keep the model loaded between invocations
        "stream": False,      # single JSON reply instead of a token stream
    }
    if system_message:
        data["system"] = system_message
    if context:
        data["context"] = context

    request = urllib.request.Request(
        f"http://{server_address}/api/generate",
        data=json.dumps(data).encode('utf-8'),
        headers={"Content-Type": "application/json"},
        method="POST",
    )
    try:
        # IMPROVEMENT: use the stdlib HTTP client instead of shelling
        # out to `curl`, removing the dependency on an external binary.
        with urllib.request.urlopen(request, timeout=60) as response:
            body = response.read().decode('utf-8')
        if not body:
            return "No response received.", None
        json_response = json.loads(body)
        return (json_response.get('response', 'No response received.'),
                json_response.get('context', None))
    except TimeoutError:
        return "Request timed out. Please try again.", None
    except json.JSONDecodeError:
        return "Failed to decode the response. Please check the API response format.", None
    except Exception as e:
        return f"Error: {str(e)}", None

def zsh_llm_suggestions_ollama(prompt, system_message=None, context=None):
    """Thin never-raising wrapper around send_request.

    On any failure, print the error and return an empty result so the
    zsh integration degrades gracefully instead of crashing.
    """
    try:
        return send_request(prompt, system_message, context)
    except Exception as err:
        print(f"Error: {err}")
        return "", None

def _load_freestyle_context():
    """Load saved Ollama context from ~/.ollama_history, or None."""
    try:
        with open(os.path.expanduser('~/.ollama_history'), 'r') as file:
            file_contents = file.read().strip()
        return json.loads(file_contents) if file_contents else None
    except FileNotFoundError:
        return None  # first freestyle run: no history yet
    except json.JSONDecodeError:
        print("Failed to decode JSON from context file. It may be corrupt or empty.")
        return None
    except Exception as e:
        print(f"Unexpected error when loading context: {e}")
        return None

def _save_freestyle_context(new_context):
    """Persist Ollama context to ~/.ollama_history for the next call."""
    try:
        with open(os.path.expanduser('~/.ollama_history'), 'w') as file:
            if new_context is not None:
                file.write(json.dumps(new_context))
    except Exception as e:
        print(f"Error saving context: {e}")

def main():
    """Entry point: mode from argv[1], prompt from stdin, result to stdout.

    Modes: 'generate' (emit a zsh command), 'explain' (explain a
    command in Markdown), 'freestyle' (open-ended chat with persistent
    conversation context).
    """
    # ROBUSTNESS: fail with a message instead of an IndexError traceback
    # when invoked without a mode argument.
    if len(sys.argv) < 2:
        print("ERROR: something went wrong in zsh-llm-suggestions, please report a bug. Missing mode argument.")
        return
    mode = sys.argv[1]
    if mode not in ('generate', 'explain', 'freestyle'):
        print("ERROR: something went wrong in zsh-llm-suggestions, please report a bug. Got unknown mode: " + mode)
        return

    buffer = sys.stdin.read()
    system_message = None
    context = None  # Ollama conversation state; only used in freestyle mode

    os_info = get_os_info()
    shell_version = get_shell_version()
    user_is_root = is_user_root()
    cpu_arch = get_cpu_architecture()
    path, home, ld_library_path = get_env_vars()
    username = get_current_username()
    freestyle_system_message = os.environ.get('OLLAMA_FREESTYLE_SYSTEM_MESSAGE')

    # Unused for now:
    # hostname, ip_address = get_network_info()
    # cpu_usage, memory_usage = get_system_load()
    # Your system is on {hostname} ({ip_address}), with CPU usage at {cpu_usage}% and memory usage at {memory_usage}%

    expert = (
        f"You are a ZSH shell expert using {os_info} on {cpu_arch}, "
        f"shell version {shell_version}, running as "
        f"{'root' if user_is_root else f'non-root as user {username}'}."
    )
    if mode == 'generate':
        system_message = expert + " Please write a ZSH command that solves my query without any additional explanation."
    elif mode == 'explain':
        system_message = expert + " Please briefly explain how the given command works. Be as concise as possible using Markdown syntax."
    elif mode == 'freestyle':
        # Load the previous conversation context only for freestyle mode.
        context = _load_freestyle_context()
        # BUG FIX: this assignment used to sit inside the
        # FileNotFoundError handler, so a custom system message was only
        # applied when the history file did NOT exist. Apply it always.
        if freestyle_system_message:
            system_message = freestyle_system_message

    result, new_context = zsh_llm_suggestions_ollama(buffer, system_message, context)
    result = filter_non_ascii(result)

    if mode == 'freestyle':
        # Save the new context only for freestyle mode.
        _save_freestyle_context(new_context)

    if mode == 'generate':
        # Strip the markdown code fences models often wrap commands in.
        result = result.replace('```bash', '').replace('```zsh', '').replace('```', '').strip()
        print(result)
    elif mode == 'explain':
        print(highlight_explanation(result))
    elif mode == 'freestyle':
        print(result)

if __name__ == '__main__':
main()