diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..f8cba3d --- /dev/null +++ b/.gitignore @@ -0,0 +1,7 @@ +__pycache__ +.vscode +.ruff_cache +dist +clipea.egg-info +build +.DS_Store \ No newline at end of file diff --git a/README.md b/README.md index 4907325..b82a56e 100644 --- a/README.md +++ b/README.md @@ -157,21 +157,24 @@ Clipea doesn't have any context of what it said before, though this may be added ## 📦 Installation and setup -### Mac +### Manual installation -[![Install with Homebrew](https://img.shields.io/badge/Homebrew-Tap-blue.svg)](https://github.com/dave1010/homebrew-clipea) +Python >=3.10 is required. - brew tap dave1010/clipea - brew install clipea - clipea setup +You can use the provided `setup.py`. ([setuptools docs](https://setuptools.pypa.io/en/latest/deprecated/easy_install.html)) + +You can install it quickly like so: + + python3 setup.py sdist + pip install dist/clipea-{version}.tar.gz + +Or development mode: + + pip install -e . -### Manual install +### With PyPi (soon) - pip install llm - git clone https://github.com/dave1010/clipea.git - cd clipea - ./clipea setup - ./clipea add current dir to my path on shell login + #pip install clipea ### Zsh Shell integration and Alias @@ -182,7 +185,7 @@ Clipea doesn't have any context of what it said before, though this may be added ## Internals -Clipea is currently written in PHP but may switch to Python ([#3](https://github.com/dave1010/clipea/issues/3)). +Clipea is written in Python (3.10+). Clipea uses [llm](https://github.com/simonw/llm) to interact with large language models. diff --git a/clipea b/clipea deleted file mode 100755 index 5da1420..0000000 --- a/clipea +++ /dev/null @@ -1,94 +0,0 @@ -#!/usr/bin/env php - trim(shell_exec('ps -o comm= -p $(ps -o ppid= -p $(ps -o ppid= -p $$))')), - 'os' => PHP_OS, - 'editor' => getenv('EDITOR') ?? 'nano' - // 'cwd' => getcwd(), -]; - -$systemPromptFile = CLIPEA_DIR . '/system-prompt.txt'; -if (file_exists($_SERVER['HOME'] . '/.config/clipea/system-prompt.txt')) { - $systemPromptFile = $_SERVER['HOME'] . '/.config/clipea/system-prompt.txt'; -} -$system = file_get_contents($systemPromptFile) . json_encode($env); - - -function get_input(array $argv) { - $argv = array_slice($argv, 1); - $prompt = implode(' ', $argv); - - $stdin = get_stdin(); - if ($stdin) { - if (strlen($stdin) > 8192) { - die("Error: Input too long! Maximum 8192 characters allowed. Try limiting your input using 'head -c 8000 file.txt'."); - } - $prompt .= "\n~~~DATA~~~\n" . $stdin; - } - - return $prompt; -} - -$prompt = get_input($argv); - -switch($prompt) { - case 'alias': - require_once CLIPEA_DIR . "/route/alias.php"; - // no exit - break; - case 'help': - case '--help': - case '-h': - require_once CLIPEA_DIR . "/route/help.php"; - exit; - case '': - echo "Error: no query specified\n"; - exit; - case 'env': - require_once CLIPEA_DIR . "/route/env.php"; - exit; - case 'setup': - require_once CLIPEA_DIR . "/route/setup.php"; - exit; -} - -if (!shell_exec('which llm')) { - die("Error: dependency 'llm' not found. Run 'clipea setup' to install\n"); -} - -// GPT-4 mode -$llmOpts = ''; -if (substr($prompt, 0, 2) === '4 ') { - $llmOpts = ' -m 4 '; - $prompt = substr($prompt, 2); -} - -echo "📎🟢 "; - - -// Look how simple this is! 
-// All it does is stream characters from the llm process and let you execute some of them -$line = ''; -foreach (get_llm_response($system, $prompt, $llmOpts) as $char) { - echo $char; - $line .= $char; - - if ($char !== PHP_EOL) { - continue; - } - - if (substr($line, 0, 2) === '$ ') { - maybe_passthru(substr($line, 2, -1)); // remove "$ " and \n - } - - $line = ''; -} diff --git a/clipea.zsh b/clipea.zsh old mode 100755 new mode 100644 index 155d6c9..99170e0 --- a/clipea.zsh +++ b/clipea.zsh @@ -1,24 +1,4 @@ #!/bin/zsh -if [[ $ZSH_EVAL_CONTEXT != 'toplevel:file' ]]; then - echo "Error: Source the script instead of executing it:" - echo - echo "source $0" - return 1 2>/dev/null || exit 1 -fi - -DIR="$(dirname -- "$0")" - -TMP_FILE=$(mktemp) - -# Execute the PHP script with an environment variable -COMMAND_OUTPUT_FILE="$TMP_FILE" "$DIR/clipea" "$@" - -# Read the command to be placed on the Zsh command line -commandToPlace=$(< "$TMP_FILE") - -# Place it on the Zsh command line -print -rz "$commandToPlace" - -# Remove the temp file -rm "$TMP_FILE" +echo "Clipea has been updated." +echo "Please run 'clipea alias' or reinstall to upgrade." diff --git a/clipea/__init__.py b/clipea/__init__.py new file mode 100644 index 0000000..e9930aa --- /dev/null +++ b/clipea/__init__.py @@ -0,0 +1,19 @@ +"""Clipea application +📎🟢 Like Clippy but for the CLI. A blazing fast AI helper for your command line +""" +import os +from clipea import utils, cli + + +CLIPEA_DIR: str = os.path.dirname(os.path.realpath(__file__)) +USAGE_FILE_PATH: str = CLIPEA_DIR + "/usage.txt" +HOME_PATH: str = utils.anystr_force_str(os.path.expanduser("~")) +SYSTEM_PROMPT_FILE: str = utils.get_config_file_with_fallback( + home=HOME_PATH, fallback=CLIPEA_DIR, appname="clipea", filename="system-prompt.txt" +) +ENV: dict[str, str] = { + "shell": cli.get_shell(), + "os": os.name, + "editor": os.getenv("EDITOR", "nano"), +} +SYSTEM_PROMPT: str = utils.read_file(SYSTEM_PROMPT_FILE) + str(ENV) diff --git a/clipea/__main__.py b/clipea/__main__.py new file mode 100644 index 0000000..b9a2dd2 --- /dev/null +++ b/clipea/__main__.py @@ -0,0 +1,17 @@ +"""Clipea application entry point +""" +import sys +import shutil +from clipea import router + + +def clipea_main() -> None: + if shutil.which("llm") is None: + sys.exit('Error: dependency "llm" not found. Run "clipea setup" to install') + + USER_PROMPT = " ".join(sys.argv[1:]) + router.commands_router(USER_PROMPT) + + +if __name__ == "__main__": + clipea_main() diff --git a/clipea/cli.py b/clipea/cli.py new file mode 100644 index 0000000..e6bfc64 --- /dev/null +++ b/clipea/cli.py @@ -0,0 +1,48 @@ +"""CLI +Interactions with the terminal +""" +import os +import sys +import subprocess +import shutil + + +def get_input(max_len: int = 1 << 13) -> str: + """Get user data and do length check on it + + Returns: + str: user input + """ + data: str = "" + if not sys.stdin.isatty(): + data = input() + if len(data) > max_len: + raise ValueError( + f"Error: Input too long! 
Maximum {max_len} characters allowed.\ + Try limiting your input using 'head -c {max_len} file.txt" + ) + return data + + +def get_shell() -> str: + """Get user's default shell + + Returns: + str: shell's name + """ + + return ( + os.popen("ps -o comm= -p $(ps -o ppid= -p $(ps -o ppid= -p $$))").read().strip() + ) + + +def execute_with_prompt(cmd: str, shell: str = None) -> None: + """Asks the user if he wants to execute a command, executes it if so + + Args: + cmd (str): command to execute + shell (str, optional): to execute with a particuliar shell. Defaults to None. + """ + answer = input("\033[0;36mExecute? [y/N] \033[0m").strip().lower() + if sys.stdin.isatty() and answer == "y": + subprocess.run(cmd, shell=True, executable=shutil.which(shell), check=False) diff --git a/clipea/clipea.zsh b/clipea/clipea.zsh new file mode 100755 index 0000000..c48542b --- /dev/null +++ b/clipea/clipea.zsh @@ -0,0 +1,34 @@ +#!/bin/zsh + +if [[ $ZSH_EVAL_CONTEXT != 'toplevel:file' ]]; then + echo "Error: Source the script instead of executing it:" + echo + echo "source $0" + return 1 2>/dev/null || exit 1 +fi + +CLIPEA_TMP_FILE=$(mktemp) + +# https://stackoverflow.com/questions/9901210/bash-source0-equivalent-in-zsh +CLIPEA_SCRIPT_DIR=$(dirname $(readlink -f ${(%):-%x})) + +CLIPEA_PYTHON= + +CLIPEA_PATH=$(which clipea) + +# Run clipea from the current dir if possible +if [[ -f $CLIPEA_SCRIPT_DIR/__main__.py ]]; then + CLIPEA_PATH=$CLIPEA_SCRIPT_DIR + CLIPEA_PYTHON="$(which python3 || which python)" +fi + +# Execute clipea with an environment variable +CLIPEA_CMD_OUTPUT_FILE="$CLIPEA_TMP_FILE" $CLIPEA_PYTHON "$CLIPEA_PATH" "$@" + +# Read the command to be placed on the Zsh command line +CLIPEA_COMMAND_TO_PLACE=$(< "$CLIPEA_TMP_FILE") + +# Place it on the Zsh command line +print -rz "$CLIPEA_COMMAND_TO_PLACE" + +rm "$CLIPEA_TMP_FILE" diff --git a/clipea/clipea_llm.py b/clipea/clipea_llm.py new file mode 100644 index 0000000..144e4d2 --- /dev/null +++ b/clipea/clipea_llm.py @@ -0,0 +1,91 @@ +"""LLM +Interactions with `llm` python library +""" +import os +import llm.cli +import llm +import clipea.cli +from clipea import ENV, HOME_PATH, CLIPEA_DIR, utils + + +def init_llm(llm_model: str = "") -> llm.Model: + """Initialize base llm library with user's `llm_model` + + Args: + llm_model (str, optional): LLM model name (ex: "gpt-4"). + Defaults to content of {clipea config}/clipea_default_model.txt. + + Returns: + llm.Model + """ + clipea_default_model_path = utils.get_config_file_with_fallback( + home=HOME_PATH, + fallback=CLIPEA_DIR, + appname="clipea", + filename="clipea_default_model.txt", + ) + model = llm.get_model( + llm_model or llm.cli.get_default_model(filename=clipea_default_model_path) + ) + + if model.needs_key: + model.key = llm.get_key("", model.needs_key, model.key_env_var) + return model + + +def stream_commands(response: llm.Response, command_prefix: str = "") -> None: + """Streams llm response which returns shell commands + If a valid shell commands is returned, either prompt to execute it or + put it in zsh's command buffer + The processing is done internally with a nested function `process_command` + A command is considered valid if it starts with '$ ' and is a full line of answer + + Args: + response (llm.Response): LLM's answer to user's prompt + command_prefix (str, optional): What to write before streaming the commands. Defaults to "". 
+ """ + command: str = "" + output_file: str = os.getenv("CLIPEA_CMD_OUTPUT_FILE") + buffer: str = "" + new_line_pos: int + + def process_command(): + nonlocal command, buffer, new_line_pos + + current_command: str + if new_line_pos > 0: + current_command = command[2:new_line_pos] + else: + current_command = command[2:] + command = command[new_line_pos + 1 :] + + if output_file is not None: + buffer += current_command + os.linesep + else: + clipea.cli.execute_with_prompt(current_command, shell=ENV["shell"]) + + print(command_prefix, end="") + for chunk in response: + print(chunk, end="", flush=True) + command += chunk + + if (new_line_pos := command.find(os.linesep)) == -1: + continue + if command.startswith("$ "): + process_command() + else: + command = "" + + # llm CLI put a line feed manually to it's response, but not it's library + # We have to do this to manage the case where the model returns a + # non-linefeed terminated string. + # It also explains why there is a capturing nested function `process_command` + if command.startswith("$ "): + print() + process_command() + + if output_file: + utils.write_to_file( + output_file, + ';\ '.join(buffer.rstrip(os.linesep).split(os.linesep)) + os.linesep, + ) diff --git a/clipea/commands.py b/clipea/commands.py new file mode 100644 index 0000000..e049666 --- /dev/null +++ b/clipea/commands.py @@ -0,0 +1,73 @@ +"""Commands +Commands with a bit more logic than a few lines are stored there +""" +import sys +import json +from clipea import ENV, SYSTEM_PROMPT, CLIPEA_DIR, cli + + +def setup(): + """Checks if `llm` has an openai key and prompt to change it or create one""" + import llm.cli + + should_setup = True + path = llm.cli.user_dir() / "keys.json" + if path.exists(): + keys = json.loads(path.read_text()) + should_setup = "openai" not in keys.keys() + + if should_setup: + print( + "Get an OpenAI API key from: https://platform.openai.com/account/api-keys" + ) + else: + print("An OpenAI key is already set-up, proceed if you want to change it.") + llm.cli.keys_set() + + +def clipea_execute_prompt(user_prompt: str): + """Clipea's logic on prompt. + Structure all user input as so: + + + ~~~DATA~~~ + + + Sends it to `llm`, stream the responses and prompt if the user wants + to execute them. If zsh extension is enabled, it will be put into + zsh's buffer + + Args: + user_prompt (str): user command input + """ + from clipea import clipea_llm + from llm import Model, Response + + llm_name:str = '' + if user_prompt.startswith('4 '): + user_prompt = user_prompt[2:] + llm_name = 'gpt-4' + + try: + model: Model = clipea_llm.init_llm(llm_name) + except Exception as e: + sys.exit(str(e)) + + user_data: str = cli.get_input() + response: Response = model.prompt( + system=SYSTEM_PROMPT, + prompt=user_prompt + (("\n~~~DATA~~~\n" + user_data) if user_data else ""), + ) + clipea_llm.stream_commands(response, command_prefix="📎🟢") + + +def alias(): + """Gives zsh's alias (automatic command buffering) commands to the user""" + shell: str = ENV["shell"] + if shell == "zsh" or shell == "-zsh": + command: str = f"alias '??'='source {CLIPEA_DIR}/clipea.zsh'" + user_prompt: str = f"Append this line to my {shell} startup file, \ + watching out for quotes and escaping, then explain how to manually source it: {command}" + clipea_execute_prompt(user_prompt) + else: + print(f"`alias` feature is only for zsh users. 
diff --git a/clipea/router.py b/clipea/router.py new file mode 100644 index 0000000..dff738c --- /dev/null +++ b/clipea/router.py @@ -0,0 +1,29 @@ +"""clipea +Base application logic +""" +import sys +from pprint import pprint +from clipea import ENV, USAGE_FILE_PATH, commands, utils + + +def commands_router(user_prompt: str) -> None: + """Executes the correct behavior depending on user input + + Args: + user_prompt (str): user input + """ + args: list[str] = user_prompt.split() + if len(args) == 0: + sys.exit("No query specified") + + match args[0]: + case "alias": + commands.alias() + case "env": + pprint(ENV) + case "setup": + commands.setup() + case "-h" | "--help" | "help": + print(utils.read_file(USAGE_FILE_PATH)) + case _: + commands.clipea_execute_prompt(user_prompt)
diff --git a/system-prompt.txt b/clipea/system-prompt.txt similarity index 100% rename from system-prompt.txt rename to clipea/system-prompt.txt
diff --git a/usage.txt b/clipea/usage.txt similarity index 100% rename from usage.txt rename to clipea/usage.txt
diff --git a/clipea/utils.py b/clipea/utils.py new file mode 100644 index 0000000..1fe54f2 --- /dev/null +++ b/clipea/utils.py @@ -0,0 +1,64 @@ +"""Utils +utils for the clipea application +""" +from typing import AnyStr +from pathlib import Path + + +def anystr_force_str(value: AnyStr) -> str: + """Takes any AnyStr and gives back str + + Args: + value (AnyStr) + + Returns: + str: AnyStr's bytes decoded to str, or the str itself + """ + return value.decode("utf-8") if isinstance(value, bytes) else value + +def read_file(file_path: str) -> str: + """Reads a file + + Args: + file_path (str) + + Returns: + str: file's content + """ + with open(file_path, encoding="utf-8") as f: + return anystr_force_str(f.read()) + + +def get_config_file_with_fallback( + home: str, fallback: str, appname: str, filename: str +) -> str: + """Returns opinionated config file path + + Args: + home (str): user's home + fallback (str): fallback in case the file doesn't exist + appname (str): your app name + filename (str): file you're trying to get + + Returns: + str: {home}/.config/{appname}/{filename} if it exists, else {fallback}/{filename} + """ + config_path_obj: Path + if (config_path_obj := Path(home + f"/.config/{appname}/{filename}")).is_file(): + return str(config_path_obj) + return fallback + f"/{filename}" + + +def write_to_file(file_path: str, content: AnyStr, mode: str = "w") -> None: + """Write to file + + Args: + file_path (str) + content (AnyStr) + mode (str, optional): Defaults to "w". + """ + with open(file_path, mode, encoding="utf-8") as f: + f.write(content)
diff --git a/lib/cli.php b/lib/cli.php deleted file mode 100644 index 2fd5c03..0000000 --- a/lib/cli.php +++ /dev/null @@ -1,52 +0,0 @@ - ['pipe', 'w'], // stdout - ]; - - $pipes = []; - $process = proc_open($cmd, $descriptorspec, $pipes); - - if (is_resource($process)) { - while (!feof($pipes[1])) { - yield fread($pipes[1], $chunkSize); - } - - // Clean up - fclose($pipes[1]); - proc_close($process); - } -} - -function maybe_passthru($cmd) { - // Zsh magic - $outputFile = getenv('COMMAND_OUTPUT_FILE'); - if ($outputFile) { - // Let clipea.zsh handle running the command - file_put_contents($outputFile, $cmd); - exit; - } - - - echo "\033[0;36mExecute? 
[y/N] \033[0m"; - - $input = strtolower(trim(fgets(STDIN))); - - if ($input !== 'y') { - return; - } - - passthru($cmd); -} - -function get_stdin() { - $dataFromStdin = ""; - if (!posix_isatty(STDIN)) { - while (!feof(STDIN)) { - $dataFromStdin .= fgets(STDIN); - } - } - return $dataFromStdin; -} \ No newline at end of file diff --git a/lib/llm.php b/lib/llm.php deleted file mode 100644 index 1701c31..0000000 --- a/lib/llm.php +++ /dev/null @@ -1,8 +0,0 @@ -&1"; - yield from stream_exec($cmd, 1); - yield PHP_EOL; -} diff --git a/route/alias.php b/route/alias.php deleted file mode 100644 index b4dd3e0..0000000 --- a/route/alias.php +++ /dev/null @@ -1,20 +0,0 @@ -&1', $whichLlmOutput); - -if (empty($whichLlmOutput)) { - die("Error: llm not available. Install with 'pip install llm' or 'brew install llm'\n"); -} - -// Check if llm has OpenAI key set -exec('llm keys list 2>&1', $llmKeysOutput); - -if (strpos(implode("\n", $llmKeysOutput), 'openai') === false) { - echo "Get an OpenAI API key from: https://platform.openai.com/account/api-keys\n"; - passthru('llm keys set openai'); -} - -echo "Setup complete!\n"; \ No newline at end of file diff --git a/setup.py b/setup.py new file mode 100644 index 0000000..d8f119d --- /dev/null +++ b/setup.py @@ -0,0 +1,18 @@ +"""Setup file for clipea +""" +from setuptools import setup, find_packages + +setup( + name="clipea", + version="0.1.0", + description=" 📎🟢 Like Clippy but for the CLI. A blazing fast AI helper for your command line ", + url="https://github.com/dave1010/clipea/", + author="Dave Hulbert", + author_email="dave1010@gmail.com", + license="MIT", + packages=find_packages(), + install_requires=["llm"], + python_requires=">=3.10", + package_data={"clipea": ["*.txt", "clipea.zsh"]}, + entry_points={"console_scripts": ["clipea = clipea.__main__:clipea_main"]}, +)
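
Reviewer note (not part of the diff): the heart of the Python rewrite is `clipea_llm.stream_commands`, which buffers streamed chunks and treats any completed line starting with `$ ` as a runnable shell command. Below is a minimal standalone sketch of that pattern; the `demo_stream` helper and the fake chunk list are illustrative only, and the real code uses `os.linesep` and hands each detected command to `execute_with_prompt` or to the zsh buffer file.

    # Sketch of the "$ " command-detection loop used by stream_commands (illustrative only).
    def demo_stream(chunks):
        command = ""
        for chunk in chunks:
            command += chunk
            if (new_line_pos := command.find("\n")) == -1:
                continue  # no complete line yet, keep buffering
            if command.startswith("$ "):
                print("would offer to run:", command[2:new_line_pos])
                command = command[new_line_pos + 1:]  # keep anything after the newline
            else:
                command = ""  # discard non-command (explanation) lines

    demo_stream(["Here is a command:\n", "$ ls ", "-la\n", "Done.\n"])
    # -> would offer to run: ls -la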