Skip to content

Commit

Permalink
Adds helpers for working with prompts (#87)
Browse files Browse the repository at this point in the history
- Sometimes it's useful to copy the prompt to your clipboard or view it in
the browser

---------

Co-authored-by: Colton Loftus <[email protected]>
Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com>
  • Loading branch information
3 people authored Jul 13, 2024
1 parent ce5719b commit 9ad1833
Show file tree
Hide file tree
Showing 3 changed files with 16 additions and 8 deletions.
4 changes: 4 additions & 0 deletions GPT/beta-commands/beta-gpt.talon
Original file line number Diff line number Diff line change
Expand Up @@ -11,3 +11,7 @@ model blend clip:
destination_text = edit.selected_text()
result = user.gpt_blend(clipboard_text, destination_text)
user.gpt_insert_response(result, "")

# Pass the raw text of a prompt to a destination without actually calling GPT with it
model pass <user.modelPrompt> [{user.modelDestination}]:
user.gpt_insert_response(modelPrompt, modelDestination or "")
9 changes: 5 additions & 4 deletions GPT/gpt.py
Original file line number Diff line number Diff line change
Expand Up @@ -154,6 +154,11 @@ def gpt_apply_prompt(
else text_to_process
)

# Apply modifiers to prompt before handling special cases
match modifier:
case "snip":
prompt += "\n\nPlease return the response as a textmate snippet for insertion into an editor with placeholders that the user should edit. Return just the snippet content - no XML and no heading."

# Ask is a special case, where the text to process is the prompted question, not the selected text
if prompt.startswith("ask"):
text_to_process = prompt.removeprefix("ask")
Expand All @@ -162,10 +167,6 @@ def gpt_apply_prompt(
elif prompt == "pass":
return text_to_process

match modifier:
case "snip":
prompt += "\n\nPlease return the response as a textmate snippet for insertion into an editor with placeholders that the user should edit. Return just the snippet content - no XML and no heading."

return gpt_query(prompt, text_to_process)

def gpt_help():
Expand Down
11 changes: 7 additions & 4 deletions GPT/gpt.talon
Original file line number Diff line number Diff line change
Expand Up @@ -10,21 +10,24 @@ model <user.modelPrompt> [{user.modelSource}] [{user.modelDestination}]:
result = user.gpt_apply_prompt(modelPrompt, text)
user.gpt_insert_response(result, modelDestination or "")

# Modifies a model command to be inserted as a snippet instead of a standard paste
# Select the last GPT response so you can edit it further
model take response: user.gpt_select_last()

# Modifies a model command to be inserted as a snippet for VSCode instead of a standard paste
# Otherwise same grammar as standard `model` command
model snip <user.modelPrompt> [{user.modelSource}] [{user.modelDestination}]:
text = user.gpt_get_source_text(modelSource or "")
result = user.gpt_apply_prompt(modelPrompt, text, "snip")
user.gpt_insert_response(result, modelDestination or "", "snip")

# Modifies a model command to always insert with the text selected
# Useful for chaining together prompts immediately after they return
# Otherwise same grammar as standard `model` command
model chain <user.modelPrompt> [{user.modelSource}] [{user.modelDestination}]:
text = user.gpt_get_source_text(modelSource or "")
result = user.gpt_apply_prompt(modelPrompt, text)
user.gpt_insert_response(result, modelDestination or "", "chain")

# Select the last GPT response so you can edit it further
model take response: user.gpt_select_last()

# Applies an arbitrary prompt from the clipboard to selected text and pastes the result.
# Useful for applying complex/custom prompts that need to be drafted in a text editor.
model apply [from] clip$:
Expand Down

0 comments on commit 9ad1833

Please sign in to comment.