Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Wrapped content to distinguish prompt and content #152

Closed
wants to merge 2 commits into from
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
26 changes: 14 additions & 12 deletions GPT/gpt.py
Original file line number Diff line number Diff line change
Expand Up @@ -50,7 +50,7 @@ def gpt_blend(source_text: str, destination_text: str):
Please return only the final text. What follows is all of the source texts separated by '---'.
"""

result = gpt_query(format_message(prompt), format_message(source_text))
result = gpt_query(format_message(prompt), format_message(source_text, True))
actions.user.gpt_insert_response(extract_message(result), "paste")

def gpt_blend_list(source_text: list[str], destination_text: str):
Expand All @@ -70,7 +70,9 @@ def gpt_generate_shell(text_to_process: str) -> str:
Condense the code into a single line such that it can be ran in the terminal.
"""

result = gpt_query(format_message(prompt), format_message(text_to_process))
result = gpt_query(
format_message(prompt), format_message(text_to_process, True)
)
return result.get("text", "")

def gpt_generate_sql(text_to_process: str) -> str:
Expand All @@ -82,9 +84,9 @@ def gpt_generate_sql(text_to_process: str) -> str:
Do not output comments, backticks, or natural language explanations.
Prioritize SQL queries that are database agnostic.
"""
return gpt_query(format_message(prompt), format_message(text_to_process)).get(
"text", ""
)
return gpt_query(
format_message(prompt), format_message(text_to_process, True)
).get("text", "")

def gpt_clear_context():
"""Reset the stored context"""
Expand Down Expand Up @@ -154,7 +156,7 @@ def gpt_run_prompt(destination: str, prompt: str, source: str) -> str:
# Handle special cases in the prompt
### Ask is a special case, where the text to process is the prompted question, not selected text
if prompt.startswith("ask"):
text_to_process = format_message(prompt.removeprefix("ask"))
text_to_process = format_message(prompt.removeprefix("ask"), True)
prompt = "Generate text that satisfies the question or request given in the input."

response = gpt_query(format_message(prompt), text_to_process, destination)
Expand Down Expand Up @@ -184,7 +186,7 @@ def gpt_reformat_last(how_to_reformat: str):
last_output = actions.user.get_last_phrase()
if last_output:
actions.user.clear_last_phrase()
return gpt_query(format_message(PROMPT), format_message(last_output))
return gpt_query(format_message(PROMPT), format_message(last_output, True))
else:
notify("No text to reformat")
raise Exception("No text to reformat")
Expand Down Expand Up @@ -267,26 +269,26 @@ def gpt_get_source_text(spoken_text: str) -> GPTMessageItem:
raise Exception(
"GPT Failure: User applied a prompt to the phrase context, but there was no context stored"
)
return format_message(messages_to_string(GPTState.context))
return format_message(messages_to_string(GPTState.context), True)
case "thread":
# TODO: Do we want to throw an exception here if the thread is empty?
return format_message(thread_to_string(GPTState.thread))
return format_message(thread_to_string(GPTState.thread), True)
case "gptResponse":
if GPTState.last_response == "":
raise Exception(
"GPT Failure: User applied a prompt to the phrase GPT response, but there was no GPT response stored"
)
return format_message(GPTState.last_response)
return format_message(GPTState.last_response, True)

case "lastTalonDictation":
last_output = actions.user.get_last_phrase()
if last_output:
actions.user.clear_last_phrase()
return format_message(last_output)
return format_message(last_output, True)
else:
notify("GPT Failure: No last dictation to reformat")
raise Exception(
"GPT Failure: User applied a prompt to the phrase last Talon Dictation, but there was no text to reformat"
)
case "this" | _:
return format_message(actions.edit.selected_text())
return format_message(actions.edit.selected_text(), True)
10 changes: 8 additions & 2 deletions lib/modelHelpers.py
Original file line number Diff line number Diff line change
Expand Up @@ -64,7 +64,13 @@ def format_messages(
}


def format_message(content: str) -> GPTMessageItem:
def format_message(content: str, wrapContent: bool = False) -> GPTMessageItem:
    """Package plain text as a GPTMessageItem dict.

    content: the message text to send to the model.
    wrapContent: when True, surround the text with literal triple-quote
        delimiters so the model can distinguish quoted user content from
        the prompt instructions themselves (callers pass True for user
        source text, clipboard text, context, etc.).
    Returns a dict of the shape {"type": "text", "text": <content>}.
    """
    if wrapContent:
        # \"\"\" embeds literal triple quotes in the f-string output, so the
        # payload becomes the content fenced by \"\"\" markers on their own
        # lines. NOTE(review): the surrounding newlines/indentation inside
        # this f-string are part of the payload sent to the model; the exact
        # original whitespace was lost in this diff view — confirm against
        # the repository before relying on it.
        content = f"""
        \"\"\"
        {content}
        \"\"\"
        """
    return {"type": "text", "text": content}


Expand All @@ -87,7 +93,7 @@ def format_clipboard() -> GPTMessageItem:
"User requested info from the clipboard but there is nothing in it"
)

return format_message(clip.text()) # type: ignore Unclear why this is not narrowing the type
return format_message(clip.text(), True) # type: ignore Unclear why this is not narrowing the type


def send_request(
Expand Down