Skip to content

Commit

Permalink
Feat(Builder): Enhance AITextSummarizerBlock with configurable summary style and focus (#8165)
Browse files Browse the repository at this point in the history

* feat(platform): Enhance AITextSummarizerBlock with configurable summary style and focus

The AITextSummarizerBlock in the autogpt_platform/backend/backend/blocks/llm.py file has been enhanced to include the following changes:
- Added a new enum class, SummaryStyle, with options for concise, detailed, bullet points, and numbered list styles.
- Added a new input parameter, focus, to specify the topic of the summary.
- Modified the _summarize_chunk method to include the style and focus in the prompt.
- Modified the _combine_summaries method to include the style and focus in the prompt.

These changes allow users to customize the style and focus of the generated summaries, providing more flexibility and control.

* run formatting and linting
  • Loading branch information
Torantulino authored Sep 26, 2024
1 parent b4097f3 commit 41e3c4f
Showing 1 changed file with 12 additions and 6 deletions.
18 changes: 12 additions & 6 deletions autogpt_platform/backend/backend/blocks/llm.py
Original file line number Diff line number Diff line change
Expand Up @@ -362,6 +362,13 @@ def run(self, input_data: Input, **kwargs) -> BlockOutput:
yield "error", str(e)


class SummaryStyle(Enum):
    """Presentation style for an AI-generated summary.

    Each member's value is the human-readable phrase that is interpolated
    directly into the summarization prompt (e.g. "... in a concise form ...").
    """

    CONCISE = "concise"
    DETAILED = "detailed"
    BULLET_POINTS = "bullet points"
    NUMBERED_LIST = "numbered list"

    def __str__(self) -> str:
        # The prompt builders interpolate the member itself (f"{input_data.style}").
        # A plain Enum member formats as "SummaryStyle.CONCISE", which would leak
        # the class name into the LLM prompt; return the value so the prompt
        # reads naturally ("in a concise form").
        return self.value


class AITextSummarizerBlock(Block):
class Input(BlockSchema):
text: str
Expand All @@ -370,6 +377,8 @@ class Input(BlockSchema):
default=LlmModel.GPT4_TURBO,
description="The language model to use for summarizing the text.",
)
focus: str = "general information"
style: SummaryStyle = SummaryStyle.CONCISE
api_key: BlockSecret = SecretField(value="")
# TODO: Make this dynamic
max_tokens: int = 4000 # Adjust based on the model's context window
Expand Down Expand Up @@ -440,7 +449,7 @@ def llm_call(
raise ValueError("Failed to get a response from the LLM.")

def _summarize_chunk(self, chunk: str, input_data: Input) -> str:
prompt = f"Summarize the following text concisely:\n\n{chunk}"
prompt = f"Summarize the following text in a {input_data.style} form. Focus your summary on the topic of `{input_data.focus}` if present, otherwise just provide a general summary:\n\n```{chunk}```"

llm_response = self.llm_call(
AIStructuredResponseGeneratorBlock.Input(
Expand All @@ -454,13 +463,10 @@ def _summarize_chunk(self, chunk: str, input_data: Input) -> str:
return llm_response["summary"]

def _combine_summaries(self, summaries: list[str], input_data: Input) -> str:
combined_text = " ".join(summaries)
combined_text = "\n\n".join(summaries)

if len(combined_text.split()) <= input_data.max_tokens:
prompt = (
"Provide a final, concise summary of the following summaries:\n\n"
+ combined_text
)
prompt = f"Provide a final summary of the following section summaries in a {input_data.style} form, focus your summary on the topic of `{input_data.focus}` if present:\n\n ```{combined_text}```\n\n Just respond with the final_summary in the format specified."

llm_response = self.llm_call(
AIStructuredResponseGeneratorBlock.Input(
Expand Down

0 comments on commit 41e3c4f

Please sign in to comment.