Skip to content

Commit

Permalink
Merge pull request #1596 from crewAIInc/tm-recording-cached-prompt-tokens
Browse files · Browse the repository at this point in the history

Add cached prompt tokens info on usage metrics
  • Loading branch information
thiagomoretto authored Nov 13, 2024
2 parents b98f8f9 + f02681a commit 7fb1289
Show file tree
Hide file tree
Showing 5 changed files with 136 additions and 95 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,7 @@
class TokenProcess:
# Running token-usage counters accumulated across LLM calls.
total_tokens: int = 0  # prompt + completion tokens combined
prompt_tokens: int = 0  # tokens sent in prompts
cached_prompt_tokens: int = 0  # prompt tokens reported as cache hits; tracked separately, not folded into total_tokens
completion_tokens: int = 0  # tokens received back in completions
successful_requests: int = 0  # count of successful LLM requests

def sum_completion_tokens(self, tokens: int):
    """Fold *tokens* into both the completion counter and the grand total."""
    self.completion_tokens += tokens
    self.total_tokens += tokens

def sum_cached_prompt_tokens(self, tokens: int):
    """Add *tokens* to the cached-prompt counter (total_tokens is untouched)."""
    self.cached_prompt_tokens += tokens

def sum_successful_requests(self, requests: int):
    """Bump the successful-request counter by *requests*."""
    self.successful_requests += requests

def get_summary(self) -> UsageMetrics:
    """Snapshot every counter into a new UsageMetrics instance."""
    counters = {
        "total_tokens": self.total_tokens,
        "prompt_tokens": self.prompt_tokens,
        "cached_prompt_tokens": self.cached_prompt_tokens,
        "completion_tokens": self.completion_tokens,
        "successful_requests": self.successful_requests,
    }
    return UsageMetrics(**counters)
5 changes: 5 additions & 0 deletions src/crewai/types/usage_metrics.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,6 +8,7 @@ class UsageMetrics(BaseModel):
Attributes:
total_tokens: Total number of tokens used.
prompt_tokens: Number of tokens used in prompts.
cached_prompt_tokens: Number of cached prompt tokens used.
completion_tokens: Number of tokens used in completions.
successful_requests: Number of successful requests made.
"""
Expand All @@ -16,6 +17,9 @@ class UsageMetrics(BaseModel):
# Pydantic field declarations for the usage-metrics model; each defaults to 0
# so a freshly constructed UsageMetrics starts with empty counters.
prompt_tokens: int = Field(
default=0, description="Number of tokens used in prompts."
)
# Cached prompt tokens are tracked separately from prompt_tokens.
cached_prompt_tokens: int = Field(
default=0, description="Number of cached prompt tokens used."
)
completion_tokens: int = Field(
default=0, description="Number of tokens used in completions."
)
def add_usage_metrics(self, usage_metrics: "UsageMetrics"):
    """Accumulate another UsageMetrics' counters into this instance, in place.

    Args:
        usage_metrics: The metrics whose counts are added to ours.
    """
    for counter in (
        "total_tokens",
        "prompt_tokens",
        "cached_prompt_tokens",
        "completion_tokens",
        "successful_requests",
    ):
        setattr(self, counter, getattr(self, counter) + getattr(usage_metrics, counter))
13 changes: 8 additions & 5 deletions src/crewai/utilities/token_counter_callback.py
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
from litellm.integrations.custom_logger import CustomLogger

from litellm.types.utils import Usage
from crewai.agents.agent_builder.utilities.base_token_process import TokenProcess


def log_success_event(self, kwargs, response_obj, start_time, end_time):
    """Record token usage from a successful LLM call into the token process.

    Reads the `usage` entry of *response_obj* and forwards its prompt,
    completion, and (when reported) cached-prompt token counts to
    `self.token_cost_process`. No-op when no token process is attached.

    Args:
        kwargs: Callback kwargs from litellm (unused here).
        response_obj: Mapping containing a `usage` object with token counts.
        start_time: Call start timestamp (unused here).
        end_time: Call end timestamp (unused here).
    """
    if self.token_cost_process is None:
        return

    # PEP 8: no space before the colon in an annotated assignment.
    usage: Usage = response_obj["usage"]
    self.token_cost_process.sum_successful_requests(1)
    self.token_cost_process.sum_prompt_tokens(usage.prompt_tokens)
    self.token_cost_process.sum_completion_tokens(usage.completion_tokens)
    # prompt_tokens_details is only populated by providers that report
    # prompt caching; guard before reading cached_tokens.
    if usage.prompt_tokens_details:
        self.token_cost_process.sum_cached_prompt_tokens(
            usage.prompt_tokens_details.cached_tokens
        )
Loading

0 comments on commit 7fb1289

Please sign in to comment.