diff --git a/docs/images/logfire-screenshot-mirascope-anthropic-call.png b/docs/images/logfire-screenshot-mirascope-anthropic-call.png
new file mode 100644
index 00000000..ac27e42c
Binary files /dev/null and b/docs/images/logfire-screenshot-mirascope-anthropic-call.png differ
diff --git a/docs/images/logfire-screenshot-mirascope-openai-extractor.png b/docs/images/logfire-screenshot-mirascope-openai-extractor.png
new file mode 100644
index 00000000..ee53a9ba
Binary files /dev/null and b/docs/images/logfire-screenshot-mirascope-openai-extractor.png differ
diff --git a/docs/integrations/third_party/mirascope.md b/docs/integrations/third_party/mirascope.md
new file mode 100644
index 00000000..84de9402
--- /dev/null
+++ b/docs/integrations/third_party/mirascope.md
@@ -0,0 +1,96 @@
+[Mirascope](https://github.com/Mirascope/mirascope) is an intuitive approach to building AI-powered applications using LLMs. Their library integrates with Logfire to make observability and monitoring for LLMs easy and seamless.
+
+You can enable it using their [`@with_logfire`][mirascope-logfire] decorator, which will work with all of the [model providers that they support][mirascope-supported-providers] (e.g. OpenAI, Anthropic, Groq, and more).
+
+```py hl_lines="1 2 5 8"
+import logfire
+from mirascope.logfire import with_logfire
+from mirascope.anthropic import AnthropicCall
+
+logfire.configure()
+
+
+@with_logfire
+class BookRecommender(AnthropicCall):
+    prompt_template = "Please recommend some {genre} books"
+
+    genre: str
+
+
+recommender = BookRecommender(genre="fantasy")
+response = recommender.call()  # this will automatically get logged with logfire
+print(response.content)
+#> Here are some recommendations for great fantasy book series: ...
+```
+
+This will give you:
+
+* A span around the `AnthropicCall.call()` that captures items like the prompt template, templating properties and fields, and input/output attributes.
+* Human-readable display of the conversation with the agent.
+* Details of the response, including the number of tokens used.
+
+<figure markdown="span">
+ ![Logfire Mirascope Anthropic call](../../images/logfire-screenshot-mirascope-anthropic-call.png){ width="500" } +
+ <figcaption>Mirascope Anthropic Call span and Anthropic span and conversation</figcaption>
+</figure>
+
+Since Mirascope is built on top of [Pydantic][pydantic], you can use the [Pydantic plugin][pydantic-plugin] to track additional logs and metrics about model validation, which you can enable using the [`pydantic_plugin`][logfire.configure(pydantic_plugin)] configuration.
+
+This can be particularly useful when [extracting structured information][mirascope-extracting-structured-information] using LLMs:
+
+```py hl_lines="3 4 8 17"
+from typing import Literal, Type
+
+import logfire
+from mirascope.logfire import with_logfire
+from mirascope.openai import OpenAIExtractor
+from pydantic import BaseModel
+
+logfire.configure(pydantic_plugin=logfire.PydanticPlugin(record="all"))
+
+
+class TaskDetails(BaseModel):
+    description: str
+    due_date: str
+    priority: Literal["low", "normal", "high"]
+
+
+@with_logfire
+class TaskExtractor(OpenAIExtractor[TaskDetails]):
+    extract_schema: Type[TaskDetails] = TaskDetails
+    prompt_template = """
+    Extract the task details from the following task:
+    {task}
+    """
+
+    task: str
+
+
+task = "Submit quarterly report by next Friday. Task is high priority."
+task_details = TaskExtractor(
+    task=task
+).extract()  # this will be logged automatically with logfire
+assert isinstance(task_details, TaskDetails)
+print(task_details)
+#> description='Submit quarterly report' due_date='next Friday' priority='high'
+```
+
+This will give you:
+
+* Tracking for validation of Pydantic models.
+* A span around the `OpenAIExtractor.extract()` that captures items like the prompt template, templating properties and fields, and input/output attributes.
+* Human-readable display of the conversation with the agent, including the function call.
+* Details of the response, including the number of tokens used.
+
+<figure markdown="span">
+ ![Logfire Mirascope OpenAI Extractor](../../images/logfire-screenshot-mirascope-openai-extractor.png){ width="500" }
+ <figcaption>Mirascope OpenAI Extractor span and OpenAI span and function call</figcaption>
+</figure>
+
+For more information on Mirascope and what you can do with it, check out their [documentation](https://docs.mirascope.io).
+
+[mirascope-logfire]: https://docs.mirascope.io/latest/integrations/logfire/#how-to-use-logfire-with-mirascope
+[mirascope-supported-providers]: https://docs.mirascope.io/latest/concepts/supported_llm_providers/
+[mirascope-extracting-structured-information]: https://docs.mirascope.io/latest/concepts/extracting_structured_information_using_llms/
+[pydantic]: https://docs.pydantic.dev/latest/
+[pydantic-plugin]: https://docs.pydantic.dev/latest/concepts/plugins/
diff --git a/docs/plugins/main.py b/docs/plugins/main.py
index b184ba44..57cf3c9b 100644
--- a/docs/plugins/main.py
+++ b/docs/plugins/main.py
@@ -20,8 +20,8 @@ def on_page_markdown(markdown: str, page: Page, config: Config, files: Files) ->
     markdown = build_environment_variables_table(markdown, page)
     markdown = logfire_print_help(markdown, page)
     markdown = install_logfire(markdown, page)
-    if page.file.src_uri == 'guides/onboarding_checklist/06_add_metrics.md':
-        check_documented_system_metrics(markdown, page)
+    markdown = check_documented_system_metrics(markdown, page)
+    markdown = warning_on_third_party(markdown, page)
     return markdown
 
 
@@ -36,6 +36,9 @@ def check_documented_system_metrics(markdown: str, page: Page) -> str:
 
     This function checks that all the metrics in `DEFAULT_CONFIG` are documented.
     """
+    if page.file.src_uri != 'guides/onboarding_checklist/06_add_metrics.md':
+        return markdown
+
     metrics_documented: set[str] = set()
     for line in markdown.splitlines():
         match = re.search(r'\* `(.*)`: ', line)
@@ -134,3 +137,17 @@ def install_logfire(markdown: str, page: Page) -> str:
     ```
     """
     return re.sub(r'{{ *install_logfire\(.*\) *}}', instructions, markdown)
+
+
+def warning_on_third_party(markdown: str, page: Page) -> str:
+    if not page.file.src_uri.startswith('integrations/third_party/'):
+        return markdown
+
+    note = """
+!!! note "Third-party integrations"
+    Third-party integrations are not officially supported by **Logfire**.
+
+    They are maintained by the community and may not be as reliable as the integrations developed by **Logfire**.
+"""
+
+    return note + markdown
diff --git a/mkdocs.yml b/mkdocs.yml
index 78dc3421..c6d77dc0 100644
--- a/mkdocs.yml
+++ b/mkdocs.yml
@@ -117,6 +117,8 @@ nav:
     - Logging: integrations/logging.md
     - Structlog: integrations/structlog.md
     - Loguru: integrations/loguru.md
+    - Third Party:
+      - Mirascope: integrations/third_party/mirascope.md
     - Use Cases:
       - Web Frameworks: integrations/use_cases/web_frameworks.md
   - Reference: