From 168827e72aafc07e3d61559fbbba7b5bc0304601 Mon Sep 17 00:00:00 2001 From: William Conti Date: Mon, 3 Jun 2024 12:50:30 -0400 Subject: [PATCH 01/33] initial commit --- .circleci/config.templ.yml | 8 + .riot/requirements/1f1413a.txt | 62 +++++++ ddtrace/_monkey.py | 1 + ddtrace/contrib/anthropic/__init__.py | 17 ++ ddtrace/contrib/anthropic/patch.py | 158 ++++++++++++++++++ ddtrace/contrib/anthropic/utils.py | 30 ++++ ddtrace/llmobs/_integrations/__init__.py | 9 +- ddtrace/llmobs/_integrations/anthropic.py | 35 ++++ tests/.suitespec.json | 14 ++ tests/contrib/anthropic/__init__.py | 0 .../cassettes/anthropic_completion_error.yaml | 67 ++++++++ .../cassettes/anthropic_completion_sync.yaml | 95 +++++++++++ .../anthropic_completion_sync_39.yaml | 95 +++++++++++ ...nthropic_completion_sync_multi_prompt.yaml | 97 +++++++++++ ...n_sync_multi_prompt_with_chat_history.yaml | 95 +++++++++++ tests/contrib/anthropic/conftest.py | 48 ++++++ tests/contrib/anthropic/test_anthropic.py | 115 +++++++++++++ .../contrib/anthropic/test_anthropic_patch.py | 21 +++ tests/contrib/anthropic/utils.py | 30 ++++ ...st_anthropic.test_anthropic_llm_error.json | 32 ++++ ...est_anthropic.test_anthropic_llm_sync.json | 38 +++++ ...t_anthropic_llm_sync_multiple_prompts.json | 40 +++++ ...nc_multiple_prompts_with_chat_history.json | 48 ++++++ 23 files changed, 1154 insertions(+), 1 deletion(-) create mode 100644 .riot/requirements/1f1413a.txt create mode 100644 ddtrace/contrib/anthropic/__init__.py create mode 100644 ddtrace/contrib/anthropic/patch.py create mode 100644 ddtrace/contrib/anthropic/utils.py create mode 100644 ddtrace/llmobs/_integrations/anthropic.py create mode 100644 tests/contrib/anthropic/__init__.py create mode 100644 tests/contrib/anthropic/cassettes/anthropic_completion_error.yaml create mode 100644 tests/contrib/anthropic/cassettes/anthropic_completion_sync.yaml create mode 100644 tests/contrib/anthropic/cassettes/anthropic_completion_sync_39.yaml create mode 100644 
tests/contrib/anthropic/cassettes/anthropic_completion_sync_multi_prompt.yaml create mode 100644 tests/contrib/anthropic/cassettes/anthropic_completion_sync_multi_prompt_with_chat_history.yaml create mode 100644 tests/contrib/anthropic/conftest.py create mode 100644 tests/contrib/anthropic/test_anthropic.py create mode 100644 tests/contrib/anthropic/test_anthropic_patch.py create mode 100644 tests/contrib/anthropic/utils.py create mode 100644 tests/snapshots/tests.contrib.anthropic.test_anthropic.test_anthropic_llm_error.json create mode 100644 tests/snapshots/tests.contrib.anthropic.test_anthropic.test_anthropic_llm_sync.json create mode 100644 tests/snapshots/tests.contrib.anthropic.test_anthropic.test_anthropic_llm_sync_multiple_prompts.json create mode 100644 tests/snapshots/tests.contrib.anthropic.test_anthropic.test_anthropic_llm_sync_multiple_prompts_with_chat_history.json diff --git a/.circleci/config.templ.yml b/.circleci/config.templ.yml index a02d01e8665..dfe0f7c5746 100644 --- a/.circleci/config.templ.yml +++ b/.circleci/config.templ.yml @@ -1299,6 +1299,14 @@ jobs: pattern: "langchain" snapshot: true + anthropic: + <<: *machine_executor + parallelism: 3 + steps: + - run_test: + pattern: "anthropic" + snapshot: true + logbook: <<: *machine_executor steps: diff --git a/.riot/requirements/1f1413a.txt b/.riot/requirements/1f1413a.txt new file mode 100644 index 00000000000..f8258a6316f --- /dev/null +++ b/.riot/requirements/1f1413a.txt @@ -0,0 +1,62 @@ +# +# This file is autogenerated by pip-compile with Python 3.10 +# by the following command: +# +# pip-compile --no-annotate .riot/requirements/1f1413a.in +# +ai21==2.4.1 +ai21-tokenizer==0.9.1 +annotated-types==0.7.0 +anthropic==0.28.0 +anyio==4.4.0 +attrs==23.2.0 +certifi==2024.6.2 +charset-normalizer==3.3.2 +coverage[toml]==7.5.3 +dataclasses-json==0.6.6 +distro==1.9.0 +exceptiongroup==1.2.1 +filelock==3.14.0 +fsspec==2024.5.0 +h11==0.14.0 +httpcore==1.0.5 +httpx==0.27.0 +huggingface-hub==0.23.2 
+hypothesis==6.45.0 +idna==3.7 +iniconfig==2.0.0 +jiter==0.4.1 +marshmallow==3.21.2 +mock==5.1.0 +multidict==6.0.5 +mypy-extensions==1.0.0 +numexpr==2.10.0 +numpy==1.26.4 +opentracing==2.4.0 +packaging==24.0 +pluggy==1.5.0 +psutil==5.9.8 +pydantic==2.7.2 +pydantic-core==2.18.3 +pytest==8.2.1 +pytest-asyncio==0.23.7 +pytest-cov==5.0.0 +pytest-mock==3.14.0 +pytest-randomly==3.15.0 +pyyaml==6.0.1 +regex==2024.5.15 +requests==2.32.3 +sentencepiece==0.2.0 +sniffio==1.3.1 +sortedcontainers==2.4.0 +tenacity==8.3.0 +tiktoken==0.7.0 +tokenizers==0.19.1 +tomli==2.0.1 +tqdm==4.66.4 +typing-extensions==4.12.1 +typing-inspect==0.9.0 +urllib3==2.2.1 +vcrpy==6.0.1 +wrapt==1.16.0 +yarl==1.9.4 diff --git a/ddtrace/_monkey.py b/ddtrace/_monkey.py index 9c6947d116f..9868767f037 100644 --- a/ddtrace/_monkey.py +++ b/ddtrace/_monkey.py @@ -91,6 +91,7 @@ "tornado": False, "openai": True, "langchain": True, + "anthropic": True, "subprocess": True, "unittest": True, "coverage": False, diff --git a/ddtrace/contrib/anthropic/__init__.py b/ddtrace/contrib/anthropic/__init__.py new file mode 100644 index 00000000000..aeff9842012 --- /dev/null +++ b/ddtrace/contrib/anthropic/__init__.py @@ -0,0 +1,17 @@ +""" +Do later. +""" # noqa: E501 +from ...internal.utils.importlib import require_modules + + +required_modules = ["anthropic"] + +with require_modules(required_modules) as missing_modules: + if not missing_modules: + from . 
import patch as _patch + + patch = _patch.patch + unpatch = _patch.unpatch + get_version = _patch.get_version + + __all__ = ["patch", "unpatch", "get_version"] diff --git a/ddtrace/contrib/anthropic/patch.py b/ddtrace/contrib/anthropic/patch.py new file mode 100644 index 00000000000..c3872bdc735 --- /dev/null +++ b/ddtrace/contrib/anthropic/patch.py @@ -0,0 +1,158 @@ +import json +import os +import sys +from typing import Any + +import anthropic + +from ddtrace import config +from ddtrace.contrib.trace_utils import unwrap +from ddtrace.contrib.trace_utils import with_traced_module +from ddtrace.contrib.trace_utils import wrap +from ddtrace.internal.logger import get_logger +from ddtrace.internal.utils import get_argument_value +from ddtrace.llmobs._integrations import AnthropicIntegration +from ddtrace.pin import Pin + +from .utils import _get_attr +from .utils import record_usage + + +log = get_logger(__name__) + + +def get_version(): + # type: () -> str + return getattr(anthropic, "__version__", "") + + +config._add( + "anthropic", + { + "span_prompt_completion_sample_rate": float(os.getenv("DD_ANTHROPIC_SPAN_PROMPT_COMPLETION_SAMPLE_RATE", 1.0)), + "span_char_limit": int(os.getenv("DD_ANTHROPIC_SPAN_CHAR_LIMIT", 128)), + }, +) + + +def _extract_api_key(instance: Any) -> str: + """ + Extract and format LLM-provider API key from instance. 
+ """ + client = getattr(instance, "_client", "") + if client: + return getattr(client, "api_key", None) + return None + + +@with_traced_module +def traced_chat_model_generate(anthropic, pin, func, instance, args, kwargs): + chat_messages = get_argument_value(args, kwargs, 0, "messages") + integration = anthropic._datadog_integration + + operation_name = func.__name__ + + span = integration.trace( + pin, + "%s.%s.%s" % (instance.__module__, instance.__class__.__name__, operation_name), + submit_to_llmobs=True, + interface_type="chat_model", + provider="anthropic", + model=kwargs.get("model", ""), + api_key=_extract_api_key(instance), + ) + + chat_completions = None + try: + for message_idx, message in enumerate(chat_messages): + if isinstance(message, dict): + if isinstance(message.get("content", None), str): + if integration.is_pc_sampled_span(span) and message.get("content", "") != "": + span.set_tag_str( + "anthropic.request.messages.%d.content.0.text" % (message_idx), + integration.trunc(message.get("content", "")), + ) + span.set_tag_str( + "anthropic.request.messages.%d.content.0.type" % (message_idx), + "text", + ) + elif isinstance(message.get("content", None), list): + for block_idx, block in enumerate(message.get("content", [])): + if integration.is_pc_sampled_span(span): + if block.get("type", None) == "text" and block.get("text", "") != "": + span.set_tag_str( + "anthropic.request.messages.%d.content.%d.text" % (message_idx, block_idx), + integration.trunc(str(block.get("text", ""))), + ) + elif block.get("type", None) == "image": + span.set_tag_str( + "anthropic.request.messages.%d.content.%d.text" % (message_idx, block_idx), + "([IMAGE DETECTED])", + ) + + span.set_tag_str( + "anthropic.request.messages.%d.content.%d.type" % (message_idx, block_idx), + block.get("type", "text"), + ) + span.set_tag_str( + "anthropic.request.messages.%d.role" % (message_idx), + message.get("role", ""), + ) + params_to_tag = {k: v for k, v in kwargs.items() if k != 
"messages"} + span.set_tag_str("anthropic.request.parameters", json.dumps(params_to_tag)) + + chat_completions = func(*args, **kwargs) + + handle_non_streamed_response(integration, chat_completions, args, kwargs, span) + except Exception: + span.set_exc_info(*sys.exc_info()) + span.finish() + raise + finally: + span.finish() + return chat_completions + + +def handle_non_streamed_response(integration, chat_completions, args, kwargs, span): + for idx, chat_completion in enumerate(chat_completions.content): + if integration.is_pc_sampled_span(span) and getattr(chat_completion, "text", "") != "": + span.set_tag_str( + "anthropic.response.completions.content.%d.text" % (idx), + integration.trunc(str(getattr(chat_completion, "text", ""))), + ) + span.set_tag_str( + "anthropic.response.completions.content.%d.type" % (idx), + chat_completion.type, + ) + + # set message level tags + if getattr(chat_completions, "stop_reason", None) is not None: + span.set_tag_str("anthropic.response.completions.finish_reason", chat_completions.stop_reason) + span.set_tag_str("anthropic.response.completions.role", chat_completions.role) + + usage = _get_attr(chat_completions, "usage", {}) + record_usage(span, usage) + + +def patch(): + if getattr(anthropic, "_datadog_patch", False): + return + + anthropic._datadog_patch = True + + Pin().onto(anthropic) + integration = AnthropicIntegration(integration_config=config.anthropic) + anthropic._datadog_integration = integration + + wrap("anthropic", "resources.messages.Messages.create", traced_chat_model_generate(anthropic)) + + +def unpatch(): + if not getattr(anthropic, "_datadog_patch", False): + return + + anthropic._datadog_patch = False + + unwrap(anthropic.resources.messages.Messages, "create") + + delattr(anthropic, "_datadog_integration") diff --git a/ddtrace/contrib/anthropic/utils.py b/ddtrace/contrib/anthropic/utils.py new file mode 100644 index 00000000000..2833d3d05ef --- /dev/null +++ b/ddtrace/contrib/anthropic/utils.py @@ -0,0 
+1,30 @@ +from typing import Any +from typing import Dict + +from ddtrace._trace.span import Span +from ddtrace.internal.logger import get_logger + + +log = get_logger(__name__) + + +def _get_attr(o: Any, attr: str, default: Any): + # Since our response may be a dict or object, convenience method + if isinstance(o, dict): + return o.get(attr, default) + else: + return getattr(o, attr, default) + + +def record_usage(span: Span, usage: Dict[str, Any]) -> None: + if not usage: + return + for token_type in ("input", "output"): + num_tokens = _get_attr(usage, "%s_tokens" % token_type, None) + if num_tokens is None: + continue + span.set_metric("anthropic.response.usage.%s_tokens" % token_type, num_tokens) + + if "input" in usage and "output" in usage: + total_tokens = usage["output"] + usage["input"] + span.set_metric("anthropic.response.usage.total_tokens", total_tokens) diff --git a/ddtrace/llmobs/_integrations/__init__.py b/ddtrace/llmobs/_integrations/__init__.py index 7e96ff6648e..465cab1bb3d 100644 --- a/ddtrace/llmobs/_integrations/__init__.py +++ b/ddtrace/llmobs/_integrations/__init__.py @@ -1,7 +1,14 @@ +from .anthropic import AnthropicIntegration from .base import BaseLLMIntegration from .bedrock import BedrockIntegration from .langchain import LangChainIntegration from .openai import OpenAIIntegration -__all__ = ["BaseLLMIntegration", "BedrockIntegration", "LangChainIntegration", "OpenAIIntegration"] +__all__ = [ + "AnthropicIntegration", + "BaseLLMIntegration", + "BedrockIntegration", + "LangChainIntegration", + "OpenAIIntegration", +] diff --git a/ddtrace/llmobs/_integrations/anthropic.py b/ddtrace/llmobs/_integrations/anthropic.py new file mode 100644 index 00000000000..36e3baa5aa8 --- /dev/null +++ b/ddtrace/llmobs/_integrations/anthropic.py @@ -0,0 +1,35 @@ +from typing import Any +from typing import Dict +from typing import Optional + +from ddtrace._trace.span import Span +from ddtrace.internal.logger import get_logger + +from .base import 
BaseLLMIntegration + + +log = get_logger(__name__) + + +API_KEY = "anthropic.request.api_key" +MODEL = "anthropic.request.model" + + +class AnthropicIntegration(BaseLLMIntegration): + _integration_name = "anthropic" + + def _set_base_span_tags( + self, + span: Span, + model: Optional[str] = None, + api_key: Optional[str] = None, + **kwargs: Dict[str, Any], + ) -> None: + """Set base level tags that should be present on all Anthropic spans (if they are not None).""" + if model is not None: + span.set_tag_str(MODEL, model) + if api_key is not None: + if len(api_key) >= 4: + span.set_tag_str(API_KEY, f"...{str(api_key[-4:])}") + else: + span.set_tag_str(API_KEY, api_key) diff --git a/tests/.suitespec.json b/tests/.suitespec.json index 666c6a44d4d..a54e75b119a 100644 --- a/tests/.suitespec.json +++ b/tests/.suitespec.json @@ -247,6 +247,9 @@ "langchain": [ "ddtrace/contrib/langchain/*" ], + "anthropic": [ + "ddtrace/contrib/anthropic/*" + ], "subprocess": [ "ddtrace/contrib/subprocess/*" ], @@ -1380,6 +1383,17 @@ "tests/contrib/langchain/*", "tests/snapshots/tests.contrib.{suite}.*" ], + "anthropic": [ + "@bootstrap", + "@core", + "@tracing", + "@contrib", + "@anthropic", + "@requests", + "@llmobs", + "tests/contrib/anthropic/*", + "tests/snapshots/tests.contrib.anthropic.*" + ], "runtime": [ "@core", "@runtime", diff --git a/tests/contrib/anthropic/__init__.py b/tests/contrib/anthropic/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/tests/contrib/anthropic/cassettes/anthropic_completion_error.yaml b/tests/contrib/anthropic/cassettes/anthropic_completion_error.yaml new file mode 100644 index 00000000000..bbaf3267206 --- /dev/null +++ b/tests/contrib/anthropic/cassettes/anthropic_completion_error.yaml @@ -0,0 +1,67 @@ +interactions: +- request: + body: '{"max_tokens": 1024, "messages": ["Invalid content"], "model": "claude-3-opus-20240229"}' + headers: + accept: + - application/json + accept-encoding: + - gzip, deflate + anthropic-version: + 
- '2023-06-01' + connection: + - keep-alive + content-length: + - '88' + content-type: + - application/json + host: + - api.anthropic.com + user-agent: + - Anthropic/Python 0.26.1 + x-stainless-arch: + - arm64 + x-stainless-async: + - 'false' + x-stainless-lang: + - python + x-stainless-os: + - MacOS + x-stainless-package-version: + - 0.26.1 + x-stainless-runtime: + - CPython + x-stainless-runtime-version: + - 3.10.13 + method: POST + uri: https://api.anthropic.com/v1/messages + response: + body: + string: '{"type":"error","error":{"type":"invalid_request_error","message":"messages.0: + Input does not match the expected shape."}}' + headers: + CF-Cache-Status: + - DYNAMIC + CF-RAY: + - 88c85bd75e437274-EWR + Connection: + - keep-alive + Content-Length: + - '122' + Content-Type: + - application/json + Date: + - Fri, 31 May 2024 16:32:14 GMT + Server: + - cloudflare + request-id: + - req_01N7iW6qh7wHr9je4z3FDn2n + via: + - 1.1 google + x-cloud-trace-context: + - cf561ed8cbadcfc1748718321572db36 + x-should-retry: + - 'false' + status: + code: 400 + message: Bad Request +version: 1 \ No newline at end of file diff --git a/tests/contrib/anthropic/cassettes/anthropic_completion_sync.yaml b/tests/contrib/anthropic/cassettes/anthropic_completion_sync.yaml new file mode 100644 index 00000000000..1f8d5f0500b --- /dev/null +++ b/tests/contrib/anthropic/cassettes/anthropic_completion_sync.yaml @@ -0,0 +1,95 @@ +interactions: +- request: + body: '{"max_tokens": 1024, "messages": [{"role": "user", "content": [{"type": + "text", "text": "Can you explain what Descartes meant by ''I think, therefore + I am''?"}]}], "model": "claude-3-opus-20240229"}' + headers: + accept: + - application/json + accept-encoding: + - gzip, deflate + anthropic-version: + - '2023-06-01' + connection: + - keep-alive + content-length: + - '196' + content-type: + - application/json + host: + - api.anthropic.com + user-agent: + - Anthropic/Python 0.26.1 + x-stainless-arch: + - arm64 + x-stainless-async: + - 
'false' + x-stainless-lang: + - python + x-stainless-os: + - MacOS + x-stainless-package-version: + - 0.26.1 + x-stainless-runtime: + - CPython + x-stainless-runtime-version: + - 3.10.13 + method: POST + uri: https://api.anthropic.com/v1/messages + response: + body: + string: !!binary | + H4sIAAAAAAAAA3xU224cNwz9FUIvaYHxwlkHNbqPberaRQoURZAC6RaGVuKsVGvEiUjtemP4g/od + /bGAmr0Bbvo0wIg8PDw85JOJ3izMwOv7y9e/ucz05u/rYfdhc/PLx3efw/Vt+MF0RnYjahQy2zWa + zhRK+sMyRxabxXRmII/JLIxLtnq8uLqgsfLF/HL+5nI+/950xlEWzGIWfz4dAAUfNbV9FuZ9QBhD + sYywNHcgIeaHDiRgwZ4Kwh3YYWkgMliQYjMnK5EyUK9B8M5KzCeAH2kdhTrAsibgOnRLA9sQXYCt + ZXAUM3pY7VrqTcHsAowhJmIaAxb4HfO//8BbZGeLIEPMECLDlsoDLM3byI5qYQTKDeFXlEB+aeCb + 199dXX87g/cazWIFB8yipDWsp5r9kfUR/dWp9A5s9lOPjkrGwkIZNfoPZMGSz0Jny7zMJ4raF6Mt + LsS8hp4K2AyxFOyr2FVCkFIlgAQr4KgmD4xlg2C12BmzlpkSkEoPD5m2Cf0aZ3CLgMOYaIceLAyt + ZWXGOxYcrEQHnupKOvhUkRVLieAGy04ap4CwwhRxgx6EYNUoYQcxu1S9RqhI+BhZMDs8TBYftXGb + VP3km0C4wWkgtM2wIj9pcUtbrdadza2gTfGz1tO2m5maUCrqkdSkRiaZ+C+mWVknU1aYUpohY17P + 4CetHnt9aAnoz7vExNh8C/oPFIb6KVCfqRyRYCy0OXAL+6T2qE0MlWUSYwZ3eT+OLRXPU+Ae+AgW + hTH16p2xEPX6Rhlf8UnQJtJNoUGTDtq0oZ9L5qiN40yz/1JrheCwiI3NyqrPYRyn+TVnHfmtsInX + NmOFzg7YsFfoC7kHqCPl/YaeitSYfAM+uj46m/aOm5zAau7I4aV3JsUab+6AqwvK6GeaLPTSWU2e + u6zXYrBl1/3vGcLs7Mg1WZXsbJM3Ebcn4V7MaLoDA7FArxun18Gmr05sz7ShaW7b4JfH5Gsra57/ + 6gwLjfcFLVM2C4PZ30st2ewfGD9VrWQWuabUmdpO/OLJxDxWuRd6wMxmMb/uDFU5/3V1efX8/AUA + AP//AwAfgEAQQQYAAA== + headers: + CF-Cache-Status: + - DYNAMIC + CF-RAY: + - 88c85b052be7428b-EWR + Connection: + - keep-alive + Content-Encoding: + - gzip + Content-Type: + - application/json + Date: + - Fri, 31 May 2024 16:31:57 GMT + Server: + - cloudflare + Transfer-Encoding: + - chunked + anthropic-ratelimit-requests-limit: + - '5' + anthropic-ratelimit-requests-remaining: + - '5' + anthropic-ratelimit-requests-reset: + - '2024-05-31T16:32:57Z' + anthropic-ratelimit-tokens-limit: + - '10000' + anthropic-ratelimit-tokens-remaining: + - '10000' + anthropic-ratelimit-tokens-reset: + - 
'2024-05-31T16:32:57Z' + request-id: + - req_01Ybd82xxyNova6PBsMeHobV + via: + - 1.1 google + x-cloud-trace-context: + - 1ba1eaa11fc86ede89ae23d3ae4aefe1 + status: + code: 200 + message: OK +version: 1 diff --git a/tests/contrib/anthropic/cassettes/anthropic_completion_sync_39.yaml b/tests/contrib/anthropic/cassettes/anthropic_completion_sync_39.yaml new file mode 100644 index 00000000000..479ace5a990 --- /dev/null +++ b/tests/contrib/anthropic/cassettes/anthropic_completion_sync_39.yaml @@ -0,0 +1,95 @@ +interactions: +- request: + body: '{"max_tokens": 1024, "messages": [{"role": "user", "content": "What does + Nietzsche mean by ''God is dead''?"}], "model": "claude-3-opus-20240229"}' + headers: + accept: + - application/json + accept-encoding: + - gzip, deflate + anthropic-version: + - '2023-06-01' + connection: + - keep-alive + content-length: + - '144' + content-type: + - application/json + host: + - api.anthropic.com + user-agent: + - Anthropic/Python 0.28.0 + x-stainless-arch: + - arm64 + x-stainless-async: + - 'false' + x-stainless-lang: + - python + x-stainless-os: + - MacOS + x-stainless-package-version: + - 0.28.0 + x-stainless-runtime: + - CPython + x-stainless-runtime-version: + - 3.10.13 + method: POST + uri: https://api.anthropic.com/v1/messages + response: + body: + string: !!binary | + H4sIAAAAAAAAA4RVzY4bNwx+FUKXtsBkkfWmReJbD+2mQNAekiBo62JBjzgeYjXkVKTsToK8Sd6m + L1ZI9m686aI9GdBQ1PfDj/4QOIZ1mGx38/Ty27e/rn4cvnvxdvltpJfXr6bl1ffvY+iCLzPVKjLD + HYUuZE31AM3YHMVDFyaNlMI69AlLpCdXT3Qu9mT1dPXs6Wr1InShV3ESD+vfP9w1dPqrXm0/6/Bu + JIGfmfy99SPBgJMWSwtE6hNmirAJ1xqBDSJh3ARggZENtqq3sAlvRoJrXOB1zyQ9bQJ8ffn8+eqb + DkaCAxqIOiR2ypjSAlZ2OzJn2YGP6IAQec9CsKV6NmKEyBQv4CcxJ4z3bSa8rQUI88hJTeeRe0yA + EqEvyUvGBLo1ynt0VgHcanHwkRqR+oIO4Bkj18+YIFPiHWsx2FJiGlqrWt+rGP1ZKhtrlypbG3nw + Sv0dmVMWMO2ZfLnYyEZeUibATGA6EdzSArOyuIErFImUq1vxBOle6q8MzNFpIvF1bXN5AW8e4v2M + EYuPmtmX9ZlXmHeF4lHIivwHSbwbnaS27MCqJc4D94Bxj9K3p6y7Z5rZ2jNGfUmYYSwTCtvUbKCs + 
kY51gxaJTVb7PxWbHqsjkaTW6nFrmooTTFpd2mMqZGt4xz5+adDJCha41tidUe0T8nTO9c6Hg+YU + G+Ck5sBusEVjg0Hz4y93kAhjm0AFBCM5qiA8cqrsqzyRbEbOjc7VkY4QxdY1U23U9Dh3o2Hfn2Pc + hEjoY21+rTU6cyYjcYqAAjrPmr0I+9LaHtX3pcLqM6HXJw8n0A3URCgsO+sqRYqgAiw1P7F8GQUc + Bs7TMQo6QOKBIKOPlCs8OfMtau+ZhRrVZ0eqd4NRSfz9aUt5IrF+XD8Y3l6lp9nv6jbhrHITQDNs + wusyU55QNgEyndhbK+dINbFDTTRlU4HDqEDTNmPNXQXcnWRoFziDHuTewkq2orSaqj21lokn9sfH + tPlftWUBPM3MgX2sgbzW2Lj/sm87qntAcs7aJ/ws5KO2sgHCRI7zqLl5WcvmrC04/94dR5eokbhf + Zwug/feCOuUktnt1NA64NKafd8xxr9KJYZuIKmE9Pg0P0ER5R/EifPyjC+Y632RCUwnrQBJvvGQJ + pw93azCspaTUhdL+h9YfAstc/Mb1lsTCerXqghY/P7q6uvz48R8AAAD//wMAxlhmZuYGAAA= + headers: + CF-Cache-Status: + - DYNAMIC + CF-RAY: + - 88e11b85181a0cc4-EWR + Connection: + - keep-alive + Content-Encoding: + - gzip + Content-Type: + - application/json + Date: + - Mon, 03 Jun 2024 16:37:42 GMT + Server: + - cloudflare + Transfer-Encoding: + - chunked + anthropic-ratelimit-requests-limit: + - '5' + anthropic-ratelimit-requests-remaining: + - '4' + anthropic-ratelimit-requests-reset: + - '2024-06-03T16:37:57Z' + anthropic-ratelimit-tokens-limit: + - '10000' + anthropic-ratelimit-tokens-remaining: + - '10000' + anthropic-ratelimit-tokens-reset: + - '2024-06-03T16:37:57Z' + request-id: + - req_01End84WeJzYrMjenfX3msVw + via: + - 1.1 google + x-cloud-trace-context: + - 884af431d7fdfbe4b21bde7aeefd24d6 + status: + code: 200 + message: OK +version: 1 diff --git a/tests/contrib/anthropic/cassettes/anthropic_completion_sync_multi_prompt.yaml b/tests/contrib/anthropic/cassettes/anthropic_completion_sync_multi_prompt.yaml new file mode 100644 index 00000000000..199883b838a --- /dev/null +++ b/tests/contrib/anthropic/cassettes/anthropic_completion_sync_multi_prompt.yaml @@ -0,0 +1,97 @@ +interactions: +- request: + body: '{"max_tokens": 1024, "messages": [{"role": "user", "content": [{"type": + "text", "text": "Hello, I am looking for information about some books!"}, {"type": + "text", "text": "Can you explain what Descartes meant by ''I 
think, therefore + I am''?"}]}], "model": "claude-3-opus-20240229"}' + headers: + accept: + - application/json + accept-encoding: + - gzip, deflate + anthropic-version: + - '2023-06-01' + connection: + - keep-alive + content-length: + - '279' + content-type: + - application/json + host: + - api.anthropic.com + user-agent: + - Anthropic/Python 0.26.1 + x-stainless-arch: + - arm64 + x-stainless-async: + - 'false' + x-stainless-lang: + - python + x-stainless-os: + - MacOS + x-stainless-package-version: + - 0.26.1 + x-stainless-runtime: + - CPython + x-stainless-runtime-version: + - 3.10.13 + method: POST + uri: https://api.anthropic.com/v1/messages + response: + body: + string: !!binary | + H4sIAAAAAAAAA3xV224bRwz9FWJeagNrwZfGSfRWNBe7bV6KJIVRFcZol6tlNUtuhhwpW8Mf1O/o + jwUzK1l2kPRFC81yeA4PD7l3jho3d72ubk/Pfvn4xzs5f/uBbq4uPtRvf7188frmZu0qZ+OAOQpV + /Qpd5aKEfOBVSc2zucr10mBwc1cHnxo8uTiRIenJ+en5j6fn5y9d5WphQzY3//Nun9Dwc75aHnP3 + vkMYuugVYeGuwTridQXWYcRWIsI1+H7h4EgirYh9CCMQw2/eiMErLNzPsiKTCjCuBDT1C3cMpOBh + 6CiIytBR7QOoecMe2WA55vTwJiLX3SEKI/yO/N+/8Aq19tFQ4ejs2cvLk7PLZ6fHM7g28MOAPmom + 0JHCVuIaFu4VaS0pKoJwyfwOrZMmkz67vHh+DJ6bidEaR8AwsZC25HiAH2cLXvABe+sVFHFNvAIP + rSRuvJGwD2AxWQdpEIZtR3UHHUItKTSwTBQa8KCjGvYZo8ZonhjWLNuAzQpncIWA/RBkxBzaF7I5 + dLrkjWpoJC2tgk8JNWNmDrmwjQ/UkI052odQClhiIGy1FCkDMQlrwSiUwTpvB34lLzD6GEbADcYx + 93tVAXEdUrPHiRjILynsoDKMIitqMQbgZ1JDrjG/nA4MY1ZmKzE0VeGCG2TovXVTSXvZtMh8JduM + Xj3qdUQf6B/8mjGL7VhnoNbX9hCQO1TsSryaweuMR5MBcpS0071S3i5ajUJ4dOdNlD7/1WoCLCI8 + UMiW2YlgMt3aYWanYTlZPeo9PjRb2hn8pBAke0e/5lrAOt/krEXKGbzPYAEb6KifwHKtvSSFVmKf + QrFe9f0JnS1c1rXkOUyaYtxkk+m3HdxKPDSg+p+6NMXS69zX79v+yThNM18cXUYX1fwykHZF3m94 + SDG0E9O9TrDE/Hs0OVKhzovGs1Yg8RBU2E4zvsTa95jHz3zMjYdBiK3Umcm1KWbVoMeGrEih+5XB + 3nY1Fh/aWB0mdufnPeGne+IHoAZ96S9xGxKykQ97Iza4wSDDfuHkbR350c6poPdxt2K0o9bAZOtj + MzWsToWfpuXfWBttshM9T/rlb8FeugctiobEkLjBmD8RZaJ3Fc3c/V+VU5PhNqJXYTd3yM2tpchu + 90LxU8o1ujmnECqXypdnfueIh2S3JmtkdfOLF5WTZE+OLp7f338BAAD//wMAFHVJ9tgGAAA= + headers: + CF-Cache-Status: + - DYNAMIC + 
CF-RAY: + - 88c85b706a4f5e76-EWR + Connection: + - keep-alive + Content-Encoding: + - gzip + Content-Type: + - application/json + Date: + - Fri, 31 May 2024 16:32:13 GMT + Server: + - cloudflare + Transfer-Encoding: + - chunked + anthropic-ratelimit-requests-limit: + - '5' + anthropic-ratelimit-requests-remaining: + - '4' + anthropic-ratelimit-requests-reset: + - '2024-05-31T16:32:57Z' + anthropic-ratelimit-tokens-limit: + - '10000' + anthropic-ratelimit-tokens-remaining: + - '10000' + anthropic-ratelimit-tokens-reset: + - '2024-05-31T16:32:57Z' + request-id: + - req_01T5fqMvSyUMwwDMphBKC7Ba + via: + - 1.1 google + x-cloud-trace-context: + - 62a976e4f5e79017b5e9a1a54801766b + status: + code: 200 + message: OK +version: 1 \ No newline at end of file diff --git a/tests/contrib/anthropic/cassettes/anthropic_completion_sync_multi_prompt_with_chat_history.yaml b/tests/contrib/anthropic/cassettes/anthropic_completion_sync_multi_prompt_with_chat_history.yaml new file mode 100644 index 00000000000..c6d930d9b7c --- /dev/null +++ b/tests/contrib/anthropic/cassettes/anthropic_completion_sync_multi_prompt_with_chat_history.yaml @@ -0,0 +1,95 @@ +interactions: +- request: + body: '{"max_tokens": 1024, "messages": [{"role": "user", "content": [{"type": + "text", "text": "Hello, Start all responses with your name Claude."}, {"type": + "text", "text": "End all responses with [COPY, CLAUDE OVER AND OUT!]"}]}, {"role": + "assistant", "content": "Claude: Sure! 
[COPY, CLAUDE OVER AND OUT!]"}, {"role": + "user", "content": [{"type": "text", "text": "Add the time and date to the beginning + of your response after your name."}, {"type": "text", "text": "Explain string + theory succinctly to a complete noob."}]}], "model": "claude-3-opus-20240229"}' + headers: + accept: + - application/json + accept-encoding: + - gzip, deflate + anthropic-version: + - '2023-06-01' + connection: + - keep-alive + content-length: + - '555' + content-type: + - application/json + host: + - api.anthropic.com + user-agent: + - Anthropic/Python 0.28.0 + x-stainless-arch: + - arm64 + x-stainless-async: + - 'false' + x-stainless-lang: + - python + x-stainless-os: + - MacOS + x-stainless-package-version: + - 0.28.0 + x-stainless-runtime: + - CPython + x-stainless-runtime-version: + - 3.10.13 + method: POST + uri: https://api.anthropic.com/v1/messages + response: + body: + string: !!binary | + H4sIAAAAAAAAA1xT204kNxD9lYqfDZqZBWnTbwg2iXIj2oVECVqhwq6etsZdbqrKM9tC/PvKzSWI + J9/q1Dk+x35wKbrOjbq9Xa1//m84X5+m307D4Z9ffr2+nr/IRY/OO5snalWkilty3knJbQNVkxqy + Oe/GEim7zoWMNdLRh6MyVT3arDYnq83mR+ddKGzE5rqbh5eGRt8adBk6d74gPfwkKeLs4WySlGHz + 0cNmtfkAaLBed6sTOPujgy8mibdgAxWZISng05wsBczQC450KLKDxDANs6agYAMaTFKmovS8soGg + rxxxJDbMcFdTjq3xXS5hp1D6paRy2pMoAQqBJZ497NOdoLVSXaQstcQk2xkEbSBpDAxTSWxHOe0I + JhRLIZMew9VASq/IpetQ6nYwsPLcmpr0mPqehNjggLN6QI5AGIYX+sIQigjpVDhqA+MbzAuhB61h + AFRABsoUTAp7uK8oOw9FYBqKFT5+5ypmLaB1uyW1//2SJxcwxtT4MYNOaAkzxDQSayqscEdz4bh4 + Z4MQwYEgNDdIAqU9eTgMqUkSglAlU4Q6tYT3JDPoiDmDBnzx6lVRGpdLVk793PSz1RFGCgNyS7jZ + s20hYAahjJb2yWbfQt+nJVdcsIkiRNIgaVo8LD00xvevoS8SaAmW0arQMdycX/71r4fz38+uLz7B + 5d+fPsPZnxdweX31w1f3+NU7tTLdCqEWdp0jjrdWhd3zgdJ9JQ7kOq45e1eX79Q9uMRTtVsrO2J1 + 3ccT70q1t1vr09PHx+8AAAD//wMAORpVdq0DAAA= + headers: + CF-Cache-Status: + - DYNAMIC + CF-RAY: + - 88e120459bfe422e-EWR + Connection: + - keep-alive + Content-Encoding: + - gzip + Content-Type: + - application/json + Date: + - Mon, 03 Jun 2024 16:40:47 GMT + Server: + - cloudflare + 
Transfer-Encoding: + - chunked + anthropic-ratelimit-requests-limit: + - '5' + anthropic-ratelimit-requests-remaining: + - '4' + anthropic-ratelimit-requests-reset: + - '2024-06-03T16:40:57Z' + anthropic-ratelimit-tokens-limit: + - '10000' + anthropic-ratelimit-tokens-remaining: + - '10000' + anthropic-ratelimit-tokens-reset: + - '2024-06-03T16:40:57Z' + request-id: + - req_0169ecrQS9L2NLLJ5kxTHNhR + via: + - 1.1 google + x-cloud-trace-context: + - 3ca44323b7c7d8d7e47380ba95e440a0 + status: + code: 200 + message: OK +version: 1 diff --git a/tests/contrib/anthropic/conftest.py b/tests/contrib/anthropic/conftest.py new file mode 100644 index 00000000000..fe0010849e6 --- /dev/null +++ b/tests/contrib/anthropic/conftest.py @@ -0,0 +1,48 @@ +import os + +import pytest + +from ddtrace import Pin +from ddtrace.contrib.anthropic.patch import patch +from ddtrace.contrib.anthropic.patch import unpatch +from tests.utils import DummyTracer +from tests.utils import DummyWriter +from tests.utils import override_config +from tests.utils import override_env +from tests.utils import override_global_config + + +@pytest.fixture +def ddtrace_config_anthropic(): + return {} + + +@pytest.fixture +def snapshot_tracer(anthropic): + pin = Pin.get_from(anthropic) + yield pin.tracer + + +@pytest.fixture +def mock_tracer(anthropic): + pin = Pin.get_from(anthropic) + mock_tracer = DummyTracer(writer=DummyWriter(trace_flush_enabled=False)) + pin.override(anthropic, tracer=mock_tracer) + pin.tracer.configure() + yield mock_tracer + + +@pytest.fixture +def anthropic(ddtrace_config_anthropic): + with override_global_config({"_dd_api_key": ""}): + with override_config("anthropic", ddtrace_config_anthropic): + with override_env( + dict( + ANTHROPIC_API_KEY=os.getenv("ANTHROPIC_API_KEY", ""), + ) + ): + patch() + import anthropic + + yield anthropic + unpatch() diff --git a/tests/contrib/anthropic/test_anthropic.py b/tests/contrib/anthropic/test_anthropic.py new file mode 100644 index 
00000000000..4e0d0a63bc9 --- /dev/null +++ b/tests/contrib/anthropic/test_anthropic.py @@ -0,0 +1,115 @@ +import pytest + +from tests.contrib.anthropic.utils import get_request_vcr +from tests.utils import override_global_config + + +@pytest.fixture(scope="session") +def request_vcr(): + yield get_request_vcr() + + +def test_global_tags(ddtrace_config_anthropic, anthropic, request_vcr, mock_tracer): + """ + When the global config UST tags are set + The service name should be used for all data + The env should be used for all data + The version should be used for all data + """ + llm = anthropic.Anthropic() + with override_global_config(dict(service="test-svc", env="staging", version="1234")): + cassette_name = "anthropic_completion_sync_39.yaml" + with request_vcr.use_cassette(cassette_name): + llm.messages.create( + model="claude-3-opus-20240229", + max_tokens=1024, + messages=[{"role": "user", "content": "What does Nietzsche mean by 'God is dead'?"}], + ) + + span = mock_tracer.pop_traces()[0][0] + assert span.resource == "anthropic.resources.messages.Messages.create" + assert span.service == "test-svc" + assert span.get_tag("env") == "staging" + assert span.get_tag("version") == "1234" + assert span.get_tag("anthropic.request.model") == "claude-3-opus-20240229" + assert span.get_tag("anthropic.request.api_key") == "...key>" + + +# @pytest.mark.snapshot(ignores=["metrics.anthropic.tokens.total_cost", "resource"]) +@pytest.mark.snapshot() +def test_anthropic_llm_sync(anthropic, request_vcr): + llm = anthropic.Anthropic() + with request_vcr.use_cassette("anthropic_completion_sync.yaml"): + llm.messages.create( + model="claude-3-opus-20240229", + max_tokens=1024, + messages=[ + { + "role": "user", + "content": [ + { + "type": "text", + "text": "Can you explain what Descartes meant by 'I think, therefore I am'?", + } + ], + } + ], + ) + + +@pytest.mark.snapshot() +def test_anthropic_llm_sync_multiple_prompts(anthropic, request_vcr): + llm = anthropic.Anthropic() + 
with request_vcr.use_cassette("anthropic_completion_sync_multi_prompt.yaml"): + llm.messages.create( + model="claude-3-opus-20240229", + max_tokens=1024, + messages=[ + { + "role": "user", + "content": [ + {"type": "text", "text": "Hello, I am looking for information about some books!"}, + {"type": "text", "text": "Can you explain what Descartes meant by 'I think, therefore I am'?"}, + ], + } + ], + ) + + +@pytest.mark.snapshot() +def test_anthropic_llm_sync_multiple_prompts_with_chat_history(anthropic, request_vcr): + llm = anthropic.Anthropic() + with request_vcr.use_cassette("anthropic_completion_sync_multi_prompt_with_chat_history.yaml"): + llm.messages.create( + model="claude-3-opus-20240229", + max_tokens=1024, + messages=[ + { + "role": "user", + "content": [ + {"type": "text", "text": "Hello, Start all responses with your name Claude."}, + {"type": "text", "text": "End all responses with [COPY, CLAUDE OVER AND OUT!]"}, + ], + }, + {"role": "assistant", "content": "Claude: Sure! [COPY, CLAUDE OVER AND OUT!]"}, + { + "role": "user", + "content": [ + { + "type": "text", + "text": "Add the time and date to the beginning of your response after your name.", + }, + {"type": "text", "text": "Explain string theory succinctly to a complete noob."}, + ], + }, + ], + ) + + +@pytest.mark.snapshot(ignores=["meta.error.stack"]) +def test_anthropic_llm_error(anthropic, request_vcr): + llm = anthropic.Anthropic() + invalid_error = anthropic.BadRequestError + with pytest.raises(invalid_error): + with request_vcr.use_cassette("anthropic_completion_error.yaml"): + llm.messages.create(model="claude-3-opus-20240229", max_tokens=1024, messages=["Invalid content"]) diff --git a/tests/contrib/anthropic/test_anthropic_patch.py b/tests/contrib/anthropic/test_anthropic_patch.py new file mode 100644 index 00000000000..a5732bf5902 --- /dev/null +++ b/tests/contrib/anthropic/test_anthropic_patch.py @@ -0,0 +1,21 @@ +from ddtrace.contrib.anthropic import get_version +from 
ddtrace.contrib.anthropic import patch +from ddtrace.contrib.anthropic import unpatch +from tests.contrib.patch import PatchTestCase + + +class TestAnthropicPatch(PatchTestCase.Base): + __integration_name__ = "anthropic" + __module_name__ = "anthropic" + __patch_func__ = patch + __unpatch_func__ = unpatch + __get_version__ = get_version + + def assert_module_patched(self, anthropic): + self.assert_wrapped(anthropic.resources.messages.Messages.create) + + def assert_not_module_patched(self, anthropic): + self.assert_not_wrapped(anthropic.resources.messages.Messages.create) + + def assert_not_module_double_patched(self, anthropic): + self.assert_not_double_wrapped(anthropic.resources.messages.Messages.create) diff --git a/tests/contrib/anthropic/utils.py b/tests/contrib/anthropic/utils.py new file mode 100644 index 00000000000..c47812650cd --- /dev/null +++ b/tests/contrib/anthropic/utils.py @@ -0,0 +1,30 @@ +import os + +import vcr + + +def iswrapped(obj): + return hasattr(obj, "__dd_wrapped__") + + +# VCR is used to capture and store network requests made to Anthropic. +# This is done to avoid making real calls to the API which could introduce +# flakiness and cost. + + +# To (re)-generate the cassettes: pass a real Anthropic API key with +# ANTHROPIC_API_KEY, delete the old cassettes and re-run the tests. +# NOTE: be sure to check that the generated cassettes don't contain your +# API key. Keys should be redacted by the filter_headers option below. +# NOTE: that different cassettes have to be used between sync and async +# due to this issue: https://github.com/kevin1024/vcrpy/issues/463 +# between cassettes generated for requests and aiohttp. 
+def get_request_vcr(): + return vcr.VCR( + cassette_library_dir=os.path.join(os.path.dirname(__file__), "cassettes"), + record_mode="once", + match_on=["path"], + filter_headers=["authorization", "x-api-key", "api-key"], + # Ignore requests to the agent + ignore_localhost=True, + ) diff --git a/tests/snapshots/tests.contrib.anthropic.test_anthropic.test_anthropic_llm_error.json b/tests/snapshots/tests.contrib.anthropic.test_anthropic.test_anthropic_llm_error.json new file mode 100644 index 00000000000..86ddb4ffd9d --- /dev/null +++ b/tests/snapshots/tests.contrib.anthropic.test_anthropic.test_anthropic_llm_error.json @@ -0,0 +1,32 @@ +[[ + { + "name": "anthropic.request", + "service": "", + "resource": "anthropic.resources.messages.Messages.create", + "trace_id": 0, + "span_id": 1, + "parent_id": 0, + "type": "", + "error": 1, + "meta": { + "_dd.p.dm": "-0", + "_dd.p.tid": "665de86c00000000", + "anthropic.request.api_key": "...key>", + "anthropic.request.model": "claude-3-opus-20240229", + "anthropic.request.parameters": "{\"model\": \"claude-3-opus-20240229\", \"max_tokens\": 1024}", + "error.message": "Error code: 400 - {'type': 'error', 'error': {'type': 'invalid_request_error', 'message': 'messages.0: Input does not match the expected shape.'}}", + "error.stack": "Traceback (most recent call last):\n File \"/Users/william.conti/Documents/dd-trace/dd-trace-py/ddtrace/contrib/anthropic/patch.py\", line 106, in traced_chat_model_generate\n chat_completions = func(*args, **kwargs)\n File \"/Users/william.conti/Documents/dd-trace/dd-trace-py/.riot/venv_py31013_mock_pytest_pytest-mock_coverage_pytest-cov_opentracing_hypothesis6451_pytest-asyncio_vcrpy_anthropic/lib/python3.10/site-packages/anthropic/_utils/_utils.py\", line 277, in wrapper\n return func(*args, **kwargs)\n File 
\"/Users/william.conti/Documents/dd-trace/dd-trace-py/.riot/venv_py31013_mock_pytest_pytest-mock_coverage_pytest-cov_opentracing_hypothesis6451_pytest-asyncio_vcrpy_anthropic/lib/python3.10/site-packages/anthropic/resources/messages.py\", line 681, in create\n return self._post(\n File \"/Users/william.conti/Documents/dd-trace/dd-trace-py/.riot/venv_py31013_mock_pytest_pytest-mock_coverage_pytest-cov_opentracing_hypothesis6451_pytest-asyncio_vcrpy_anthropic/lib/python3.10/site-packages/anthropic/_base_client.py\", line 1239, in post\n return cast(ResponseT, self.request(cast_to, opts, stream=stream, stream_cls=stream_cls))\n File \"/Users/william.conti/Documents/dd-trace/dd-trace-py/.riot/venv_py31013_mock_pytest_pytest-mock_coverage_pytest-cov_opentracing_hypothesis6451_pytest-asyncio_vcrpy_anthropic/lib/python3.10/site-packages/anthropic/_base_client.py\", line 921, in request\n return self._request(\n File \"/Users/william.conti/Documents/dd-trace/dd-trace-py/.riot/venv_py31013_mock_pytest_pytest-mock_coverage_pytest-cov_opentracing_hypothesis6451_pytest-asyncio_vcrpy_anthropic/lib/python3.10/site-packages/anthropic/_base_client.py\", line 1019, in _request\n raise self._make_status_error_from_response(err.response) from None\nanthropic.BadRequestError: Error code: 400 - {'type': 'error', 'error': {'type': 'invalid_request_error', 'message': 'messages.0: Input does not match the expected shape.'}}\n", + "error.type": "anthropic.BadRequestError", + "language": "python", + "runtime-id": "8e2ce3f9d69c4b6393f8f97d17bc43d3" + }, + "metrics": { + "_dd.measured": 1, + "_dd.top_level": 1, + "_dd.tracer_kr": 1.0, + "_sampling_priority_v1": 1, + "process_id": 37192 + }, + "duration": 2603000, + "start": 1717430380420422000 + }]] \ No newline at end of file diff --git a/tests/snapshots/tests.contrib.anthropic.test_anthropic.test_anthropic_llm_sync.json b/tests/snapshots/tests.contrib.anthropic.test_anthropic.test_anthropic_llm_sync.json new file mode 100644 index 
00000000000..bf80302b51b --- /dev/null +++ b/tests/snapshots/tests.contrib.anthropic.test_anthropic.test_anthropic_llm_sync.json @@ -0,0 +1,38 @@ +[[ + { + "name": "anthropic.request", + "service": "", + "resource": "anthropic.resources.messages.Messages.create", + "trace_id": 0, + "span_id": 1, + "parent_id": 0, + "type": "", + "error": 0, + "meta": { + "_dd.p.dm": "-0", + "_dd.p.tid": "665de86c00000000", + "anthropic.request.api_key": "...key>", + "anthropic.request.messages.0.content.0.text": "Can you explain what Descartes meant by 'I think, therefore I am'?", + "anthropic.request.messages.0.content.0.type": "text", + "anthropic.request.messages.0.role": "user", + "anthropic.request.model": "claude-3-opus-20240229", + "anthropic.request.parameters": "{\"model\": \"claude-3-opus-20240229\", \"max_tokens\": 1024}", + "anthropic.response.completions.content.0.text": "The phrase \"I think, therefore I am\" is a translation of the Latin phrase \"Cogito, ergo sum,\" which was coined by the French phi...", + "anthropic.response.completions.content.0.type": "text", + "anthropic.response.completions.finish_reason": "end_turn", + "anthropic.response.completions.role": "assistant", + "language": "python", + "runtime-id": "8e2ce3f9d69c4b6393f8f97d17bc43d3" + }, + "metrics": { + "_dd.measured": 1, + "_dd.top_level": 1, + "_dd.tracer_kr": 1.0, + "_sampling_priority_v1": 1, + "anthropic.response.usage.input_tokens": 27, + "anthropic.response.usage.output_tokens": 303, + "process_id": 37192 + }, + "duration": 2370000, + "start": 1717430380355108000 + }]] \ No newline at end of file diff --git a/tests/snapshots/tests.contrib.anthropic.test_anthropic.test_anthropic_llm_sync_multiple_prompts.json b/tests/snapshots/tests.contrib.anthropic.test_anthropic.test_anthropic_llm_sync_multiple_prompts.json new file mode 100644 index 00000000000..be993323a18 --- /dev/null +++ b/tests/snapshots/tests.contrib.anthropic.test_anthropic.test_anthropic_llm_sync_multiple_prompts.json @@ -0,0 
+1,40 @@ +[[ + { + "name": "anthropic.request", + "service": "", + "resource": "anthropic.resources.messages.Messages.create", + "trace_id": 0, + "span_id": 1, + "parent_id": 0, + "type": "", + "error": 0, + "meta": { + "_dd.p.dm": "-0", + "_dd.p.tid": "665de86c00000000", + "anthropic.request.api_key": "...key>", + "anthropic.request.messages.0.content.0.text": "Hello, I am looking for information about some books!", + "anthropic.request.messages.0.content.0.type": "text", + "anthropic.request.messages.0.content.1.text": "Can you explain what Descartes meant by 'I think, therefore I am'?", + "anthropic.request.messages.0.content.1.type": "text", + "anthropic.request.messages.0.role": "user", + "anthropic.request.model": "claude-3-opus-20240229", + "anthropic.request.parameters": "{\"model\": \"claude-3-opus-20240229\", \"max_tokens\": 1024}", + "anthropic.response.completions.content.0.text": "The phrase \"I think, therefore I am\" (originally in Latin as \"Cogito, ergo sum\") is a philosophical statement by the French phil...", + "anthropic.response.completions.content.0.type": "text", + "anthropic.response.completions.finish_reason": "end_turn", + "anthropic.response.completions.role": "assistant", + "language": "python", + "runtime-id": "8e2ce3f9d69c4b6393f8f97d17bc43d3" + }, + "metrics": { + "_dd.measured": 1, + "_dd.top_level": 1, + "_dd.tracer_kr": 1.0, + "_sampling_priority_v1": 1, + "anthropic.response.usage.input_tokens": 38, + "anthropic.response.usage.output_tokens": 337, + "process_id": 37192 + }, + "duration": 2667000, + "start": 1717430380393742000 + }]] \ No newline at end of file diff --git a/tests/snapshots/tests.contrib.anthropic.test_anthropic.test_anthropic_llm_sync_multiple_prompts_with_chat_history.json b/tests/snapshots/tests.contrib.anthropic.test_anthropic.test_anthropic_llm_sync_multiple_prompts_with_chat_history.json new file mode 100644 index 00000000000..989d3ab711f --- /dev/null +++ 
b/tests/snapshots/tests.contrib.anthropic.test_anthropic.test_anthropic_llm_sync_multiple_prompts_with_chat_history.json @@ -0,0 +1,48 @@ +[[ + { + "name": "anthropic.request", + "service": "", + "resource": "anthropic.resources.messages.Messages.create", + "trace_id": 0, + "span_id": 1, + "parent_id": 0, + "type": "", + "error": 0, + "meta": { + "_dd.p.dm": "-0", + "_dd.p.tid": "665df39100000000", + "anthropic.request.api_key": "...key>", + "anthropic.request.messages.0.content.0.text": "Hello, Start all responses with your name Claude.", + "anthropic.request.messages.0.content.0.type": "text", + "anthropic.request.messages.0.content.1.text": "End all responses with [COPY, CLAUDE OVER AND OUT!]", + "anthropic.request.messages.0.content.1.type": "text", + "anthropic.request.messages.0.role": "user", + "anthropic.request.messages.1.content.0.text": "Claude: Sure! [COPY, CLAUDE OVER AND OUT!]", + "anthropic.request.messages.1.content.0.type": "text", + "anthropic.request.messages.1.role": "assistant", + "anthropic.request.messages.2.content.0.text": "Add the time and date to the beginning of your response after your name.", + "anthropic.request.messages.2.content.0.type": "text", + "anthropic.request.messages.2.content.1.text": "Explain string theory succinctly to a complete noob.", + "anthropic.request.messages.2.content.1.type": "text", + "anthropic.request.messages.2.role": "user", + "anthropic.request.model": "claude-3-opus-20240229", + "anthropic.request.parameters": "{\"model\": \"claude-3-opus-20240229\", \"max_tokens\": 1024}", + "anthropic.response.completions.content.0.text": "Claude, Friday, April 28, 2023 at 11:04 AM: String theory is a theoretical framework in physics that proposes that the fundament...", + "anthropic.response.completions.content.0.type": "text", + "anthropic.response.completions.finish_reason": "end_turn", + "anthropic.response.completions.role": "assistant", + "language": "python", + "runtime-id": "0af264443f1441098adc8b487438cebe" + 
}, + "metrics": { + "_dd.measured": 1, + "_dd.top_level": 1, + "_dd.tracer_kr": 1.0, + "_sampling_priority_v1": 1, + "anthropic.response.usage.input_tokens": 84, + "anthropic.response.usage.output_tokens": 155, + "process_id": 88493 + }, + "duration": 4876000, + "start": 1717433233172216000 + }]] From 3a568de3c6f52e63b3037dbaa9368224b59b31fd Mon Sep 17 00:00:00 2001 From: William Conti Date: Mon, 3 Jun 2024 13:01:04 -0400 Subject: [PATCH 02/33] add riotfile change --- riotfile.py | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/riotfile.py b/riotfile.py index 1fb41058dbf..1b0f89590ce 100644 --- a/riotfile.py +++ b/riotfile.py @@ -2516,6 +2516,16 @@ def select_pys(min_version=MIN_PYTHON_VERSION, max_version=MAX_PYTHON_VERSION): "cohere": latest, } ), + Venv( + name="anthropic", + command="pytest {cmdargs} tests/contrib/anthropic", + pys=select_pys(min_version="3.7", max_version="3.11"), + pkgs={ + "pytest-asyncio": latest, + "vcrpy": latest, + "anthropic": latest, + }, + ), Venv( pkgs={ "langchain": latest, From 159cadca4a1d46e38234f7f8765ed5c457932cca Mon Sep 17 00:00:00 2001 From: William Conti Date: Mon, 3 Jun 2024 13:01:52 -0400 Subject: [PATCH 03/33] llm obs integration --- ddtrace/contrib/anthropic/patch.py | 3 + ddtrace/llmobs/_integrations/anthropic.py | 100 ++++++++++++++++++++++ 2 files changed, 103 insertions(+) diff --git a/ddtrace/contrib/anthropic/patch.py b/ddtrace/contrib/anthropic/patch.py index c3872bdc735..b0a2dfa4a58 100644 --- a/ddtrace/contrib/anthropic/patch.py +++ b/ddtrace/contrib/anthropic/patch.py @@ -109,6 +109,9 @@ def traced_chat_model_generate(anthropic, pin, func, instance, args, kwargs): span.finish() raise finally: + if integration.is_pc_sampled_llmobs(span): + integration.llmobs_set_tags(span=span, resp=chat_completions, args=args, kwargs=kwargs) + span.finish() return chat_completions diff --git a/ddtrace/llmobs/_integrations/anthropic.py b/ddtrace/llmobs/_integrations/anthropic.py index 36e3baa5aa8..e9140ef826a 
100644 --- a/ddtrace/llmobs/_integrations/anthropic.py +++ b/ddtrace/llmobs/_integrations/anthropic.py @@ -1,9 +1,20 @@ +import json from typing import Any from typing import Dict +from typing import Iterable +from typing import List from typing import Optional from ddtrace._trace.span import Span +from ddtrace.contrib.anthropic.utils import _get_attr from ddtrace.internal.logger import get_logger +from ddtrace.internal.utils import get_argument_value +from ddtrace.llmobs._constants import INPUT_MESSAGES +from ddtrace.llmobs._constants import METADATA +from ddtrace.llmobs._constants import METRICS +from ddtrace.llmobs._constants import MODEL_NAME +from ddtrace.llmobs._constants import OUTPUT_MESSAGES +from ddtrace.llmobs._constants import SPAN_KIND from .base import BaseLLMIntegration @@ -33,3 +44,92 @@ def _set_base_span_tags( span.set_tag_str(API_KEY, f"...{str(api_key[-4:])}") else: span.set_tag_str(API_KEY, api_key) + + def llmobs_set_tags( + self, + resp: Any, + span: Span, + args: List[Any], + kwargs: Dict[str, Any], + err: Optional[Any] = None, + ) -> None: + """Extract prompt/response tags from a completion and set them as temporary "_ml_obs.*" tags.""" + if not self.llmobs_enabled: + return + + parameters = { + "temperature": float(span.get_tag("anthropic.request.parameters.temperature") or 1.0), + "max_tokens": int(span.get_tag("anthropic.request.parameters.max_tokens") or 0), + } + messages = get_argument_value(args, kwargs, 0, "messages") + input_messages = self._extract_input_message(messages) + + span.set_tag_str(SPAN_KIND, "llm") + span.set_tag_str(MODEL_NAME, span.get_tag("anthropic.request.model") or "") + span.set_tag_str(INPUT_MESSAGES, json.dumps(input_messages)) + span.set_tag_str(METADATA, json.dumps(parameters)) + if err or resp is None: + span.set_tag_str(OUTPUT_MESSAGES, json.dumps([{"content": ""}])) + else: + output_messages = self._extract_output_message(resp) + span.set_tag_str(OUTPUT_MESSAGES, json.dumps(output_messages)) + + 
span.set_tag_str(METRICS, json.dumps(_get_llmobs_metrics_tags(span))) + + def _extract_input_message(self, messages): + """Extract input messages from the stored prompt. + Anthropic allows for messages and multiple texts in a message, which requires some special casing. + """ + if not isinstance(messages, Iterable): + log.warning("Anthropic input must be a list of messages.") + + input_messages = [] + for message in messages: + if not isinstance(message, dict): + log.warning("Anthropic message input must be a list of message param dicts.") + continue + + content = message.get("content", None) + role = message.get("role", None) + + if role is None or content is None: + log.warning("Anthropic input message must have content and role.") + + if isinstance(content, str): + input_messages.append({"content": content, "role": role}) + + elif isinstance(content, list): + for block in content: + if block.get("type") == "text": + input_messages.append({"content": block.get("text", ""), "role": role}) + elif block.get("type") == "image": + # Store a placeholder for potentially enormous binary image data. 
+ input_messages.append({"content": "([IMAGE DETECTED])", "role": role}) + else: + input_messages.append({"content": str(block), "role": role}) + + return input_messages + + def _extract_output_message(self, response): + """Extract output messages from the stored response.""" + output_messages = [] + content = _get_attr(response, "content", None) + role = _get_attr(response, "role", "") + + if isinstance(content, str): + return [{"content": self.trunc(content), "role": role}] + + elif isinstance(content, list): + for completion in content: + text = _get_attr(completion, "text", None) + if isinstance(text, str): + output_messages.append({"content": self.trunc(text), "role": role}) + return output_messages + + +def _get_llmobs_metrics_tags(span): + return { + "input_tokens": span.get_metric("anthropic.response.usage.input_tokens"), + "output_tokens": span.get_metric("anthropic.response.usage.output_tokens"), + "total_tokens": span.get_metric("anthropic.response.usage.total_tokens"), + } From 11a6861825795b2e72bba9079f6b79873b5d1079 Mon Sep 17 00:00:00 2001 From: William Conti Date: Mon, 3 Jun 2024 15:26:48 -0400 Subject: [PATCH 04/33] add tests --- ddtrace/contrib/anthropic/utils.py | 18 ++-- ddtrace/llmobs/_integrations/anthropic.py | 6 +- .../cassettes/anthropic_hello_world.yaml | 86 +++++++++++++++++++ tests/contrib/anthropic/conftest.py | 42 ++++++++- tests/contrib/anthropic/test_anthropic.py | 6 -- .../anthropic/test_anthropic_llmobs.py | 65 ++++++++++++++ 6 files changed, 203 insertions(+), 20 deletions(-) create mode 100644 tests/contrib/anthropic/cassettes/anthropic_hello_world.yaml create mode 100644 tests/contrib/anthropic/test_anthropic_llmobs.py diff --git a/ddtrace/contrib/anthropic/utils.py b/ddtrace/contrib/anthropic/utils.py index 2833d3d05ef..5eb8f576f39 100644 --- a/ddtrace/contrib/anthropic/utils.py +++ b/ddtrace/contrib/anthropic/utils.py @@ -19,12 +19,12 @@ def _get_attr(o: Any, attr: str, default: Any): def record_usage(span: Span, usage: 
Dict[str, Any]) -> None: if not usage: return - for token_type in ("input", "output"): - num_tokens = _get_attr(usage, "%s_tokens" % token_type, None) - if num_tokens is None: - continue - span.set_metric("anthropic.response.usage.%s_tokens" % token_type, num_tokens) - - if "input" in usage and "output" in usage: - total_tokens = usage["output"] + usage["input"] - span.set_metric("anthropic.response.usage.total_tokens", total_tokens) + + input_tokens = _get_attr(usage, "input_tokens", None) + output_tokens = _get_attr(usage, "output_tokens", None) + + span.set_metric("anthropic.response.usage.input_tokens", input_tokens) + span.set_metric("anthropic.response.usage.output_tokens", output_tokens) + + if input_tokens is not None and output_tokens is not None: + span.set_metric("anthropic.response.usage.total_tokens", input_tokens + output_tokens) diff --git a/ddtrace/llmobs/_integrations/anthropic.py b/ddtrace/llmobs/_integrations/anthropic.py index e9140ef826a..378271d1e55 100644 --- a/ddtrace/llmobs/_integrations/anthropic.py +++ b/ddtrace/llmobs/_integrations/anthropic.py @@ -13,6 +13,7 @@ from ddtrace.llmobs._constants import METADATA from ddtrace.llmobs._constants import METRICS from ddtrace.llmobs._constants import MODEL_NAME +from ddtrace.llmobs._constants import MODEL_PROVIDER from ddtrace.llmobs._constants import OUTPUT_MESSAGES from ddtrace.llmobs._constants import SPAN_KIND @@ -58,8 +59,8 @@ def llmobs_set_tags( return parameters = { - "temperature": float(span.get_tag("anthropic.request.parameters.temperature") or 1.0), - "max_tokens": int(span.get_tag("anthropic.request.parameters.max_tokens") or 0), + "temperature": float(kwargs.get("temperature", 1.0)), + "max_tokens": float(kwargs.get("max_tokens", 0)), } messages = get_argument_value(args, kwargs, 0, "messages") input_messages = self._extract_input_message(messages) @@ -68,6 +69,7 @@ def llmobs_set_tags( span.set_tag_str(MODEL_NAME, span.get_tag("anthropic.request.model") or "") 
span.set_tag_str(INPUT_MESSAGES, json.dumps(input_messages)) span.set_tag_str(METADATA, json.dumps(parameters)) + span.set_tag_str(MODEL_PROVIDER, "anthropic") if err or resp is None: span.set_tag_str(OUTPUT_MESSAGES, json.dumps([{"content": ""}])) else: diff --git a/tests/contrib/anthropic/cassettes/anthropic_hello_world.yaml b/tests/contrib/anthropic/cassettes/anthropic_hello_world.yaml new file mode 100644 index 00000000000..ecc04fc5621 --- /dev/null +++ b/tests/contrib/anthropic/cassettes/anthropic_hello_world.yaml @@ -0,0 +1,86 @@ +interactions: +- request: + body: '{"max_tokens": 15, "messages": [{"role": "user", "content": [{"type": "text", + "text": "Reply: ''Hello World!'' when I say: ''Hello''"}, {"type": "text", "text": + "Hello"}]}, {"role": "assistant", "content": "Hello World!"}, {"role": "user", + "content": [{"type": "text", "text": "Hello"}]}], "model": "claude-3-opus-20240229", + "temperature": 0.8}' + headers: + accept: + - application/json + accept-encoding: + - gzip, deflate + anthropic-version: + - '2023-06-01' + connection: + - keep-alive + content-length: + - '340' + content-type: + - application/json + host: + - api.anthropic.com + user-agent: + - Anthropic/Python 0.28.0 + x-stainless-arch: + - arm64 + x-stainless-async: + - 'false' + x-stainless-lang: + - python + x-stainless-os: + - MacOS + x-stainless-package-version: + - 0.28.0 + x-stainless-runtime: + - CPython + x-stainless-runtime-version: + - 3.10.13 + method: POST + uri: https://api.anthropic.com/v1/messages + response: + body: + string: !!binary | + H4sIAAAAAAAAA0yOzWrDMBCEX6WdswyuHArRrRBI6TGXHkIxxtoEE3nX1a5CgvG7F4cWehr45oeZ + MUQEjHpu65f97uO+yftyulxPh21zGMvV797gYPeJ1hSpdmeCQ5a0gk51UOvY4DBKpISAPnUlUtVU + MhWtfO03tfdbOPTCRmwIx/lv0Oi2Vh8S8E4pydOn5BSfsXw5qMnUZupUGAHEsbWSGb+G0nch7gmB + S0oO5fEtzBh4KtaaXIgVoWkcpNh/9LosPwAAAP//AwDPtjn1+AAAAA== + headers: + CF-Cache-Status: + - DYNAMIC + CF-RAY: + - 88e1b7e91ae042d0-EWR + Connection: + - keep-alive + Content-Encoding: + - gzip + 
Content-Type: + - application/json + Date: + - Mon, 03 Jun 2024 18:24:10 GMT + Server: + - cloudflare + Transfer-Encoding: + - chunked + anthropic-ratelimit-requests-limit: + - '5' + anthropic-ratelimit-requests-remaining: + - '4' + anthropic-ratelimit-requests-reset: + - '2024-06-03T18:24:57Z' + anthropic-ratelimit-tokens-limit: + - '10000' + anthropic-ratelimit-tokens-remaining: + - '10000' + anthropic-ratelimit-tokens-reset: + - '2024-06-03T18:24:57Z' + request-id: + - req_01Ey5yndaLUmUmn1A6YDSJrr + via: + - 1.1 google + x-cloud-trace-context: + - fd17395b60d4b6d19c95418c5797b164 + status: + code: 200 + message: OK +version: 1 diff --git a/tests/contrib/anthropic/conftest.py b/tests/contrib/anthropic/conftest.py index fe0010849e6..2c9c08b2d96 100644 --- a/tests/contrib/anthropic/conftest.py +++ b/tests/contrib/anthropic/conftest.py @@ -1,22 +1,31 @@ import os +import mock import pytest from ddtrace import Pin from ddtrace.contrib.anthropic.patch import patch from ddtrace.contrib.anthropic.patch import unpatch +from ddtrace.llmobs import LLMObs from tests.utils import DummyTracer from tests.utils import DummyWriter from tests.utils import override_config from tests.utils import override_env from tests.utils import override_global_config +from .utils import get_request_vcr + @pytest.fixture def ddtrace_config_anthropic(): return {} +@pytest.fixture +def ddtrace_global_config(): + return {} + + @pytest.fixture def snapshot_tracer(anthropic): pin = Pin.get_from(anthropic) @@ -24,17 +33,39 @@ def snapshot_tracer(anthropic): @pytest.fixture -def mock_tracer(anthropic): +def mock_tracer(ddtrace_global_config, anthropic): pin = Pin.get_from(anthropic) mock_tracer = DummyTracer(writer=DummyWriter(trace_flush_enabled=False)) pin.override(anthropic, tracer=mock_tracer) pin.tracer.configure() + if ddtrace_global_config.get("_llmobs_enabled", False): + # Have to disable and re-enable LLMObs to use to mock tracer. 
+ LLMObs.disable() + LLMObs.enable(_tracer=mock_tracer, integrations_enabled=False) yield mock_tracer @pytest.fixture -def anthropic(ddtrace_config_anthropic): - with override_global_config({"_dd_api_key": ""}): +def mock_llmobs_writer(scope="session"): + patcher = mock.patch("ddtrace.llmobs._llmobs.LLMObsSpanWriter") + try: + LLMObsSpanWriterMock = patcher.start() + m = mock.MagicMock() + LLMObsSpanWriterMock.return_value = m + yield m + finally: + patcher.stop() + + +def default_global_config(): + return {"_dd_api_key": ""} + + +@pytest.fixture +def anthropic(ddtrace_global_config, ddtrace_config_anthropic): + global_config = default_global_config() + global_config.update(ddtrace_global_config) + with override_global_config(global_config): with override_config("anthropic", ddtrace_config_anthropic): with override_env( dict( @@ -46,3 +77,8 @@ def anthropic(ddtrace_config_anthropic): yield anthropic unpatch() + + +@pytest.fixture(scope="session") +def request_vcr(): + yield get_request_vcr() diff --git a/tests/contrib/anthropic/test_anthropic.py b/tests/contrib/anthropic/test_anthropic.py index 4e0d0a63bc9..b152a3b0512 100644 --- a/tests/contrib/anthropic/test_anthropic.py +++ b/tests/contrib/anthropic/test_anthropic.py @@ -1,14 +1,8 @@ import pytest -from tests.contrib.anthropic.utils import get_request_vcr from tests.utils import override_global_config -@pytest.fixture(scope="session") -def request_vcr(): - yield get_request_vcr() - - def test_global_tags(ddtrace_config_anthropic, anthropic, request_vcr, mock_tracer): """ When the global config UST tags are set diff --git a/tests/contrib/anthropic/test_anthropic_llmobs.py b/tests/contrib/anthropic/test_anthropic_llmobs.py new file mode 100644 index 00000000000..a529e8bd7c3 --- /dev/null +++ b/tests/contrib/anthropic/test_anthropic_llmobs.py @@ -0,0 +1,65 @@ +import pytest + +from tests.llmobs._utils import _expected_llmobs_llm_span_event + + +@pytest.mark.parametrize( + "ddtrace_global_config", 
[dict(_llmobs_enabled=True, _llmobs_sample_rate=1.0, _llmobs_ml_app="")] +) +class TestLLMObsAnthropic: + def test_completion(self, anthropic, ddtrace_global_config, mock_llmobs_writer, mock_tracer, request_vcr): + """Ensure llmobs records are emitted for completion endpoints when configured. + + Also ensure the llmobs records have the correct tagging including trace/span ID for trace correlation. + """ + llm = anthropic.Anthropic() + with request_vcr.use_cassette("anthropic_hello_world.yaml"): + llm.messages.create( + model="claude-3-opus-20240229", + max_tokens=15, + messages=[ + { + "role": "user", + "content": [ + { + "type": "text", + "text": "Reply: 'Hello World!' when I say: 'Hello'", + }, + { + "type": "text", + "text": "Hello", + }, + ], + }, + {"role": "assistant", "content": "Hello World!"}, + { + "role": "user", + "content": [ + { + "type": "text", + "text": "Hello", + } + ], + }, + ], + temperature=0.8, + ) + span = mock_tracer.pop_traces()[0][0] + assert mock_llmobs_writer.enqueue.call_count == 1 + mock_llmobs_writer.enqueue.assert_called_with( + _expected_llmobs_llm_span_event( + span, + model_name="claude-3-opus-20240229", + model_provider="anthropic", + input_messages=[ + {"content": "Reply: 'Hello World!' 
when I say: 'Hello'", "role": "user"}, + {"content": "Hello", "role": "user"}, + {"content": "Hello World!", "role": "assistant"}, + {"content": "Hello", "role": "user"}, + ], + output_messages=[{"content": "Hello World!", "role": "assistant"}], + metadata={"temperature": 0.8, "max_tokens": 15}, + token_metrics={"input_tokens": 33, "output_tokens": 6, "total_tokens": 39}, + tags={"ml_app": ""}, + ) + ) From 716b6ba9bd17f41d13c414b7f951814acbdb1213 Mon Sep 17 00:00:00 2001 From: William Conti Date: Mon, 3 Jun 2024 15:34:40 -0400 Subject: [PATCH 05/33] more clean up --- ddtrace/contrib/anthropic/patch.py | 63 +++++++++++------------ ddtrace/contrib/anthropic/utils.py | 16 ------ ddtrace/llmobs/_integrations/anthropic.py | 13 +++++ 3 files changed, 44 insertions(+), 48 deletions(-) diff --git a/ddtrace/contrib/anthropic/patch.py b/ddtrace/contrib/anthropic/patch.py index c3872bdc735..cde6c35a898 100644 --- a/ddtrace/contrib/anthropic/patch.py +++ b/ddtrace/contrib/anthropic/patch.py @@ -2,6 +2,7 @@ import os import sys from typing import Any +from typing import Optional import anthropic @@ -15,7 +16,6 @@ from ddtrace.pin import Pin from .utils import _get_attr -from .utils import record_usage log = get_logger(__name__) @@ -35,7 +35,7 @@ def get_version(): ) -def _extract_api_key(instance: Any) -> str: +def _extract_api_key(instance: Any) -> Optional[str]: """ Extract and format LLM-provider API key from instance. 
""" @@ -65,39 +65,38 @@ def traced_chat_model_generate(anthropic, pin, func, instance, args, kwargs): chat_completions = None try: for message_idx, message in enumerate(chat_messages): - if isinstance(message, dict): - if isinstance(message.get("content", None), str): - if integration.is_pc_sampled_span(span) and message.get("content", "") != "": - span.set_tag_str( - "anthropic.request.messages.%d.content.0.text" % (message_idx), - integration.trunc(message.get("content", "")), - ) + if isinstance(message.get("content", None), str): + if integration.is_pc_sampled_span(span): span.set_tag_str( - "anthropic.request.messages.%d.content.0.type" % (message_idx), - "text", + "anthropic.request.messages.%d.content.0.text" % (message_idx), + integration.trunc(message.get("content", "")), ) - elif isinstance(message.get("content", None), list): - for block_idx, block in enumerate(message.get("content", [])): - if integration.is_pc_sampled_span(span): - if block.get("type", None) == "text" and block.get("text", "") != "": - span.set_tag_str( - "anthropic.request.messages.%d.content.%d.text" % (message_idx, block_idx), - integration.trunc(str(block.get("text", ""))), - ) - elif block.get("type", None) == "image": - span.set_tag_str( - "anthropic.request.messages.%d.content.%d.text" % (message_idx, block_idx), - "([IMAGE DETECTED])", - ) - - span.set_tag_str( - "anthropic.request.messages.%d.content.%d.type" % (message_idx, block_idx), - block.get("type", "text"), - ) span.set_tag_str( - "anthropic.request.messages.%d.role" % (message_idx), - message.get("role", ""), + "anthropic.request.messages.%d.content.0.type" % (message_idx), + "text", ) + elif isinstance(message.get("content", None), list): + for block_idx, block in enumerate(message.get("content", [])): + if integration.is_pc_sampled_span(span): + if block.get("type", None) == "text": + span.set_tag_str( + "anthropic.request.messages.%d.content.%d.text" % (message_idx, block_idx), + 
integration.trunc(str(block.get("text", ""))), + ) + elif block.get("type", None) == "image": + span.set_tag_str( + "anthropic.request.messages.%d.content.%d.text" % (message_idx, block_idx), + "([IMAGE DETECTED])", + ) + + span.set_tag_str( + "anthropic.request.messages.%d.content.%d.type" % (message_idx, block_idx), + block.get("type", "text"), + ) + span.set_tag_str( + "anthropic.request.messages.%d.role" % (message_idx), + message.get("role", ""), + ) params_to_tag = {k: v for k, v in kwargs.items() if k != "messages"} span.set_tag_str("anthropic.request.parameters", json.dumps(params_to_tag)) @@ -131,7 +130,7 @@ def handle_non_streamed_response(integration, chat_completions, args, kwargs, sp span.set_tag_str("anthropic.response.completions.role", chat_completions.role) usage = _get_attr(chat_completions, "usage", {}) - record_usage(span, usage) + integration.record_usage(span, usage) def patch(): diff --git a/ddtrace/contrib/anthropic/utils.py b/ddtrace/contrib/anthropic/utils.py index 2833d3d05ef..8830ca49456 100644 --- a/ddtrace/contrib/anthropic/utils.py +++ b/ddtrace/contrib/anthropic/utils.py @@ -1,7 +1,5 @@ from typing import Any -from typing import Dict -from ddtrace._trace.span import Span from ddtrace.internal.logger import get_logger @@ -14,17 +12,3 @@ def _get_attr(o: Any, attr: str, default: Any): return o.get(attr, default) else: return getattr(o, attr, default) - - -def record_usage(span: Span, usage: Dict[str, Any]) -> None: - if not usage: - return - for token_type in ("input", "output"): - num_tokens = _get_attr(usage, "%s_tokens" % token_type, None) - if num_tokens is None: - continue - span.set_metric("anthropic.response.usage.%s_tokens" % token_type, num_tokens) - - if "input" in usage and "output" in usage: - total_tokens = usage["output"] + usage["input"] - span.set_metric("anthropic.response.usage.total_tokens", total_tokens) diff --git a/ddtrace/llmobs/_integrations/anthropic.py b/ddtrace/llmobs/_integrations/anthropic.py index 
36e3baa5aa8..cffb9c10996 100644 --- a/ddtrace/llmobs/_integrations/anthropic.py +++ b/ddtrace/llmobs/_integrations/anthropic.py @@ -3,6 +3,7 @@ from typing import Optional from ddtrace._trace.span import Span +from ddtrace.contrib.anthropic.utils import _get_attr from ddtrace.internal.logger import get_logger from .base import BaseLLMIntegration @@ -33,3 +34,15 @@ def _set_base_span_tags( span.set_tag_str(API_KEY, f"...{str(api_key[-4:])}") else: span.set_tag_str(API_KEY, api_key) + + def record_usage(self, span: Span, usage: Dict[str, Any]) -> None: + if not usage: + return + input_tokens = _get_attr(usage, "input_tokens", None) + output_tokens = _get_attr(usage, "output_tokens", None) + + span.set_metric("anthropic.response.usage.input_tokens", input_tokens) + span.set_metric("anthropic.response.usage.output_tokens", output_tokens) + + if input_tokens is not None and output_tokens is not None: + span.set_metric("anthropic.response.usage.total_tokens", input_tokens + output_tokens) From 92baacc0d3d0436cc8f67f7622e29fcbb3ad91f3 Mon Sep 17 00:00:00 2001 From: William Conti Date: Mon, 3 Jun 2024 15:51:52 -0400 Subject: [PATCH 06/33] more changes --- ddtrace/contrib/anthropic/patch.py | 9 +- .../anthropic_completion_sync_stream.yaml | 382 ++++++++++++++++++ tests/contrib/anthropic/test_anthropic.py | 27 +- ...st_anthropic.test_anthropic_llm_error.json | 2 +- ...est_anthropic.test_anthropic_llm_sync.json | 3 +- ...t_anthropic_llm_sync_multiple_prompts.json | 3 +- ...nc_multiple_prompts_with_chat_history.json | 3 +- ...hropic.test_anthropic_llm_sync_stream.json | 32 ++ 8 files changed, 454 insertions(+), 7 deletions(-) create mode 100644 tests/contrib/anthropic/cassettes/anthropic_completion_sync_stream.yaml create mode 100644 tests/snapshots/tests.contrib.anthropic.test_anthropic.test_anthropic_llm_sync_stream.json diff --git a/ddtrace/contrib/anthropic/patch.py b/ddtrace/contrib/anthropic/patch.py index cde6c35a898..9d6407488b8 100644 --- 
a/ddtrace/contrib/anthropic/patch.py +++ b/ddtrace/contrib/anthropic/patch.py @@ -54,7 +54,7 @@ def traced_chat_model_generate(anthropic, pin, func, instance, args, kwargs): span = integration.trace( pin, - "%s.%s.%s" % (instance.__module__, instance.__class__.__name__, operation_name), + "%s.%s" % (instance.__class__.__name__, operation_name), submit_to_llmobs=True, interface_type="chat_model", provider="anthropic", @@ -65,6 +65,8 @@ def traced_chat_model_generate(anthropic, pin, func, instance, args, kwargs): chat_completions = None try: for message_idx, message in enumerate(chat_messages): + if not isinstance(message, dict): + continue if isinstance(message.get("content", None), str): if integration.is_pc_sampled_span(span): span.set_tag_str( @@ -102,7 +104,10 @@ def traced_chat_model_generate(anthropic, pin, func, instance, args, kwargs): chat_completions = func(*args, **kwargs) - handle_non_streamed_response(integration, chat_completions, args, kwargs, span) + if not isinstance(chat_completions, anthropic.Stream) and not isinstance( + chat_completions, anthropic.lib.streaming._messages.MessageStreamManager + ): + handle_non_streamed_response(integration, chat_completions, args, kwargs, span) except Exception: span.set_exc_info(*sys.exc_info()) span.finish() diff --git a/tests/contrib/anthropic/cassettes/anthropic_completion_sync_stream.yaml b/tests/contrib/anthropic/cassettes/anthropic_completion_sync_stream.yaml new file mode 100644 index 00000000000..b949f366bed --- /dev/null +++ b/tests/contrib/anthropic/cassettes/anthropic_completion_sync_stream.yaml @@ -0,0 +1,382 @@ +interactions: +- request: + body: '{"max_tokens": 1024, "messages": [{"role": "user", "content": [{"type": + "text", "text": "Can you explain what Descartes meant by ''I think, therefore + I am''?"}]}], "model": "claude-3-opus-20240229", "stream": true}' + headers: + accept: + - application/json + accept-encoding: + - gzip, deflate + anthropic-version: + - '2023-06-01' + connection: + - 
keep-alive + content-length: + - '212' + content-type: + - application/json + host: + - api.anthropic.com + user-agent: + - Anthropic/Python 0.28.0 + x-stainless-arch: + - arm64 + x-stainless-async: + - 'false' + x-stainless-lang: + - python + x-stainless-os: + - MacOS + x-stainless-package-version: + - 0.28.0 + x-stainless-runtime: + - CPython + x-stainless-runtime-version: + - 3.10.13 + method: POST + uri: https://api.anthropic.com/v1/messages + response: + body: + string: "event: message_start\ndata: {\"type\":\"message_start\",\"message\":{\"id\":\"msg_01FEMDqxXS12RxKs3fDbyQSQ\",\"type\":\"message\",\"role\":\"assistant\",\"model\":\"claude-3-opus-20240229\",\"content\":[],\"stop_reason\":null,\"stop_sequence\":null,\"usage\":{\"input_tokens\":27,\"output_tokens\":1}}}\n\nevent: + content_block_start\ndata: {\"type\":\"content_block_start\",\"index\":0,\"content_block\":{\"type\":\"text\",\"text\":\"\"} + \ }\n\nevent: ping\ndata: {\"type\": \"ping\"}\n\nevent: content_block_delta\ndata: + {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"The\"} + \ }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" + phrase\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" + \\\"\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"I\"} + \ }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" + think\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\",\"} + \ }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" + therefore\"} }\n\nevent: 
content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" + I\"}}\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" + am\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"\\\"\"} + \ }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" + (\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"originally\"}}\n\nevent: + content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" + in\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" + Latin\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" + as\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" + \\\"\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"Cog\"} + \ }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"ito\"} + \ }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\",\"} + \ }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" + er\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"go\"} + \ }\n\nevent: 
content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" + sum\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"\\\")\"} + \ }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" + is\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" + a\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" + philosophical\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" + statement\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" + by\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" + Ren\xE9\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" + Des\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"car\"} + \ }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"tes\"} + \ }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\",\"} + \ }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" + a\"}}\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" + French\"} }\n\nevent: 
content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" + philosopher\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\",\"} + \ }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" + mathematician\"}}\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\",\"} + \ }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" + an\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"d + scientist\"}}\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" + in\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" + the\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" + \"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"17\"}}\n\nevent: + content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"th\"} + \ }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" + century\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\".\"} + \ }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" + This\"} }\n\nevent: 
content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" + statement\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" + is\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" + a\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" + part\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" + of\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" + his\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" + approach\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" + to\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" + epis\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"tem\"} + }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"ology\"} + \ }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\",\"} + \ }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" + which\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" + is\"} }\n\nevent: 
content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" + the\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" + theory\"}}\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" + of\"}}\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" + knowledge\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\".\"} + \ }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"\\n\\nDes\"} + \ }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"car\"} + \ }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"tes\"} + \ }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" + was\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" + seeking\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" + a\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" + foundation\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" + for\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" + knowledge\"} 
}\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" + that\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" + was\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" + beyon\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"d + doubt\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\".\"} + \ }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" + He\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" + use\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"d + a\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" + metho\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"d + of\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" + systematic\"}}\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" + doubt\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\",\"} + \ }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" + questioning\"} 
}\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" + everything\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" + that\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" + coul\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"d + be\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" + doub\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"te\"} + \ }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"d + until\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" + he\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" + reache\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"d + something\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" + that\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" + was\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" + in\"}}\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"dub\"} + \ 
}\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"itable\"} + \ }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\".\"}}\n\nevent: + content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"\\n\\nHe\"} + \ }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" + realize\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"d + that\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" + he\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" + coul\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"d + doubt\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" + the\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" + existence\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" + of\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" + his\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" + body\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\",\"} + \ 
}\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" + the\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" + external\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" + worl\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"d,\"} + \ }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" + an\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"d + almost\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" + everything\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" + else\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\".\"} + \ }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" + However\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\",\"} + \ }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" + he\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" + couldn\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"'t\"} + \ 
}\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" + doubt\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" + the\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" + existence\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" + of\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" + his\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" + own\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" + min\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"d,\"} + \ }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" + because\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" + the\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" + very\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" + act\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" + of\"}}\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" + doub\"} }\n\nevent: 
content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"ting\"} + \ }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" + requires\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" + thought\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\",\"} + \ }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" + an\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"d + thought\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" + requires\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" + a\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" + t\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"hin\"} + \ }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"ker\"} + \ }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\".\"} + \ }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"\\n\\nTherefore\"}}\n\nevent: + content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\",\"} + \ }\n\nevent: 
content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" + the\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" + fact\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" + that\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" + he\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" + was\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" + thinking\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" + prove\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"d + his\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" + existence\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\".\"} + \ }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" + Even\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" + if\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" + he\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" + were\"} }\n\nevent: 
content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" + dece\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"ive\"} + \ }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"d + about\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" + everything\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" + else\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\",\"} + \ }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" + he\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" + couldn\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"'t\"} + \ }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" + be\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" + dece\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"ive\"} + \ }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"d + about\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" + the\"} }\n\nevent: 
content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" + fact\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" + that\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" + he\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" + was\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" + thinking\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\",\"} + \ }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" + an\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"d + therefore\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\",\"} + \ }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" + the\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" + fact\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" + that\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" + he\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" + existe\"} }\n\nevent: 
content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"d.\"} + }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"\\n\\nIn\"} + \ }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" + other\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" + words\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\",\"} + \ }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" + \\\"\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"I\"} + \ }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" + think\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\",\"} + \ }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" + therefore\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" + I\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" + am\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"\\\"\"} + \ }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" + means\"} }\n\nevent: 
content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" + that\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" + the\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" + act\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" + of\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" + thinking\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" + itself\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" + is\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" + proof\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" + of\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" + one\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"'s\"}}\n\nevent: + content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" + existence\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\".\"} + \ }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" + It\"} }\n\nevent: 
content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"'s\"} + \ }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" + a\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" + foun\"}}\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"dational\"} + \ }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" + principle\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" + from\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" + which\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" + Des\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"car\"} + \ }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"tes\"} + \ }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" + believe\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"d + all\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" + other\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" + knowledge\"} 
}\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" + coul\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"d + be\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" + derive\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"d.\"} + \ }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"\\n\\nThis\"} + \ }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" + statement\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" + has\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" + been\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" + influential\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" + in\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" + the\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" + development\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" + of\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" + 
Western\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" + philosophy\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\",\"} + \ }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" + particularly\"}}\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" + in\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" + the\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" + fields\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" + of\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" + epis\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"tem\"} + \ }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"ology\"} + \ }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" + an\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"d + metaph\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"ys\"} + \ }\n\nevent: content_block_delta\ndata: 
{\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"ics\"} + \ }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\".\"} + \ }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" + It\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" + marks\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" + a\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" + significant\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" + break\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" + from\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" + medieval\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" + schol\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"ast\"} + \ }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"icism\"} + \ }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" + an\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"d + a\"} }\n\nevent: content_block_delta\ndata: 
{\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" + turn\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" + towards\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" + the\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" + subj\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"ective\"} + \ }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" + individual\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" + as\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" + the\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" + foundation\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" + of\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" + philosophical\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" + inquiry\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\".\"} + \ }\n\nevent: content_block_stop\ndata: {\"type\":\"content_block_stop\",\"index\":0 + \ }\n\nevent: message_delta\ndata: 
{\"type\":\"message_delta\",\"delta\":{\"stop_reason\":\"end_turn\",\"stop_sequence\":null},\"usage\":{\"output_tokens\":311} + \ }\n\nevent: message_stop\ndata: {\"type\":\"message_stop\"}\n\n" + headers: + CF-Cache-Status: + - DYNAMIC + CF-RAY: + - 88e232357b2c19c3-EWR + Cache-Control: + - no-cache + Connection: + - keep-alive + Content-Type: + - text/event-stream; charset=utf-8 + Date: + - Mon, 03 Jun 2024 19:47:39 GMT + Server: + - cloudflare + Transfer-Encoding: + - chunked + anthropic-ratelimit-requests-limit: + - '5' + anthropic-ratelimit-requests-remaining: + - '4' + anthropic-ratelimit-requests-reset: + - '2024-06-03T19:47:57Z' + anthropic-ratelimit-tokens-limit: + - '10000' + anthropic-ratelimit-tokens-remaining: + - '9000' + anthropic-ratelimit-tokens-reset: + - '2024-06-03T19:47:57Z' + request-id: + - req_01JP4anL918nDaHauSNAs9Hu + via: + - 1.1 google + status: + code: 200 + message: OK +version: 1 diff --git a/tests/contrib/anthropic/test_anthropic.py b/tests/contrib/anthropic/test_anthropic.py index 4e0d0a63bc9..17f669256f5 100644 --- a/tests/contrib/anthropic/test_anthropic.py +++ b/tests/contrib/anthropic/test_anthropic.py @@ -27,7 +27,7 @@ def test_global_tags(ddtrace_config_anthropic, anthropic, request_vcr, mock_trac ) span = mock_tracer.pop_traces()[0][0] - assert span.resource == "anthropic.resources.messages.Messages.create" + assert span.resource == "Messages.create" assert span.service == "test-svc" assert span.get_tag("env") == "staging" assert span.get_tag("version") == "1234" @@ -113,3 +113,28 @@ def test_anthropic_llm_error(anthropic, request_vcr): with pytest.raises(invalid_error): with request_vcr.use_cassette("anthropic_completion_error.yaml"): llm.messages.create(model="claude-3-opus-20240229", max_tokens=1024, messages=["Invalid content"]) + + +@pytest.mark.snapshot() +def test_anthropic_llm_sync_stream(anthropic, request_vcr): + llm = anthropic.Anthropic() + with request_vcr.use_cassette("anthropic_completion_sync_stream.yaml"): + 
stream = llm.messages.create( + model="claude-3-opus-20240229", + max_tokens=1024, + messages=[ + { + "role": "user", + "content": [ + { + "type": "text", + "text": "Can you explain what Descartes meant by 'I think, therefore I am'?", + } + ], + }, + ], + stream=True, + ) + for chunk in stream: + print(chunk.type) + diff --git a/tests/snapshots/tests.contrib.anthropic.test_anthropic.test_anthropic_llm_error.json b/tests/snapshots/tests.contrib.anthropic.test_anthropic.test_anthropic_llm_error.json index 86ddb4ffd9d..788829c06b0 100644 --- a/tests/snapshots/tests.contrib.anthropic.test_anthropic.test_anthropic_llm_error.json +++ b/tests/snapshots/tests.contrib.anthropic.test_anthropic.test_anthropic_llm_error.json @@ -2,7 +2,7 @@ { "name": "anthropic.request", "service": "", - "resource": "anthropic.resources.messages.Messages.create", + "resource": "Messages.create", "trace_id": 0, "span_id": 1, "parent_id": 0, diff --git a/tests/snapshots/tests.contrib.anthropic.test_anthropic.test_anthropic_llm_sync.json b/tests/snapshots/tests.contrib.anthropic.test_anthropic.test_anthropic_llm_sync.json index bf80302b51b..1faae35033c 100644 --- a/tests/snapshots/tests.contrib.anthropic.test_anthropic.test_anthropic_llm_sync.json +++ b/tests/snapshots/tests.contrib.anthropic.test_anthropic.test_anthropic_llm_sync.json @@ -2,7 +2,7 @@ { "name": "anthropic.request", "service": "", - "resource": "anthropic.resources.messages.Messages.create", + "resource": "Messages.create", "trace_id": 0, "span_id": 1, "parent_id": 0, @@ -31,6 +31,7 @@ "_sampling_priority_v1": 1, "anthropic.response.usage.input_tokens": 27, "anthropic.response.usage.output_tokens": 303, + "anthropic.response.usage.total_tokens": 330, "process_id": 37192 }, "duration": 2370000, diff --git a/tests/snapshots/tests.contrib.anthropic.test_anthropic.test_anthropic_llm_sync_multiple_prompts.json b/tests/snapshots/tests.contrib.anthropic.test_anthropic.test_anthropic_llm_sync_multiple_prompts.json index 
be993323a18..3e291a64fd8 100644 --- a/tests/snapshots/tests.contrib.anthropic.test_anthropic.test_anthropic_llm_sync_multiple_prompts.json +++ b/tests/snapshots/tests.contrib.anthropic.test_anthropic.test_anthropic_llm_sync_multiple_prompts.json @@ -2,7 +2,7 @@ { "name": "anthropic.request", "service": "", - "resource": "anthropic.resources.messages.Messages.create", + "resource": "Messages.create", "trace_id": 0, "span_id": 1, "parent_id": 0, @@ -33,6 +33,7 @@ "_sampling_priority_v1": 1, "anthropic.response.usage.input_tokens": 38, "anthropic.response.usage.output_tokens": 337, + "anthropic.response.usage.total_tokens": 375, "process_id": 37192 }, "duration": 2667000, diff --git a/tests/snapshots/tests.contrib.anthropic.test_anthropic.test_anthropic_llm_sync_multiple_prompts_with_chat_history.json b/tests/snapshots/tests.contrib.anthropic.test_anthropic.test_anthropic_llm_sync_multiple_prompts_with_chat_history.json index 989d3ab711f..a349a381feb 100644 --- a/tests/snapshots/tests.contrib.anthropic.test_anthropic.test_anthropic_llm_sync_multiple_prompts_with_chat_history.json +++ b/tests/snapshots/tests.contrib.anthropic.test_anthropic.test_anthropic_llm_sync_multiple_prompts_with_chat_history.json @@ -2,7 +2,7 @@ { "name": "anthropic.request", "service": "", - "resource": "anthropic.resources.messages.Messages.create", + "resource": "Messages.create", "trace_id": 0, "span_id": 1, "parent_id": 0, @@ -41,6 +41,7 @@ "_sampling_priority_v1": 1, "anthropic.response.usage.input_tokens": 84, "anthropic.response.usage.output_tokens": 155, + "anthropic.response.usage.total_tokens": 239, "process_id": 88493 }, "duration": 4876000, diff --git a/tests/snapshots/tests.contrib.anthropic.test_anthropic.test_anthropic_llm_sync_stream.json b/tests/snapshots/tests.contrib.anthropic.test_anthropic.test_anthropic_llm_sync_stream.json new file mode 100644 index 00000000000..288415a3b5a --- /dev/null +++ 
b/tests/snapshots/tests.contrib.anthropic.test_anthropic.test_anthropic_llm_sync_stream.json @@ -0,0 +1,32 @@ +[[ + { + "name": "anthropic.request", + "service": "", + "resource": "Messages.create", + "trace_id": 0, + "span_id": 1, + "parent_id": 0, + "type": "", + "error": 0, + "meta": { + "_dd.p.dm": "-0", + "_dd.p.tid": "665e1e0700000000", + "anthropic.request.api_key": "...key>", + "anthropic.request.messages.0.content.0.text": "Can you explain what Descartes meant by 'I think, therefore I am'?", + "anthropic.request.messages.0.content.0.type": "text", + "anthropic.request.messages.0.role": "user", + "anthropic.request.model": "claude-3-opus-20240229", + "anthropic.request.parameters": "{\"model\": \"claude-3-opus-20240229\", \"max_tokens\": 1024, \"stream\": true}", + "language": "python", + "runtime-id": "93b0a0a29f0140f29375dc8bf89847b9" + }, + "metrics": { + "_dd.measured": 1, + "_dd.top_level": 1, + "_dd.tracer_kr": 1.0, + "_sampling_priority_v1": 1, + "process_id": 42080 + }, + "duration": 44181000, + "start": 1717444103186786000 + }]] From 459f6bb7a542aee7d41d7f593394450a88159b72 Mon Sep 17 00:00:00 2001 From: William Conti Date: Mon, 3 Jun 2024 16:01:10 -0400 Subject: [PATCH 07/33] add init comment --- ddtrace/contrib/anthropic/__init__.py | 83 ++++++++++++++++++++++- tests/contrib/anthropic/test_anthropic.py | 1 - 2 files changed, 82 insertions(+), 2 deletions(-) diff --git a/ddtrace/contrib/anthropic/__init__.py b/ddtrace/contrib/anthropic/__init__.py index aeff9842012..4be873eac84 100644 --- a/ddtrace/contrib/anthropic/__init__.py +++ b/ddtrace/contrib/anthropic/__init__.py @@ -1,5 +1,86 @@ """ -Do later. +The Anthropic integration instruments the Anthropic Python library to traces for requests made to the models for messages. + +All traces submitted from the Anthropic integration are tagged by: + +- ``service``, ``env``, ``version``: see the `Unified Service Tagging docs `_. +- ``anthropic.request.model``: Anthropic model used in the request. 
+- ``anthropic.request.api_key``: Anthropic API key used to make the request (obfuscated to match the Anthropic UI representation ``sk-...XXXX`` where ``XXXX`` is the last 4 digits of the key). +- ``anthropic.request.parameters``: Parameters used in anthropic package call. + + +(beta) Prompt and Completion Sampling +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +The following data is collected in span tags with a default sampling rate of ``1.0``: + +- Prompt inputs and completions for the ``Messages.create`` endpoint. + + +Enabling +~~~~~~~~ + +The Anthropic integration is enabled automatically when you use +:ref:`ddtrace-run` or :ref:`import ddtrace.auto`. + +Note that these commands also enable the ``requests`` and ``aiohttp`` +integrations which trace HTTP requests from the Anthropic library. + +Alternatively, use :func:`patch() ` to manually enable the Anthropic integration:: + + from ddtrace import config, patch + + patch(anthropic=True) + + +Global Configuration +~~~~~~~~~~~~~~~~~~~~ + +.. py:data:: ddtrace.config.anthropic["service"] + + The service name reported by default for Anthropic requests. + + Alternatively, you can set this option with the ``DD_SERVICE`` or ``DD_ANTHROPIC_SERVICE`` environment + variables. + + Default: ``DD_SERVICE`` + + +.. py:data:: (beta) ddtrace.config.anthropic["span_char_limit"] + + Configure the maximum number of characters for the following data within span tags: + + - Message inputs and completions + + Text exceeding the maximum number of characters is truncated to the character limit + and has ``...`` appended to the end. + + Alternatively, you can set this option with the ``DD_ANTHROPIC_SPAN_CHAR_LIMIT`` environment + variable. + + Default: ``128`` + + +.. py:data:: (beta) ddtrace.config.anthropic["span_prompt_completion_sample_rate"] + + Configure the sample rate for the collection of prompts and completions as span tags. 
+ + Alternatively, you can set this option with the ``DD_ANTHROPIC_SPAN_PROMPT_COMPLETION_SAMPLE_RATE`` environment + variable. + + Default: ``1.0`` + + +Instance Configuration +~~~~~~~~~~~~~~~~~~~~~~ + +To configure the Anthropic integration on a per-instance basis use the +``Pin`` API:: + + import anthropic + from ddtrace import Pin, config + + Pin.override(anthropic, service="my-anthropic-service") """ # noqa: E501 from ...internal.utils.importlib import require_modules diff --git a/tests/contrib/anthropic/test_anthropic.py b/tests/contrib/anthropic/test_anthropic.py index 17f669256f5..4989b756bf3 100644 --- a/tests/contrib/anthropic/test_anthropic.py +++ b/tests/contrib/anthropic/test_anthropic.py @@ -137,4 +137,3 @@ def test_anthropic_llm_sync_stream(anthropic, request_vcr): ) for chunk in stream: print(chunk.type) - From d25e7b83469aa0b920220473a412784b140f74fb Mon Sep 17 00:00:00 2001 From: William Conti Date: Mon, 3 Jun 2024 16:08:01 -0400 Subject: [PATCH 08/33] reduce max tokens --- .../cassettes/anthropic_completion_error.yaml | 18 +- .../cassettes/anthropic_completion_sync.yaml | 48 +- ...nthropic_completion_sync_global_tags.yaml} | 46 +- ...nthropic_completion_sync_multi_prompt.yaml | 55 +-- ...n_sync_multi_prompt_with_chat_history.yaml | 45 +- .../anthropic_completion_sync_stream.yaml | 443 +++++------------- tests/contrib/anthropic/test_anthropic.py | 12 +- ...st_anthropic.test_anthropic_llm_error.json | 16 +- ...est_anthropic.test_anthropic_llm_sync.json | 76 +-- ...t_anthropic_llm_sync_multiple_prompts.json | 22 +- ...nc_multiple_prompts_with_chat_history.json | 20 +- ...hropic.test_anthropic_llm_sync_stream.json | 12 +- 12 files changed, 300 insertions(+), 513 deletions(-) rename tests/contrib/anthropic/cassettes/{anthropic_completion_sync_39.yaml => anthropic_completion_sync_global_tags.yaml} (51%) diff --git a/tests/contrib/anthropic/cassettes/anthropic_completion_error.yaml b/tests/contrib/anthropic/cassettes/anthropic_completion_error.yaml 
index bbaf3267206..9a62acea110 100644 --- a/tests/contrib/anthropic/cassettes/anthropic_completion_error.yaml +++ b/tests/contrib/anthropic/cassettes/anthropic_completion_error.yaml @@ -1,6 +1,6 @@ interactions: - request: - body: '{"max_tokens": 1024, "messages": ["Invalid content"], "model": "claude-3-opus-20240229"}' + body: '{"max_tokens": 15, "messages": ["Invalid content"], "model": "claude-3-opus-20240229"}' headers: accept: - application/json @@ -11,13 +11,13 @@ interactions: connection: - keep-alive content-length: - - '88' + - '86' content-type: - application/json host: - api.anthropic.com user-agent: - - Anthropic/Python 0.26.1 + - Anthropic/Python 0.28.0 x-stainless-arch: - arm64 x-stainless-async: @@ -27,7 +27,7 @@ interactions: x-stainless-os: - MacOS x-stainless-package-version: - - 0.26.1 + - 0.28.0 x-stainless-runtime: - CPython x-stainless-runtime-version: @@ -42,7 +42,7 @@ interactions: CF-Cache-Status: - DYNAMIC CF-RAY: - - 88c85bd75e437274-EWR + - 88e24ced7c9f4265-EWR Connection: - keep-alive Content-Length: @@ -50,18 +50,18 @@ interactions: Content-Type: - application/json Date: - - Fri, 31 May 2024 16:32:14 GMT + - Mon, 03 Jun 2024 20:05:52 GMT Server: - cloudflare request-id: - - req_01N7iW6qh7wHr9je4z3FDn2n + - req_01LsGyzwBtnCxAUjeyT4tmSs via: - 1.1 google x-cloud-trace-context: - - cf561ed8cbadcfc1748718321572db36 + - bbb9c1f0aa9c1d6f521121102e32ca2d x-should-retry: - 'false' status: code: 400 message: Bad Request -version: 1 \ No newline at end of file +version: 1 diff --git a/tests/contrib/anthropic/cassettes/anthropic_completion_sync.yaml b/tests/contrib/anthropic/cassettes/anthropic_completion_sync.yaml index 1f8d5f0500b..247fd016a79 100644 --- a/tests/contrib/anthropic/cassettes/anthropic_completion_sync.yaml +++ b/tests/contrib/anthropic/cassettes/anthropic_completion_sync.yaml @@ -1,8 +1,8 @@ interactions: - request: - body: '{"max_tokens": 1024, "messages": [{"role": "user", "content": [{"type": - "text", "text": "Can you explain 
what Descartes meant by ''I think, therefore - I am''?"}]}], "model": "claude-3-opus-20240229"}' + body: '{"max_tokens": 15, "messages": [{"role": "user", "content": [{"type": "text", + "text": "Can you explain what Descartes meant by ''I think, therefore I am''?"}]}], + "model": "claude-3-opus-20240229"}' headers: accept: - application/json @@ -13,13 +13,13 @@ interactions: connection: - keep-alive content-length: - - '196' + - '194' content-type: - application/json host: - api.anthropic.com user-agent: - - Anthropic/Python 0.26.1 + - Anthropic/Python 0.28.0 x-stainless-arch: - arm64 x-stainless-async: @@ -29,7 +29,7 @@ interactions: x-stainless-os: - MacOS x-stainless-package-version: - - 0.26.1 + - 0.28.0 x-stainless-runtime: - CPython x-stainless-runtime-version: @@ -39,26 +39,16 @@ interactions: response: body: string: !!binary | - H4sIAAAAAAAAA3xU224cNwz9FUIvaYHxwlkHNbqPberaRQoURZAC6RaGVuKsVGvEiUjtemP4g/od - /bGAmr0Bbvo0wIg8PDw85JOJ3izMwOv7y9e/ucz05u/rYfdhc/PLx3efw/Vt+MF0RnYjahQy2zWa - zhRK+sMyRxabxXRmII/JLIxLtnq8uLqgsfLF/HL+5nI+/950xlEWzGIWfz4dAAUfNbV9FuZ9QBhD - sYywNHcgIeaHDiRgwZ4Kwh3YYWkgMliQYjMnK5EyUK9B8M5KzCeAH2kdhTrAsibgOnRLA9sQXYCt - ZXAUM3pY7VrqTcHsAowhJmIaAxb4HfO//8BbZGeLIEPMECLDlsoDLM3byI5qYQTKDeFXlEB+aeCb - 199dXX87g/cazWIFB8yipDWsp5r9kfUR/dWp9A5s9lOPjkrGwkIZNfoPZMGSz0Jny7zMJ4raF6Mt - LsS8hp4K2AyxFOyr2FVCkFIlgAQr4KgmD4xlg2C12BmzlpkSkEoPD5m2Cf0aZ3CLgMOYaIceLAyt - ZWXGOxYcrEQHnupKOvhUkRVLieAGy04ap4CwwhRxgx6EYNUoYQcxu1S9RqhI+BhZMDs8TBYftXGb - VP3km0C4wWkgtM2wIj9pcUtbrdadza2gTfGz1tO2m5maUCrqkdSkRiaZ+C+mWVknU1aYUpohY17P - 4CetHnt9aAnoz7vExNh8C/oPFIb6KVCfqRyRYCy0OXAL+6T2qE0MlWUSYwZ3eT+OLRXPU+Ae+AgW - hTH16p2xEPX6Rhlf8UnQJtJNoUGTDtq0oZ9L5qiN40yz/1JrheCwiI3NyqrPYRyn+TVnHfmtsInX - NmOFzg7YsFfoC7kHqCPl/YaeitSYfAM+uj46m/aOm5zAau7I4aV3JsUab+6AqwvK6GeaLPTSWU2e - u6zXYrBl1/3vGcLs7Mg1WZXsbJM3Ebcn4V7MaLoDA7FArxun18Gmr05sz7ShaW7b4JfH5Gsra57/ - 6gwLjfcFLVM2C4PZ30st2ewfGD9VrWQWuabUmdpO/OLJxDxWuRd6wMxmMb/uDFU5/3V1efX8/AUA - AP//AwAfgEAQQQYAAA== + 
H4sIAAAAAAAAA0xPy2rDMBD8FbGnHmRw3EeormkPKT2FQilNMSLeSiLyytGumgTjfy8ODfQ0MC9m + RggdGOjZtfXi+egPzebnyb24j2W/csfN6v3BgwY5Dzi7kNk6BA05xZmwzIHFkoCGPnUYwcAu2tJh + dVuloXDV1M1d3TSPoGGXSJAEzOd4LRQ8zdELGHjzqAafLaPawlqJD7TXSjxm/E4Z1VrZfgvqJuXg + AtkYzyqQerUSSFmG6UsDSxrajJYTzXvtqZW0R2L4kxgPBWmHYKjEqKFc/pgRAg1FrmbTLDWkIv+p + xf00/QIAAP//AwAjDM/sLQEAAA== headers: CF-Cache-Status: - DYNAMIC CF-RAY: - - 88c85b052be7428b-EWR + - 88e24ceedab20cb0-EWR Connection: - keep-alive Content-Encoding: @@ -66,7 +56,7 @@ interactions: Content-Type: - application/json Date: - - Fri, 31 May 2024 16:31:57 GMT + - Mon, 03 Jun 2024 20:05:54 GMT Server: - cloudflare Transfer-Encoding: @@ -74,21 +64,21 @@ interactions: anthropic-ratelimit-requests-limit: - '5' anthropic-ratelimit-requests-remaining: - - '5' + - '1' anthropic-ratelimit-requests-reset: - - '2024-05-31T16:32:57Z' + - '2024-06-03T20:05:57Z' anthropic-ratelimit-tokens-limit: - '10000' anthropic-ratelimit-tokens-remaining: - - '10000' + - '9000' anthropic-ratelimit-tokens-reset: - - '2024-05-31T16:32:57Z' + - '2024-06-03T20:05:57Z' request-id: - - req_01Ybd82xxyNova6PBsMeHobV + - req_01APGLDxmWmg64SznQbxJTHy via: - 1.1 google x-cloud-trace-context: - - 1ba1eaa11fc86ede89ae23d3ae4aefe1 + - 0c2fa5913c47bc6b0a3e8a2661af4a7b status: code: 200 message: OK diff --git a/tests/contrib/anthropic/cassettes/anthropic_completion_sync_39.yaml b/tests/contrib/anthropic/cassettes/anthropic_completion_sync_global_tags.yaml similarity index 51% rename from tests/contrib/anthropic/cassettes/anthropic_completion_sync_39.yaml rename to tests/contrib/anthropic/cassettes/anthropic_completion_sync_global_tags.yaml index 479ace5a990..ed4e63bcccd 100644 --- a/tests/contrib/anthropic/cassettes/anthropic_completion_sync_39.yaml +++ b/tests/contrib/anthropic/cassettes/anthropic_completion_sync_global_tags.yaml @@ -38,27 +38,27 @@ interactions: response: body: string: !!binary | - H4sIAAAAAAAAA4RVzY4bNwx+FUKXtsBkkfWmReJbD+2mQNAekiBo62JBjzgeYjXkVKTsToK8Sd6m - 
L1ZI9m686aI9GdBQ1PfDj/4QOIZ1mGx38/Ty27e/rn4cvnvxdvltpJfXr6bl1ffvY+iCLzPVKjLD - HYUuZE31AM3YHMVDFyaNlMI69AlLpCdXT3Qu9mT1dPXs6Wr1InShV3ESD+vfP9w1dPqrXm0/6/Bu - JIGfmfy99SPBgJMWSwtE6hNmirAJ1xqBDSJh3ARggZENtqq3sAlvRoJrXOB1zyQ9bQJ8ffn8+eqb - DkaCAxqIOiR2ypjSAlZ2OzJn2YGP6IAQec9CsKV6NmKEyBQv4CcxJ4z3bSa8rQUI88hJTeeRe0yA - EqEvyUvGBLo1ynt0VgHcanHwkRqR+oIO4Bkj18+YIFPiHWsx2FJiGlqrWt+rGP1ZKhtrlypbG3nw - Sv0dmVMWMO2ZfLnYyEZeUibATGA6EdzSArOyuIErFImUq1vxBOle6q8MzNFpIvF1bXN5AW8e4v2M - EYuPmtmX9ZlXmHeF4lHIivwHSbwbnaS27MCqJc4D94Bxj9K3p6y7Z5rZ2jNGfUmYYSwTCtvUbKCs - kY51gxaJTVb7PxWbHqsjkaTW6nFrmooTTFpd2mMqZGt4xz5+adDJCha41tidUe0T8nTO9c6Hg+YU - G+Ck5sBusEVjg0Hz4y93kAhjm0AFBCM5qiA8cqrsqzyRbEbOjc7VkY4QxdY1U23U9Dh3o2Hfn2Pc - hEjoY21+rTU6cyYjcYqAAjrPmr0I+9LaHtX3pcLqM6HXJw8n0A3URCgsO+sqRYqgAiw1P7F8GQUc - Bs7TMQo6QOKBIKOPlCs8OfMtau+ZhRrVZ0eqd4NRSfz9aUt5IrF+XD8Y3l6lp9nv6jbhrHITQDNs - wusyU55QNgEyndhbK+dINbFDTTRlU4HDqEDTNmPNXQXcnWRoFziDHuTewkq2orSaqj21lokn9sfH - tPlftWUBPM3MgX2sgbzW2Lj/sm87qntAcs7aJ/ws5KO2sgHCRI7zqLl5WcvmrC04/94dR5eokbhf - Zwug/feCOuUktnt1NA64NKafd8xxr9KJYZuIKmE9Pg0P0ER5R/EifPyjC+Y632RCUwnrQBJvvGQJ - pw93azCspaTUhdL+h9YfAstc/Mb1lsTCerXqghY/P7q6uvz48R8AAAD//wMAxlhmZuYGAAA= + H4sIAAAAAAAAA3SV3W4bNxCFX2XAm6bAWrDlNE10G7duUKA3KRAgVWCMlqPlQFzOhjOUohp+k7xN + X6wgJcWKk1wJoubvfGdI3Tv2buFGHe4ur27k/fvX4/rq1WZVbl6Emxe/8u9eXOdsP1GNIlUcyHUu + S6wHqMpqmMx1bhRP0S1cH7F4uri+kKnoxfxy/vxyPn/lOtdLMkrmFv/cnwoafaqp7WPh3gVK8BeT + /at9IFjjKEXjHqYsfUQeycPS3YoHVvCEfumAEwRWWIlsYOn+DgS3uIe3PVPqaeng2dXLl/OfOwgE + O1RIYjDihtMACJGNMkZQQ6ORkgGupBhYIKBPrFZrgKzhVvwM3iQ1Qv+lVC9jzamlJLUcT33kVA84 + rWM5ZWeKPLAkwOTBMno2loQRRskY2fZVxDtSo5xApWey/WyZlulP2sMknExrREmecmXtawerqqeQ + UWlRY69mZ+BWFJm25MECHuT8liIPwSjVkbt2lFnbeEp9iZhhChxFZQpM2rVZtVI0XnMP6LeY+gZJ + IaCHWKsL4FE01Qnt1Hpdv1WfmuJA8DpkVmNMB82wzjjSTvKm6ZzPvrFVyzCQmj4qOAenUvKB7aHc + FmMhbd1GwqMDEHlNHewC96EZNmSpDH397eRJdxAjasCmMMmOcquTKVKTXKPrZj/x5vqcN+ahPNJm + rQX1zPqisEa2ADspsbLDI7yJMouvkYkDR9axDky5dvW8ZV8w6jFLLZdhiFQz13wmtY47lTyJnlzg + 
DJG3pG3S5zP44+lKhDJiqps3FjXoM6FRAyC79AOa1QNc6ZOlLgpDYV9BdUe3TwwOU2f6WDgTIGS6 + oFoaq4m1xPeuQpv4lzO2P9WLlnqarKUEgqX77/OK8khJ+7B0IBmW7m2ZKI+Ylq5ZXftvWY+NvmIZ + BPo2mWwp9zLSF/Sw2h9QHO5XpfgExxnlulxt2DcJSBuW7tvHSQFhJMMpSOb+B09NHzANtWdCK/lx + ras/pwuUiDysJX+lxQQMNwSZdJKkvOKWU8O+o+PMS4Sd5OiPq3buw6Oth4tcX0yIkgbKEKRu4Q73 + M/fwoXNqMt1lQpXkFo6Sv7OSkzv+oPSxPYBukUqMnSvtf2Nx7zhNxe5MNpTULebzzkmx86Prq/nD + w/8AAAD//wMA/u+dGZYGAAA= headers: CF-Cache-Status: - DYNAMIC CF-RAY: - - 88e11b85181a0cc4-EWR + - 88e24c811f331865-EWR Connection: - keep-alive Content-Encoding: @@ -66,7 +66,7 @@ interactions: Content-Type: - application/json Date: - - Mon, 03 Jun 2024 16:37:42 GMT + - Mon, 03 Jun 2024 20:05:50 GMT Server: - cloudflare Transfer-Encoding: @@ -74,21 +74,21 @@ interactions: anthropic-ratelimit-requests-limit: - '5' anthropic-ratelimit-requests-remaining: - - '4' + - '3' anthropic-ratelimit-requests-reset: - - '2024-06-03T16:37:57Z' + - '2024-06-03T20:05:57Z' anthropic-ratelimit-tokens-limit: - '10000' anthropic-ratelimit-tokens-remaining: - '10000' anthropic-ratelimit-tokens-reset: - - '2024-06-03T16:37:57Z' + - '2024-06-03T20:05:57Z' request-id: - - req_01End84WeJzYrMjenfX3msVw + - req_01RiCD4awdkdHENeXbiiJ3qF via: - 1.1 google x-cloud-trace-context: - - 884af431d7fdfbe4b21bde7aeefd24d6 + - 9dc6c7c173695d452740285b9cc1bd66 status: code: 200 message: OK diff --git a/tests/contrib/anthropic/cassettes/anthropic_completion_sync_multi_prompt.yaml b/tests/contrib/anthropic/cassettes/anthropic_completion_sync_multi_prompt.yaml index 199883b838a..fbd3e79ade3 100644 --- a/tests/contrib/anthropic/cassettes/anthropic_completion_sync_multi_prompt.yaml +++ b/tests/contrib/anthropic/cassettes/anthropic_completion_sync_multi_prompt.yaml @@ -1,9 +1,9 @@ interactions: - request: - body: '{"max_tokens": 1024, "messages": [{"role": "user", "content": [{"type": - "text", "text": "Hello, I am looking for information about some books!"}, {"type": - "text", "text": "Can you explain what Descartes 
meant by ''I think, therefore - I am''?"}]}], "model": "claude-3-opus-20240229"}' + body: '{"max_tokens": 15, "messages": [{"role": "user", "content": [{"type": "text", + "text": "Hello, I am looking for information about some books!"}, {"type": "text", + "text": "Can you explain what Descartes meant by ''I think, therefore I am''?"}]}], + "model": "claude-3-opus-20240229"}' headers: accept: - application/json @@ -14,13 +14,13 @@ interactions: connection: - keep-alive content-length: - - '279' + - '277' content-type: - application/json host: - api.anthropic.com user-agent: - - Anthropic/Python 0.26.1 + - Anthropic/Python 0.28.0 x-stainless-arch: - arm64 x-stainless-async: @@ -30,7 +30,7 @@ interactions: x-stainless-os: - MacOS x-stainless-package-version: - - 0.26.1 + - 0.28.0 x-stainless-runtime: - CPython x-stainless-runtime-version: @@ -40,27 +40,16 @@ interactions: response: body: string: !!binary | - H4sIAAAAAAAAA3xV224bRwz9FWJeagNrwZfGSfRWNBe7bV6KJIVRFcZol6tlNUtuhhwpW8Mf1O/o - jwUzK1l2kPRFC81yeA4PD7l3jho3d72ubk/Pfvn4xzs5f/uBbq4uPtRvf7188frmZu0qZ+OAOQpV - /Qpd5aKEfOBVSc2zucr10mBwc1cHnxo8uTiRIenJ+en5j6fn5y9d5WphQzY3//Nun9Dwc75aHnP3 - vkMYuugVYeGuwTridQXWYcRWIsI1+H7h4EgirYh9CCMQw2/eiMErLNzPsiKTCjCuBDT1C3cMpOBh - 6CiIytBR7QOoecMe2WA55vTwJiLX3SEKI/yO/N+/8Aq19tFQ4ejs2cvLk7PLZ6fHM7g28MOAPmom - 0JHCVuIaFu4VaS0pKoJwyfwOrZMmkz67vHh+DJ6bidEaR8AwsZC25HiAH2cLXvABe+sVFHFNvAIP - rSRuvJGwD2AxWQdpEIZtR3UHHUItKTSwTBQa8KCjGvYZo8ZonhjWLNuAzQpncIWA/RBkxBzaF7I5 - dLrkjWpoJC2tgk8JNWNmDrmwjQ/UkI052odQClhiIGy1FCkDMQlrwSiUwTpvB34lLzD6GEbADcYx - 93tVAXEdUrPHiRjILynsoDKMIitqMQbgZ1JDrjG/nA4MY1ZmKzE0VeGCG2TovXVTSXvZtMh8JduM - Xj3qdUQf6B/8mjGL7VhnoNbX9hCQO1TsSryaweuMR5MBcpS0071S3i5ajUJ4dOdNlD7/1WoCLCI8 - UMiW2YlgMt3aYWanYTlZPeo9PjRb2hn8pBAke0e/5lrAOt/krEXKGbzPYAEb6KifwHKtvSSFVmKf - QrFe9f0JnS1c1rXkOUyaYtxkk+m3HdxKPDSg+p+6NMXS69zX79v+yThNM18cXUYX1fwykHZF3m94 - SDG0E9O9TrDE/Hs0OVKhzovGs1Yg8RBU2E4zvsTa95jHz3zMjYdBiK3Umcm1KWbVoMeGrEih+5XB - 
3nY1Fh/aWB0mdufnPeGne+IHoAZ96S9xGxKykQ97Iza4wSDDfuHkbR350c6poPdxt2K0o9bAZOtj - MzWsToWfpuXfWBttshM9T/rlb8FeugctiobEkLjBmD8RZaJ3Fc3c/V+VU5PhNqJXYTd3yM2tpchu - 90LxU8o1ujmnECqXypdnfueIh2S3JmtkdfOLF5WTZE+OLp7f338BAAD//wMAFHVJ9tgGAAA= + H4sIAAAAAAAAA0xPTUvDQBD9K8ucPGwgSW2pe+vNil7Ek1bCmEyTpZvddWdWWkL+u6RY8PTgffHe + BLYDAyP3TVmt33e715e2/am2j5v+CzfHlOITaJBLpMVFzNgTaEjBLQQyWxb0AhrG0JEDA63D3FGx + KkLMXNRlfV/W9QNoaIMX8gLmY7oVCp2X6BUMvA2k4pCQSR1gr2Sw/qSVDJToGBKpvcLxAOouJNtb + j85dlPXqGcV6hQzzpwaWEJtEyMEve/HcSDiRZ/iTmL4z+ZbA+Oychnz9YyawPma5mc1qqyFk+U9V + 63n+BQAA//8DAG0IyPstAQAA headers: CF-Cache-Status: - DYNAMIC CF-RAY: - - 88c85b706a4f5e76-EWR + - 88e24cf95a3941d9-EWR Connection: - keep-alive Content-Encoding: @@ -68,7 +57,7 @@ interactions: Content-Type: - application/json Date: - - Fri, 31 May 2024 16:32:13 GMT + - Mon, 03 Jun 2024 20:05:56 GMT Server: - cloudflare Transfer-Encoding: @@ -76,22 +65,24 @@ interactions: anthropic-ratelimit-requests-limit: - '5' anthropic-ratelimit-requests-remaining: - - '4' + - '0' anthropic-ratelimit-requests-reset: - - '2024-05-31T16:32:57Z' + - '2024-06-03T20:05:57Z' anthropic-ratelimit-tokens-limit: - '10000' anthropic-ratelimit-tokens-remaining: - - '10000' + - '9000' anthropic-ratelimit-tokens-reset: - - '2024-05-31T16:32:57Z' + - '2024-06-03T20:05:57Z' request-id: - - req_01T5fqMvSyUMwwDMphBKC7Ba + - req_01X3Zqqeshn8FCupaMdEgqRS + retry-after: + - '1' via: - 1.1 google x-cloud-trace-context: - - 62a976e4f5e79017b5e9a1a54801766b + - 0794e94ca00c706013360048aa1bc46a status: code: 200 message: OK -version: 1 \ No newline at end of file +version: 1 diff --git a/tests/contrib/anthropic/cassettes/anthropic_completion_sync_multi_prompt_with_chat_history.yaml b/tests/contrib/anthropic/cassettes/anthropic_completion_sync_multi_prompt_with_chat_history.yaml index c6d930d9b7c..20c469c774e 100644 --- a/tests/contrib/anthropic/cassettes/anthropic_completion_sync_multi_prompt_with_chat_history.yaml +++ 
b/tests/contrib/anthropic/cassettes/anthropic_completion_sync_multi_prompt_with_chat_history.yaml @@ -1,12 +1,12 @@ interactions: - request: - body: '{"max_tokens": 1024, "messages": [{"role": "user", "content": [{"type": - "text", "text": "Hello, Start all responses with your name Claude."}, {"type": - "text", "text": "End all responses with [COPY, CLAUDE OVER AND OUT!]"}]}, {"role": - "assistant", "content": "Claude: Sure! [COPY, CLAUDE OVER AND OUT!]"}, {"role": - "user", "content": [{"type": "text", "text": "Add the time and date to the beginning - of your response after your name."}, {"type": "text", "text": "Explain string - theory succinctly to a complete noob."}]}], "model": "claude-3-opus-20240229"}' + body: '{"max_tokens": 30, "messages": [{"role": "user", "content": [{"type": "text", + "text": "Hello, Start all responses with your name Claude."}, {"type": "text", + "text": "End all responses with [COPY, CLAUDE OVER AND OUT!]"}]}, {"role": "assistant", + "content": "Claude: Sure! 
[COPY, CLAUDE OVER AND OUT!]"}, {"role": "user", "content": + [{"type": "text", "text": "Add the time and date to the beginning of your response + after your name."}, {"type": "text", "text": "Explain string theory succinctly + to a complete noob."}]}], "model": "claude-3-opus-20240229"}' headers: accept: - application/json @@ -17,7 +17,7 @@ interactions: connection: - keep-alive content-length: - - '555' + - '553' content-type: - application/json host: @@ -43,22 +43,17 @@ interactions: response: body: string: !!binary | - H4sIAAAAAAAAA1xT204kNxD9lYqfDZqZBWnTbwg2iXIj2oVECVqhwq6etsZdbqrKM9tC/PvKzSWI - J9/q1Dk+x35wKbrOjbq9Xa1//m84X5+m307D4Z9ffr2+nr/IRY/OO5snalWkilty3knJbQNVkxqy - Oe/GEim7zoWMNdLRh6MyVT3arDYnq83mR+ddKGzE5rqbh5eGRt8adBk6d74gPfwkKeLs4WySlGHz - 0cNmtfkAaLBed6sTOPujgy8mibdgAxWZISng05wsBczQC450KLKDxDANs6agYAMaTFKmovS8soGg - rxxxJDbMcFdTjq3xXS5hp1D6paRy2pMoAQqBJZ497NOdoLVSXaQstcQk2xkEbSBpDAxTSWxHOe0I - JhRLIZMew9VASq/IpetQ6nYwsPLcmpr0mPqehNjggLN6QI5AGIYX+sIQigjpVDhqA+MbzAuhB61h - AFRABsoUTAp7uK8oOw9FYBqKFT5+5ypmLaB1uyW1//2SJxcwxtT4MYNOaAkzxDQSayqscEdz4bh4 - Z4MQwYEgNDdIAqU9eTgMqUkSglAlU4Q6tYT3JDPoiDmDBnzx6lVRGpdLVk793PSz1RFGCgNyS7jZ - s20hYAahjJb2yWbfQt+nJVdcsIkiRNIgaVo8LD00xvevoS8SaAmW0arQMdycX/71r4fz38+uLz7B - 5d+fPsPZnxdweX31w1f3+NU7tTLdCqEWdp0jjrdWhd3zgdJ9JQ7kOq45e1eX79Q9uMRTtVsrO2J1 - 3ccT70q1t1vr09PHx+8AAAD//wMAORpVdq0DAAA= + H4sIAAAAAAAAA0yQUUvDUAyF/0rIk0ILXTunvW9jPvkk+iAiMkKbtZe1uV2Ty1bG/rt0OvAp4eQ7 + B07O6Gt02GuzzRbP5erjZVq/va6nSh93x+P+c7M6YII2DTxTrEoNY4Jj6GaBVL0aiWGCfai5Q4dV + R7HmtEjDEDXNs3yZ5XmJCVZBjMXQfZ1vgcan2XodDjdXJ9zlWV6kWZFmJSxWbvFw7+DdRi8NWMth + nMAr0O/O5ivqYDdSz8cw7sELDO2kvlKwlgzIjPvBFCxAFL+b4BBJLPbQc9WSzCBJDQ0Lj9Th5TtB + tTBsRyYNMpem09bCnkXx76R8iCwVo5PYdQnG61PcGb0M0W6we1omGKL9l4rscvkBAAD//wMAf6mf + SHIBAAA= headers: CF-Cache-Status: - DYNAMIC CF-RAY: - - 88e120459bfe422e-EWR + - 88e24c723f1e8c71-EWR Connection: - keep-alive Content-Encoding: @@ -66,7 +61,7 @@ interactions: Content-Type: - application/json Date: - - Mon, 03 Jun 2024 
16:40:47 GMT + - Mon, 03 Jun 2024 20:05:35 GMT Server: - cloudflare Transfer-Encoding: @@ -76,19 +71,19 @@ interactions: anthropic-ratelimit-requests-remaining: - '4' anthropic-ratelimit-requests-reset: - - '2024-06-03T16:40:57Z' + - '2024-06-03T20:05:57Z' anthropic-ratelimit-tokens-limit: - '10000' anthropic-ratelimit-tokens-remaining: - '10000' anthropic-ratelimit-tokens-reset: - - '2024-06-03T16:40:57Z' + - '2024-06-03T20:05:57Z' request-id: - - req_0169ecrQS9L2NLLJ5kxTHNhR + - req_01DgqqUcVyhvARruFHNFA9pG via: - 1.1 google x-cloud-trace-context: - - 3ca44323b7c7d8d7e47380ba95e440a0 + - d47e1ebd73e92fe1f28d8b0b5b336751 status: code: 200 message: OK diff --git a/tests/contrib/anthropic/cassettes/anthropic_completion_sync_stream.yaml b/tests/contrib/anthropic/cassettes/anthropic_completion_sync_stream.yaml index b949f366bed..b8aa6c3c194 100644 --- a/tests/contrib/anthropic/cassettes/anthropic_completion_sync_stream.yaml +++ b/tests/contrib/anthropic/cassettes/anthropic_completion_sync_stream.yaml @@ -1,8 +1,8 @@ interactions: - request: - body: '{"max_tokens": 1024, "messages": [{"role": "user", "content": [{"type": - "text", "text": "Can you explain what Descartes meant by ''I think, therefore - I am''?"}]}], "model": "claude-3-opus-20240229", "stream": true}' + body: '{"max_tokens": 15, "messages": [{"role": "user", "content": [{"type": "text", + "text": "Can you explain what Descartes meant by ''I think, therefore I am''?"}]}], + "model": "claude-3-opus-20240229", "stream": true}' headers: accept: - application/json @@ -13,7 +13,7 @@ interactions: connection: - keep-alive content-length: - - '212' + - '210' content-type: - application/json host: @@ -38,316 +38,127 @@ interactions: uri: https://api.anthropic.com/v1/messages response: body: - string: "event: message_start\ndata: 
{\"type\":\"message_start\",\"message\":{\"id\":\"msg_01FEMDqxXS12RxKs3fDbyQSQ\",\"type\":\"message\",\"role\":\"assistant\",\"model\":\"claude-3-opus-20240229\",\"content\":[],\"stop_reason\":null,\"stop_sequence\":null,\"usage\":{\"input_tokens\":27,\"output_tokens\":1}}}\n\nevent: - content_block_start\ndata: {\"type\":\"content_block_start\",\"index\":0,\"content_block\":{\"type\":\"text\",\"text\":\"\"} - \ }\n\nevent: ping\ndata: {\"type\": \"ping\"}\n\nevent: content_block_delta\ndata: - {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"The\"} - \ }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" - phrase\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" - \\\"\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"I\"} - \ }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" - think\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\",\"} - \ }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" - therefore\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" - I\"}}\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" - am\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"\\\"\"} - \ }\n\nevent: content_block_delta\ndata: 
{\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" - (\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"originally\"}}\n\nevent: - content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" - in\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" - Latin\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" - as\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" - \\\"\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"Cog\"} - \ }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"ito\"} - \ }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\",\"} - \ }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" - er\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"go\"} - \ }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" - sum\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"\\\")\"} - \ }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" - is\"} }\n\nevent: content_block_delta\ndata: 
{\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" - a\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" - philosophical\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" - statement\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" - by\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" - Ren\xE9\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" - Des\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"car\"} - \ }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"tes\"} - \ }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\",\"} - \ }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" - a\"}}\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" - French\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" - philosopher\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\",\"} - \ }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" - mathematician\"}}\n\nevent: 
content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\",\"} - \ }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" - an\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"d - scientist\"}}\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" - in\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" - the\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" - \"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"17\"}}\n\nevent: - content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"th\"} - \ }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" - century\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\".\"} - \ }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" - This\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" - statement\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" - is\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" - a\"} }\n\nevent: 
content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" - part\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" - of\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" - his\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" - approach\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" - to\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" - epis\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"tem\"} - }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"ology\"} - \ }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\",\"} - \ }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" - which\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" - is\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" - the\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" - theory\"}}\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" - of\"}}\n\nevent: 
content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" - knowledge\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\".\"} - \ }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"\\n\\nDes\"} - \ }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"car\"} - \ }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"tes\"} - \ }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" - was\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" - seeking\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" - a\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" - foundation\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" - for\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" - knowledge\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" - that\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" - was\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" - beyon\"} 
}\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"d - doubt\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\".\"} - \ }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" - He\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" - use\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"d - a\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" - metho\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"d - of\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" - systematic\"}}\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" - doubt\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\",\"} - \ }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" - questioning\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" - everything\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" - that\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" - coul\"} 
}\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"d - be\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" - doub\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"te\"} - \ }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"d - until\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" - he\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" - reache\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"d - something\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" - that\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" - was\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" - in\"}}\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"dub\"} - \ }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"itable\"} - \ }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\".\"}}\n\nevent: - content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"\\n\\nHe\"} - \ 
}\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" - realize\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"d - that\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" - he\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" - coul\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"d - doubt\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" - the\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" - existence\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" - of\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" - his\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" - body\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\",\"} - \ }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" - the\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" - external\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" - worl\"} 
}\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"d,\"} - \ }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" - an\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"d - almost\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" - everything\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" - else\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\".\"} - \ }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" - However\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\",\"} - \ }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" - he\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" - couldn\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"'t\"} - \ }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" - doubt\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" - the\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" - existence\"} 
}\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" - of\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" - his\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" - own\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" - min\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"d,\"} - \ }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" - because\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" - the\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" - very\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" - act\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" - of\"}}\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" - doub\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"ting\"} - \ }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" - requires\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" - thought\"} }\n\nevent: 
content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\",\"} - \ }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" - an\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"d - thought\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" - requires\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" - a\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" - t\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"hin\"} - \ }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"ker\"} - \ }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\".\"} - \ }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"\\n\\nTherefore\"}}\n\nevent: - content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\",\"} - \ }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" - the\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" - fact\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" - that\"} }\n\nevent: 
content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" - he\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" - was\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" - thinking\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" - prove\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"d - his\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" - existence\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\".\"} - \ }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" - Even\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" - if\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" - he\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" - were\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" - dece\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"ive\"} - \ }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"d - about\"} }\n\nevent: 
content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" - everything\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" - else\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\",\"} - \ }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" - he\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" - couldn\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"'t\"} - \ }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" - be\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" - dece\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"ive\"} - \ }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"d - about\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" - the\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" - fact\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" - that\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" - he\"} }\n\nevent: 
content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" - was\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" - thinking\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\",\"} - \ }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" - an\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"d - therefore\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\",\"} - \ }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" - the\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" - fact\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" - that\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" - he\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" - existe\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"d.\"} - }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"\\n\\nIn\"} - \ }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" - other\"} }\n\nevent: 
content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" - words\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\",\"} - \ }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" - \\\"\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"I\"} - \ }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" - think\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\",\"} - \ }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" - therefore\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" - I\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" - am\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"\\\"\"} - \ }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" - means\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" - that\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" - the\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" - act\"} }\n\nevent: 
content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" - of\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" - thinking\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" - itself\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" - is\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" - proof\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" - of\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" - one\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"'s\"}}\n\nevent: - content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" - existence\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\".\"} - \ }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" - It\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"'s\"} - \ }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" - a\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" - foun\"}}\n\nevent: 
content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"dational\"} - \ }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" - principle\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" - from\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" - which\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" - Des\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"car\"} - \ }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"tes\"} - \ }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" - believe\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"d - all\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" - other\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" - knowledge\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" - coul\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"d - be\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" - derive\"} 
}\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"d.\"} - \ }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"\\n\\nThis\"} - \ }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" - statement\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" - has\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" - been\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" - influential\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" - in\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" - the\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" - development\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" - of\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" - Western\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" - philosophy\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\",\"} - \ }\n\nevent: content_block_delta\ndata: 
{\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" - particularly\"}}\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" - in\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" - the\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" - fields\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" - of\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" - epis\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"tem\"} - \ }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"ology\"} - \ }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" - an\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"d - metaph\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"ys\"} - \ }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"ics\"} - \ }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\".\"} - \ }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" - It\"} }\n\nevent: content_block_delta\ndata: 
{\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" - marks\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" - a\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" - significant\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" - break\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" - from\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" - medieval\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" - schol\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"ast\"} - \ }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"icism\"} - \ }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" - an\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"d - a\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" - turn\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" - towards\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" - the\"} }\n\nevent: content_block_delta\ndata: 
{\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" - subj\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"ective\"} - \ }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" - individual\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" - as\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" - the\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" - foundation\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" - of\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" - philosophical\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" - inquiry\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\".\"} - \ }\n\nevent: content_block_stop\ndata: {\"type\":\"content_block_stop\",\"index\":0 - \ }\n\nevent: message_delta\ndata: {\"type\":\"message_delta\",\"delta\":{\"stop_reason\":\"end_turn\",\"stop_sequence\":null},\"usage\":{\"output_tokens\":311} - \ }\n\nevent: message_stop\ndata: {\"type\":\"message_stop\"}\n\n" + string: 'event: message_start + + data: {"type":"message_start","message":{"id":"msg_01Ea8X6hVwT5cbZ6VCiv38Au","type":"message","role":"assistant","model":"claude-3-opus-20240229","content":[],"stop_reason":null,"stop_sequence":null,"usage":{"input_tokens":27,"output_tokens":1}} } 
+ + + event: content_block_start + + data: {"type":"content_block_start","index":0,"content_block":{"type":"text","text":""} } + + + event: ping + + data: {"type": "ping"} + + + event: content_block_delta + + data: {"type":"content_block_delta","index":0,"delta":{"type":"text_delta","text":"The"} } + + + event: content_block_delta + + data: {"type":"content_block_delta","index":0,"delta":{"type":"text_delta","text":" + phrase"} } + + + event: content_block_delta + + data: {"type":"content_block_delta","index":0,"delta":{"type":"text_delta","text":" + \""} } + + + event: content_block_delta + + data: {"type":"content_block_delta","index":0,"delta":{"type":"text_delta","text":"I"} } + + + event: content_block_delta + + data: {"type":"content_block_delta","index":0,"delta":{"type":"text_delta","text":" + think"} } + + + event: content_block_delta + + data: {"type":"content_block_delta","index":0,"delta":{"type":"text_delta","text":","} } + + + event: content_block_delta + + data: {"type":"content_block_delta","index":0,"delta":{"type":"text_delta","text":" + therefore"} } + + + event: content_block_delta + + data: {"type":"content_block_delta","index":0,"delta":{"type":"text_delta","text":" + I"} } + + + event: content_block_delta + + data: {"type":"content_block_delta","index":0,"delta":{"type":"text_delta","text":" + am"} } + + + event: content_block_delta + + data: {"type":"content_block_delta","index":0,"delta":{"type":"text_delta","text":"\""} } + + + event: content_block_delta + + data: {"type":"content_block_delta","index":0,"delta":{"type":"text_delta","text":" + ("} } + + + event: content_block_delta + + data: {"type":"content_block_delta","index":0,"delta":{"type":"text_delta","text":"originally"}} + + + event: content_block_delta + + data: {"type":"content_block_delta","index":0,"delta":{"type":"text_delta","text":" + in"} } + + + event: content_block_delta + + data: {"type":"content_block_delta","index":0,"delta":{"type":"text_delta","text":" + Latin"} } + 
+ + event: content_block_delta + + data: {"type":"content_block_delta","index":0,"delta":{"type":"text_delta","text":" + as"} } + + + event: content_block_stop + + data: {"type":"content_block_stop","index":0} + + + event: message_delta + + data: {"type":"message_delta","delta":{"stop_reason":"max_tokens","stop_sequence":null},"usage":{"output_tokens":15} } + + + event: message_stop + + data: {"type":"message_stop" } + + + ' headers: CF-Cache-Status: - DYNAMIC CF-RAY: - - 88e232357b2c19c3-EWR + - 88e24ce0ff9e8c51-EWR Cache-Control: - no-cache Connection: @@ -355,7 +166,7 @@ interactions: Content-Type: - text/event-stream; charset=utf-8 Date: - - Mon, 03 Jun 2024 19:47:39 GMT + - Mon, 03 Jun 2024 20:05:52 GMT Server: - cloudflare Transfer-Encoding: @@ -363,17 +174,17 @@ interactions: anthropic-ratelimit-requests-limit: - '5' anthropic-ratelimit-requests-remaining: - - '4' + - '2' anthropic-ratelimit-requests-reset: - - '2024-06-03T19:47:57Z' + - '2024-06-03T20:05:57Z' anthropic-ratelimit-tokens-limit: - '10000' anthropic-ratelimit-tokens-remaining: - - '9000' + - '10000' anthropic-ratelimit-tokens-reset: - - '2024-06-03T19:47:57Z' + - '2024-06-03T20:05:57Z' request-id: - - req_01JP4anL918nDaHauSNAs9Hu + - req_01DBURoYEwEGrcb7WMjs6xMx via: - 1.1 google status: diff --git a/tests/contrib/anthropic/test_anthropic.py b/tests/contrib/anthropic/test_anthropic.py index 4989b756bf3..4a1b4589270 100644 --- a/tests/contrib/anthropic/test_anthropic.py +++ b/tests/contrib/anthropic/test_anthropic.py @@ -18,7 +18,7 @@ def test_global_tags(ddtrace_config_anthropic, anthropic, request_vcr, mock_trac """ llm = anthropic.Anthropic() with override_global_config(dict(service="test-svc", env="staging", version="1234")): - cassette_name = "anthropic_completion_sync_39.yaml" + cassette_name = "anthropic_completion_sync_global_tags.yaml" with request_vcr.use_cassette(cassette_name): llm.messages.create( model="claude-3-opus-20240229", @@ -42,7 +42,7 @@ def 
test_anthropic_llm_sync(anthropic, request_vcr): with request_vcr.use_cassette("anthropic_completion_sync.yaml"): llm.messages.create( model="claude-3-opus-20240229", - max_tokens=1024, + max_tokens=15, messages=[ { "role": "user", @@ -63,7 +63,7 @@ def test_anthropic_llm_sync_multiple_prompts(anthropic, request_vcr): with request_vcr.use_cassette("anthropic_completion_sync_multi_prompt.yaml"): llm.messages.create( model="claude-3-opus-20240229", - max_tokens=1024, + max_tokens=15, messages=[ { "role": "user", @@ -82,7 +82,7 @@ def test_anthropic_llm_sync_multiple_prompts_with_chat_history(anthropic, reques with request_vcr.use_cassette("anthropic_completion_sync_multi_prompt_with_chat_history.yaml"): llm.messages.create( model="claude-3-opus-20240229", - max_tokens=1024, + max_tokens=30, messages=[ { "role": "user", @@ -112,7 +112,7 @@ def test_anthropic_llm_error(anthropic, request_vcr): invalid_error = anthropic.BadRequestError with pytest.raises(invalid_error): with request_vcr.use_cassette("anthropic_completion_error.yaml"): - llm.messages.create(model="claude-3-opus-20240229", max_tokens=1024, messages=["Invalid content"]) + llm.messages.create(model="claude-3-opus-20240229", max_tokens=15, messages=["Invalid content"]) @pytest.mark.snapshot() @@ -121,7 +121,7 @@ def test_anthropic_llm_sync_stream(anthropic, request_vcr): with request_vcr.use_cassette("anthropic_completion_sync_stream.yaml"): stream = llm.messages.create( model="claude-3-opus-20240229", - max_tokens=1024, + max_tokens=15, messages=[ { "role": "user", diff --git a/tests/snapshots/tests.contrib.anthropic.test_anthropic.test_anthropic_llm_error.json b/tests/snapshots/tests.contrib.anthropic.test_anthropic.test_anthropic_llm_error.json index 788829c06b0..d07fddd5a4d 100644 --- a/tests/snapshots/tests.contrib.anthropic.test_anthropic.test_anthropic_llm_error.json +++ b/tests/snapshots/tests.contrib.anthropic.test_anthropic.test_anthropic_llm_error.json @@ -10,23 +10,23 @@ "error": 1, "meta": { 
"_dd.p.dm": "-0", - "_dd.p.tid": "665de86c00000000", + "_dd.p.tid": "665e221e00000000", "anthropic.request.api_key": "...key>", "anthropic.request.model": "claude-3-opus-20240229", - "anthropic.request.parameters": "{\"model\": \"claude-3-opus-20240229\", \"max_tokens\": 1024}", + "anthropic.request.parameters": "{\"model\": \"claude-3-opus-20240229\", \"max_tokens\": 15}", "error.message": "Error code: 400 - {'type': 'error', 'error': {'type': 'invalid_request_error', 'message': 'messages.0: Input does not match the expected shape.'}}", - "error.stack": "Traceback (most recent call last):\n File \"/Users/william.conti/Documents/dd-trace/dd-trace-py/ddtrace/contrib/anthropic/patch.py\", line 106, in traced_chat_model_generate\n chat_completions = func(*args, **kwargs)\n File \"/Users/william.conti/Documents/dd-trace/dd-trace-py/.riot/venv_py31013_mock_pytest_pytest-mock_coverage_pytest-cov_opentracing_hypothesis6451_pytest-asyncio_vcrpy_anthropic/lib/python3.10/site-packages/anthropic/_utils/_utils.py\", line 277, in wrapper\n return func(*args, **kwargs)\n File \"/Users/william.conti/Documents/dd-trace/dd-trace-py/.riot/venv_py31013_mock_pytest_pytest-mock_coverage_pytest-cov_opentracing_hypothesis6451_pytest-asyncio_vcrpy_anthropic/lib/python3.10/site-packages/anthropic/resources/messages.py\", line 681, in create\n return self._post(\n File \"/Users/william.conti/Documents/dd-trace/dd-trace-py/.riot/venv_py31013_mock_pytest_pytest-mock_coverage_pytest-cov_opentracing_hypothesis6451_pytest-asyncio_vcrpy_anthropic/lib/python3.10/site-packages/anthropic/_base_client.py\", line 1239, in post\n return cast(ResponseT, self.request(cast_to, opts, stream=stream, stream_cls=stream_cls))\n File \"/Users/william.conti/Documents/dd-trace/dd-trace-py/.riot/venv_py31013_mock_pytest_pytest-mock_coverage_pytest-cov_opentracing_hypothesis6451_pytest-asyncio_vcrpy_anthropic/lib/python3.10/site-packages/anthropic/_base_client.py\", line 921, in request\n return self._request(\n 
File \"/Users/william.conti/Documents/dd-trace/dd-trace-py/.riot/venv_py31013_mock_pytest_pytest-mock_coverage_pytest-cov_opentracing_hypothesis6451_pytest-asyncio_vcrpy_anthropic/lib/python3.10/site-packages/anthropic/_base_client.py\", line 1019, in _request\n raise self._make_status_error_from_response(err.response) from None\nanthropic.BadRequestError: Error code: 400 - {'type': 'error', 'error': {'type': 'invalid_request_error', 'message': 'messages.0: Input does not match the expected shape.'}}\n", + "error.stack": "Traceback (most recent call last):\n File \"/Users/william.conti/Documents/dd-trace/dd-trace-py/ddtrace/contrib/anthropic/patch.py\", line 105, in traced_chat_model_generate\n chat_completions = func(*args, **kwargs)\n File \"/Users/william.conti/Documents/dd-trace/dd-trace-py/.riot/venv_py31013_mock_pytest_pytest-mock_coverage_pytest-cov_opentracing_hypothesis6451_vcrpy_pytest-asyncio_tiktoken_huggingface-hub_ai21_exceptiongroup_psutil_pytest-randomly_numexpr_anthropic/lib/python3.10/site-packages/anthropic/_utils/_utils.py\", line 277, in wrapper\n return func(*args, **kwargs)\n File \"/Users/william.conti/Documents/dd-trace/dd-trace-py/.riot/venv_py31013_mock_pytest_pytest-mock_coverage_pytest-cov_opentracing_hypothesis6451_vcrpy_pytest-asyncio_tiktoken_huggingface-hub_ai21_exceptiongroup_psutil_pytest-randomly_numexpr_anthropic/lib/python3.10/site-packages/anthropic/resources/messages.py\", line 899, in create\n return self._post(\n File \"/Users/william.conti/Documents/dd-trace/dd-trace-py/.riot/venv_py31013_mock_pytest_pytest-mock_coverage_pytest-cov_opentracing_hypothesis6451_vcrpy_pytest-asyncio_tiktoken_huggingface-hub_ai21_exceptiongroup_psutil_pytest-randomly_numexpr_anthropic/lib/python3.10/site-packages/anthropic/_base_client.py\", line 1239, in post\n return cast(ResponseT, self.request(cast_to, opts, stream=stream, stream_cls=stream_cls))\n File 
\"/Users/william.conti/Documents/dd-trace/dd-trace-py/.riot/venv_py31013_mock_pytest_pytest-mock_coverage_pytest-cov_opentracing_hypothesis6451_vcrpy_pytest-asyncio_tiktoken_huggingface-hub_ai21_exceptiongroup_psutil_pytest-randomly_numexpr_anthropic/lib/python3.10/site-packages/anthropic/_base_client.py\", line 921, in request\n return self._request(\n File \"/Users/william.conti/Documents/dd-trace/dd-trace-py/.riot/venv_py31013_mock_pytest_pytest-mock_coverage_pytest-cov_opentracing_hypothesis6451_vcrpy_pytest-asyncio_tiktoken_huggingface-hub_ai21_exceptiongroup_psutil_pytest-randomly_numexpr_anthropic/lib/python3.10/site-packages/anthropic/_base_client.py\", line 1019, in _request\n raise self._make_status_error_from_response(err.response) from None\nanthropic.BadRequestError: Error code: 400 - {'type': 'error', 'error': {'type': 'invalid_request_error', 'message': 'messages.0: Input does not match the expected shape.'}}\n", "error.type": "anthropic.BadRequestError", "language": "python", - "runtime-id": "8e2ce3f9d69c4b6393f8f97d17bc43d3" + "runtime-id": "b52cab756a314569a6d74fe80724c91a" }, "metrics": { "_dd.measured": 1, "_dd.top_level": 1, "_dd.tracer_kr": 1.0, "_sampling_priority_v1": 1, - "process_id": 37192 + "process_id": 95434 }, - "duration": 2603000, - "start": 1717430380420422000 - }]] \ No newline at end of file + "duration": 166228000, + "start": 1717445150258843000 + }]] diff --git a/tests/snapshots/tests.contrib.anthropic.test_anthropic.test_anthropic_llm_sync.json b/tests/snapshots/tests.contrib.anthropic.test_anthropic.test_anthropic_llm_sync.json index 1faae35033c..19bd3106442 100644 --- a/tests/snapshots/tests.contrib.anthropic.test_anthropic.test_anthropic_llm_sync.json +++ b/tests/snapshots/tests.contrib.anthropic.test_anthropic.test_anthropic_llm_sync.json @@ -1,39 +1,39 @@ [[ - { - "name": "anthropic.request", - "service": "", - "resource": "Messages.create", - "trace_id": 0, - "span_id": 1, - "parent_id": 0, - "type": "", - "error": 0, - 
"meta": { - "_dd.p.dm": "-0", - "_dd.p.tid": "665de86c00000000", - "anthropic.request.api_key": "...key>", - "anthropic.request.messages.0.content.0.text": "Can you explain what Descartes meant by 'I think, therefore I am'?", - "anthropic.request.messages.0.content.0.type": "text", - "anthropic.request.messages.0.role": "user", - "anthropic.request.model": "claude-3-opus-20240229", - "anthropic.request.parameters": "{\"model\": \"claude-3-opus-20240229\", \"max_tokens\": 1024}", - "anthropic.response.completions.content.0.text": "The phrase \"I think, therefore I am\" is a translation of the Latin phrase \"Cogito, ergo sum,\" which was coined by the French phi...", - "anthropic.response.completions.content.0.type": "text", - "anthropic.response.completions.finish_reason": "end_turn", - "anthropic.response.completions.role": "assistant", - "language": "python", - "runtime-id": "8e2ce3f9d69c4b6393f8f97d17bc43d3" - }, - "metrics": { - "_dd.measured": 1, - "_dd.top_level": 1, - "_dd.tracer_kr": 1.0, - "_sampling_priority_v1": 1, - "anthropic.response.usage.input_tokens": 27, - "anthropic.response.usage.output_tokens": 303, - "anthropic.response.usage.total_tokens": 330, - "process_id": 37192 - }, - "duration": 2370000, - "start": 1717430380355108000 - }]] \ No newline at end of file + { + "name": "anthropic.request", + "service": "", + "resource": "Messages.create", + "trace_id": 0, + "span_id": 1, + "parent_id": 0, + "type": "", + "error": 0, + "meta": { + "_dd.p.dm": "-0", + "_dd.p.tid": "665e221e00000000", + "anthropic.request.api_key": "...key>", + "anthropic.request.messages.0.content.0.text": "Can you explain what Descartes meant by 'I think, therefore I am'?", + "anthropic.request.messages.0.content.0.type": "text", + "anthropic.request.messages.0.role": "user", + "anthropic.request.model": "claude-3-opus-20240229", + "anthropic.request.parameters": "{\"model\": \"claude-3-opus-20240229\", \"max_tokens\": 15}", + "anthropic.response.completions.content.0.text": 
"The phrase \"I think, therefore I am\" (originally in Latin as", + "anthropic.response.completions.content.0.type": "text", + "anthropic.response.completions.finish_reason": "max_tokens", + "anthropic.response.completions.role": "assistant", + "language": "python", + "runtime-id": "b52cab756a314569a6d74fe80724c91a" + }, + "metrics": { + "_dd.measured": 1, + "_dd.top_level": 1, + "_dd.tracer_kr": 1.0, + "_sampling_priority_v1": 1, + "anthropic.response.usage.input_tokens": 27, + "anthropic.response.usage.output_tokens": 15, + "anthropic.response.usage.total_tokens": 42, + "process_id": 95434 + }, + "duration": 1633425000, + "start": 1717445150472691000 + }]] diff --git a/tests/snapshots/tests.contrib.anthropic.test_anthropic.test_anthropic_llm_sync_multiple_prompts.json b/tests/snapshots/tests.contrib.anthropic.test_anthropic.test_anthropic_llm_sync_multiple_prompts.json index 3e291a64fd8..49a77f4302b 100644 --- a/tests/snapshots/tests.contrib.anthropic.test_anthropic.test_anthropic_llm_sync_multiple_prompts.json +++ b/tests/snapshots/tests.contrib.anthropic.test_anthropic.test_anthropic_llm_sync_multiple_prompts.json @@ -10,7 +10,7 @@ "error": 0, "meta": { "_dd.p.dm": "-0", - "_dd.p.tid": "665de86c00000000", + "_dd.p.tid": "665e222000000000", "anthropic.request.api_key": "...key>", "anthropic.request.messages.0.content.0.text": "Hello, I am looking for information about some books!", "anthropic.request.messages.0.content.0.type": "text", @@ -18,13 +18,13 @@ "anthropic.request.messages.0.content.1.type": "text", "anthropic.request.messages.0.role": "user", "anthropic.request.model": "claude-3-opus-20240229", - "anthropic.request.parameters": "{\"model\": \"claude-3-opus-20240229\", \"max_tokens\": 1024}", - "anthropic.response.completions.content.0.text": "The phrase \"I think, therefore I am\" (originally in Latin as \"Cogito, ergo sum\") is a philosophical statement by the French phil...", + "anthropic.request.parameters": "{\"model\": \"claude-3-opus-20240229\", 
\"max_tokens\": 15}", + "anthropic.response.completions.content.0.text": "The phrase \"I think, therefore I am\" (originally in Latin as", "anthropic.response.completions.content.0.type": "text", - "anthropic.response.completions.finish_reason": "end_turn", + "anthropic.response.completions.finish_reason": "max_tokens", "anthropic.response.completions.role": "assistant", "language": "python", - "runtime-id": "8e2ce3f9d69c4b6393f8f97d17bc43d3" + "runtime-id": "b52cab756a314569a6d74fe80724c91a" }, "metrics": { "_dd.measured": 1, @@ -32,10 +32,10 @@ "_dd.tracer_kr": 1.0, "_sampling_priority_v1": 1, "anthropic.response.usage.input_tokens": 38, - "anthropic.response.usage.output_tokens": 337, - "anthropic.response.usage.total_tokens": 375, - "process_id": 37192 + "anthropic.response.usage.output_tokens": 15, + "anthropic.response.usage.total_tokens": 53, + "process_id": 95434 }, - "duration": 2667000, - "start": 1717430380393742000 - }]] \ No newline at end of file + "duration": 1951110000, + "start": 1717445152164436000 + }]] diff --git a/tests/snapshots/tests.contrib.anthropic.test_anthropic.test_anthropic_llm_sync_multiple_prompts_with_chat_history.json b/tests/snapshots/tests.contrib.anthropic.test_anthropic.test_anthropic_llm_sync_multiple_prompts_with_chat_history.json index a349a381feb..71ce518d882 100644 --- a/tests/snapshots/tests.contrib.anthropic.test_anthropic.test_anthropic_llm_sync_multiple_prompts_with_chat_history.json +++ b/tests/snapshots/tests.contrib.anthropic.test_anthropic.test_anthropic_llm_sync_multiple_prompts_with_chat_history.json @@ -10,7 +10,7 @@ "error": 0, "meta": { "_dd.p.dm": "-0", - "_dd.p.tid": "665df39100000000", + "_dd.p.tid": "665e220a00000000", "anthropic.request.api_key": "...key>", "anthropic.request.messages.0.content.0.text": "Hello, Start all responses with your name Claude.", "anthropic.request.messages.0.content.0.type": "text", @@ -26,13 +26,13 @@ "anthropic.request.messages.2.content.1.type": "text", 
"anthropic.request.messages.2.role": "user", "anthropic.request.model": "claude-3-opus-20240229", - "anthropic.request.parameters": "{\"model\": \"claude-3-opus-20240229\", \"max_tokens\": 1024}", - "anthropic.response.completions.content.0.text": "Claude, Friday, April 28, 2023 at 11:04 AM: String theory is a theoretical framework in physics that proposes that the fundament...", + "anthropic.request.parameters": "{\"model\": \"claude-3-opus-20240229\", \"max_tokens\": 30}", + "anthropic.response.completions.content.0.text": "Claude (2023-03-09 16:15): String theory is a theoretical framework in physics that attempts to unify quantum mechanics and gene...", "anthropic.response.completions.content.0.type": "text", - "anthropic.response.completions.finish_reason": "end_turn", + "anthropic.response.completions.finish_reason": "max_tokens", "anthropic.response.completions.role": "assistant", "language": "python", - "runtime-id": "0af264443f1441098adc8b487438cebe" + "runtime-id": "b52cab756a314569a6d74fe80724c91a" }, "metrics": { "_dd.measured": 1, @@ -40,10 +40,10 @@ "_dd.tracer_kr": 1.0, "_sampling_priority_v1": 1, "anthropic.response.usage.input_tokens": 84, - "anthropic.response.usage.output_tokens": 155, - "anthropic.response.usage.total_tokens": 239, - "process_id": 88493 + "anthropic.response.usage.output_tokens": 30, + "anthropic.response.usage.total_tokens": 114, + "process_id": 95434 }, - "duration": 4876000, - "start": 1717433233172216000 + "duration": 2371348000, + "start": 1717445130515094000 }]] diff --git a/tests/snapshots/tests.contrib.anthropic.test_anthropic.test_anthropic_llm_sync_stream.json b/tests/snapshots/tests.contrib.anthropic.test_anthropic.test_anthropic_llm_sync_stream.json index 288415a3b5a..4ccb4dce60a 100644 --- a/tests/snapshots/tests.contrib.anthropic.test_anthropic.test_anthropic_llm_sync_stream.json +++ b/tests/snapshots/tests.contrib.anthropic.test_anthropic.test_anthropic_llm_sync_stream.json @@ -10,23 +10,23 @@ "error": 0, "meta": 
{ "_dd.p.dm": "-0", - "_dd.p.tid": "665e1e0700000000", + "_dd.p.tid": "665e221c00000000", "anthropic.request.api_key": "...key>", "anthropic.request.messages.0.content.0.text": "Can you explain what Descartes meant by 'I think, therefore I am'?", "anthropic.request.messages.0.content.0.type": "text", "anthropic.request.messages.0.role": "user", "anthropic.request.model": "claude-3-opus-20240229", - "anthropic.request.parameters": "{\"model\": \"claude-3-opus-20240229\", \"max_tokens\": 1024, \"stream\": true}", + "anthropic.request.parameters": "{\"model\": \"claude-3-opus-20240229\", \"max_tokens\": 15, \"stream\": true}", "language": "python", - "runtime-id": "93b0a0a29f0140f29375dc8bf89847b9" + "runtime-id": "b52cab756a314569a6d74fe80724c91a" }, "metrics": { "_dd.measured": 1, "_dd.top_level": 1, "_dd.tracer_kr": 1.0, "_sampling_priority_v1": 1, - "process_id": 42080 + "process_id": 95434 }, - "duration": 44181000, - "start": 1717444103186786000 + "duration": 1912334000, + "start": 1717445148270890000 }]] From 2cda152662d42d32736e3bb9cc504da02776abf0 Mon Sep 17 00:00:00 2001 From: William Conti Date: Mon, 3 Jun 2024 17:16:56 -0400 Subject: [PATCH 09/33] add async support --- ddtrace/contrib/anthropic/async_message.py | 86 ++++++++ ddtrace/contrib/anthropic/patch.py | 43 +--- ddtrace/contrib/anthropic/utils.py | 32 +++ hatch.toml | 2 +- .../cassettes/anthropic_completion_async.yaml | 85 ++++++++ ...nthropic_completion_async_global_tags.yaml | 98 +++++++++ ...thropic_completion_async_multi_prompt.yaml | 86 ++++++++ ..._async_multi_prompt_with_chat_history.yaml | 89 ++++++++ .../anthropic_completion_async_stream.yaml | 193 ++++++++++++++++++ .../anthropic_completion_error_async.yaml | 67 ++++++ tests/contrib/anthropic/conftest.py | 6 + tests/contrib/anthropic/test_anthropic.py | 6 - .../contrib/anthropic/test_anthropic_async.py | 142 +++++++++++++ ..._async.test_anthropic_llm_async_basic.json | 39 ++++ ...llm_async_multiple_prompts_no_history.json | 41 ++++ 
...nc_multiple_prompts_with_chat_history.json | 49 +++++ ...async.test_anthropic_llm_async_stream.json | 32 +++ ..._async.test_anthropic_llm_error_async.json | 32 +++ 18 files changed, 1086 insertions(+), 42 deletions(-) create mode 100644 ddtrace/contrib/anthropic/async_message.py create mode 100644 tests/contrib/anthropic/cassettes/anthropic_completion_async.yaml create mode 100644 tests/contrib/anthropic/cassettes/anthropic_completion_async_global_tags.yaml create mode 100644 tests/contrib/anthropic/cassettes/anthropic_completion_async_multi_prompt.yaml create mode 100644 tests/contrib/anthropic/cassettes/anthropic_completion_async_multi_prompt_with_chat_history.yaml create mode 100644 tests/contrib/anthropic/cassettes/anthropic_completion_async_stream.yaml create mode 100644 tests/contrib/anthropic/cassettes/anthropic_completion_error_async.yaml create mode 100644 tests/contrib/anthropic/test_anthropic_async.py create mode 100644 tests/snapshots/tests.contrib.anthropic.test_anthropic_async.test_anthropic_llm_async_basic.json create mode 100644 tests/snapshots/tests.contrib.anthropic.test_anthropic_async.test_anthropic_llm_async_multiple_prompts_no_history.json create mode 100644 tests/snapshots/tests.contrib.anthropic.test_anthropic_async.test_anthropic_llm_async_multiple_prompts_with_chat_history.json create mode 100644 tests/snapshots/tests.contrib.anthropic.test_anthropic_async.test_anthropic_llm_async_stream.json create mode 100644 tests/snapshots/tests.contrib.anthropic.test_anthropic_async.test_anthropic_llm_error_async.json diff --git a/ddtrace/contrib/anthropic/async_message.py b/ddtrace/contrib/anthropic/async_message.py new file mode 100644 index 00000000000..ec5174af962 --- /dev/null +++ b/ddtrace/contrib/anthropic/async_message.py @@ -0,0 +1,86 @@ +import json +import sys + +from ddtrace.contrib.trace_utils import with_traced_module +from ddtrace.internal.logger import get_logger +from ddtrace.internal.utils import get_argument_value + +from .utils 
import _extract_api_key +from .utils import handle_non_streamed_response + + +log = get_logger(__name__) + + +@with_traced_module +async def traced_async_chat_model_generate(anthropic, pin, func, instance, args, kwargs): + chat_messages = get_argument_value(args, kwargs, 0, "messages") + integration = anthropic._datadog_integration + + operation_name = func.__name__ + + span = integration.trace( + pin, + "%s.%s" % (instance.__class__.__name__, operation_name), + submit_to_llmobs=True, + interface_type="chat_model", + provider="anthropic", + model=kwargs.get("model", ""), + api_key=_extract_api_key(instance), + ) + + chat_completions = None + try: + for message_idx, message in enumerate(chat_messages): + if not isinstance(message, dict): + continue + if isinstance(message.get("content", None), str): + if integration.is_pc_sampled_span(span): + span.set_tag_str( + "anthropic.request.messages.%d.content.0.text" % (message_idx), + integration.trunc(message.get("content", "")), + ) + span.set_tag_str( + "anthropic.request.messages.%d.content.0.type" % (message_idx), + "text", + ) + elif isinstance(message.get("content", None), list): + for block_idx, block in enumerate(message.get("content", [])): + if integration.is_pc_sampled_span(span): + if block.get("type", None) == "text": + span.set_tag_str( + "anthropic.request.messages.%d.content.%d.text" % (message_idx, block_idx), + integration.trunc(str(block.get("text", ""))), + ) + elif block.get("type", None) == "image": + span.set_tag_str( + "anthropic.request.messages.%d.content.%d.text" % (message_idx, block_idx), + "([IMAGE DETECTED])", + ) + + span.set_tag_str( + "anthropic.request.messages.%d.content.%d.type" % (message_idx, block_idx), + block.get("type", "text"), + ) + span.set_tag_str( + "anthropic.request.messages.%d.role" % (message_idx), + message.get("role", ""), + ) + params_to_tag = {k: v for k, v in kwargs.items() if k != "messages"} + span.set_tag_str("anthropic.request.parameters", 
json.dumps(params_to_tag)) + + chat_completions = await func(*args, **kwargs) + + if isinstance(chat_completions, anthropic.AsyncStream) or isinstance( + chat_completions, anthropic.lib.streaming._messages.AsyncMessageStreamManager + ): + pass + else: + handle_non_streamed_response(integration, chat_completions, args, kwargs, span) + except Exception: + span.set_exc_info(*sys.exc_info()) + span.finish() + raise + finally: + span.finish() + return chat_completions diff --git a/ddtrace/contrib/anthropic/patch.py b/ddtrace/contrib/anthropic/patch.py index 9d6407488b8..9bb16358dcf 100644 --- a/ddtrace/contrib/anthropic/patch.py +++ b/ddtrace/contrib/anthropic/patch.py @@ -1,8 +1,6 @@ import json import os import sys -from typing import Any -from typing import Optional import anthropic @@ -15,7 +13,9 @@ from ddtrace.llmobs._integrations import AnthropicIntegration from ddtrace.pin import Pin -from .utils import _get_attr +from .async_message import traced_async_chat_model_generate +from .utils import _extract_api_key +from .utils import handle_non_streamed_response log = get_logger(__name__) @@ -35,16 +35,6 @@ def get_version(): ) -def _extract_api_key(instance: Any) -> Optional[str]: - """ - Extract and format LLM-provider API key from instance. 
- """ - client = getattr(instance, "_client", "") - if client: - return getattr(client, "api_key", None) - return None - - @with_traced_module def traced_chat_model_generate(anthropic, pin, func, instance, args, kwargs): chat_messages = get_argument_value(args, kwargs, 0, "messages") @@ -104,9 +94,11 @@ def traced_chat_model_generate(anthropic, pin, func, instance, args, kwargs): chat_completions = func(*args, **kwargs) - if not isinstance(chat_completions, anthropic.Stream) and not isinstance( + if isinstance(chat_completions, anthropic.Stream) or isinstance( chat_completions, anthropic.lib.streaming._messages.MessageStreamManager ): + pass + else: handle_non_streamed_response(integration, chat_completions, args, kwargs, span) except Exception: span.set_exc_info(*sys.exc_info()) @@ -117,27 +109,6 @@ def traced_chat_model_generate(anthropic, pin, func, instance, args, kwargs): return chat_completions -def handle_non_streamed_response(integration, chat_completions, args, kwargs, span): - for idx, chat_completion in enumerate(chat_completions.content): - if integration.is_pc_sampled_span(span) and getattr(chat_completion, "text", "") != "": - span.set_tag_str( - "anthropic.response.completions.content.%d.text" % (idx), - integration.trunc(str(getattr(chat_completion, "text", ""))), - ) - span.set_tag_str( - "anthropic.response.completions.content.%d.type" % (idx), - chat_completion.type, - ) - - # set message level tags - if getattr(chat_completions, "stop_reason", None) is not None: - span.set_tag_str("anthropic.response.completions.finish_reason", chat_completions.stop_reason) - span.set_tag_str("anthropic.response.completions.role", chat_completions.role) - - usage = _get_attr(chat_completions, "usage", {}) - integration.record_usage(span, usage) - - def patch(): if getattr(anthropic, "_datadog_patch", False): return @@ -149,6 +120,7 @@ def patch(): anthropic._datadog_integration = integration wrap("anthropic", "resources.messages.Messages.create", 
traced_chat_model_generate(anthropic)) + wrap("anthropic", "resources.messages.AsyncMessages.create", traced_async_chat_model_generate(anthropic)) def unpatch(): @@ -158,5 +130,6 @@ def unpatch(): anthropic._datadog_patch = False unwrap(anthropic.resources.messages.Messages, "create") + unwrap(anthropic.resources.messages.AsyncMessages, "create") delattr(anthropic, "_datadog_integration") diff --git a/ddtrace/contrib/anthropic/utils.py b/ddtrace/contrib/anthropic/utils.py index 8830ca49456..962f60bcf8a 100644 --- a/ddtrace/contrib/anthropic/utils.py +++ b/ddtrace/contrib/anthropic/utils.py @@ -1,4 +1,5 @@ from typing import Any +from typing import Optional from ddtrace.internal.logger import get_logger @@ -6,6 +7,37 @@ log = get_logger(__name__) +def handle_non_streamed_response(integration, chat_completions, args, kwargs, span): + for idx, chat_completion in enumerate(chat_completions.content): + if integration.is_pc_sampled_span(span) and getattr(chat_completion, "text", "") != "": + span.set_tag_str( + "anthropic.response.completions.content.%d.text" % (idx), + integration.trunc(str(getattr(chat_completion, "text", ""))), + ) + span.set_tag_str( + "anthropic.response.completions.content.%d.type" % (idx), + chat_completion.type, + ) + + # set message level tags + if getattr(chat_completions, "stop_reason", None) is not None: + span.set_tag_str("anthropic.response.completions.finish_reason", chat_completions.stop_reason) + span.set_tag_str("anthropic.response.completions.role", chat_completions.role) + + usage = _get_attr(chat_completions, "usage", {}) + integration.record_usage(span, usage) + + +def _extract_api_key(instance: Any) -> Optional[str]: + """ + Extract and format LLM-provider API key from instance. 
+ """ + client = getattr(instance, "_client", "") + if client: + return getattr(client, "api_key", None) + return None + + def _get_attr(o: Any, attr: str, default: Any): # Since our response may be a dict or object, convenience method if isinstance(o, dict): diff --git a/hatch.toml b/hatch.toml index b49b99393a0..d5fa89c311d 100644 --- a/hatch.toml +++ b/hatch.toml @@ -43,7 +43,7 @@ fmt = [ "style", ] spelling = [ - "codespell --skip='ddwaf.h' {args:ddtrace/ tests/ releasenotes/ docs/}", + "codespell --skip='ddwaf.h,*cassettes*' {args:ddtrace/ tests/ releasenotes/ docs/}", ] typing = [ "mypy {args}", diff --git a/tests/contrib/anthropic/cassettes/anthropic_completion_async.yaml b/tests/contrib/anthropic/cassettes/anthropic_completion_async.yaml new file mode 100644 index 00000000000..fe442975553 --- /dev/null +++ b/tests/contrib/anthropic/cassettes/anthropic_completion_async.yaml @@ -0,0 +1,85 @@ +interactions: +- request: + body: '{"max_tokens": 15, "messages": [{"role": "user", "content": [{"type": "text", + "text": "Can you explain what Descartes meant by ''I think, therefore I am''?"}]}], + "model": "claude-3-opus-20240229"}' + headers: + accept: + - application/json + accept-encoding: + - gzip, deflate + anthropic-version: + - '2023-06-01' + connection: + - keep-alive + content-length: + - '194' + content-type: + - application/json + host: + - api.anthropic.com + user-agent: + - AsyncAnthropic/Python 0.28.0 + x-stainless-arch: + - arm64 + x-stainless-async: + - async:asyncio + x-stainless-lang: + - python + x-stainless-os: + - MacOS + x-stainless-package-version: + - 0.28.0 + x-stainless-runtime: + - CPython + x-stainless-runtime-version: + - 3.10.13 + method: POST + uri: https://api.anthropic.com/v1/messages + response: + body: + string: !!binary | + H4sIAAAAAAAAA0xPTUvDQBD9K8ucPGwgjYq4Z1Ea8BCIh2IlLMk0u3SzEzOz2Dbkv0uKBU8P3hfv + zeA7MDBw3+Sb+tVd+pfqXL59DGW5q3521Xt9AQ1yHnF1IbPtETRMFFbCMnsWGwU0DNRhAANtsKnD + 
7D6jMXFW5MVDXhTPoKGlKBgFzOd8KxQ8rdErGKgdqoMdKLEanQ/ENDrf2qBYrOCAUdQetkqcj0et + xOGEB5pQbZUd9qDuaPK9jzaEs/IRli8NLDQ2E1qmuM63p0boiJHhT2L8ThhbBBNTCBrS9Z6Zwccx + yc1siicNlOQ/tXlcll8AAAD//wMAZbFxUjwBAAA= + headers: + CF-Cache-Status: + - DYNAMIC + CF-RAY: + - 88e26f1e58dac404-EWR + Connection: + - keep-alive + Content-Encoding: + - gzip + Content-Type: + - application/json + Date: + - Mon, 03 Jun 2024 20:29:14 GMT + Server: + - cloudflare + Transfer-Encoding: + - chunked + anthropic-ratelimit-requests-limit: + - '5' + anthropic-ratelimit-requests-remaining: + - '4' + anthropic-ratelimit-requests-reset: + - '2024-06-03T20:29:57Z' + anthropic-ratelimit-tokens-limit: + - '10000' + anthropic-ratelimit-tokens-remaining: + - '10000' + anthropic-ratelimit-tokens-reset: + - '2024-06-03T20:29:57Z' + request-id: + - req_01N5Z3LdCjQJJK8Y1PMWwNKE + via: + - 1.1 google + x-cloud-trace-context: + - 55482147ed863c2794cecea1f2d77645 + status: + code: 200 + message: OK +version: 1 diff --git a/tests/contrib/anthropic/cassettes/anthropic_completion_async_global_tags.yaml b/tests/contrib/anthropic/cassettes/anthropic_completion_async_global_tags.yaml new file mode 100644 index 00000000000..b633b3c1487 --- /dev/null +++ b/tests/contrib/anthropic/cassettes/anthropic_completion_async_global_tags.yaml @@ -0,0 +1,98 @@ +interactions: +- request: + body: '{"max_tokens": 1024, "messages": [{"role": "user", "content": "What does + Nietzsche mean by ''God is dead''?"}], "model": "claude-3-opus-20240229"}' + headers: + accept: + - application/json + accept-encoding: + - gzip, deflate + anthropic-version: + - '2023-06-01' + connection: + - keep-alive + content-length: + - '144' + content-type: + - application/json + host: + - api.anthropic.com + user-agent: + - AsyncAnthropic/Python 0.28.0 + x-stainless-arch: + - arm64 + x-stainless-async: + - async:asyncio + x-stainless-lang: + - python + x-stainless-os: + - MacOS + x-stainless-package-version: + - 0.28.0 + x-stainless-runtime: + - CPython + 
x-stainless-runtime-version: + - 3.10.13 + method: POST + uri: https://api.anthropic.com/v1/messages + response: + body: + string: !!binary | + H4sIAAAAAAAAA3SVwW4jNwyGX4XQpS0wMRInC2x9K3LIblsU2HTRHOoioEe0JVgjzoocT7xB3qRv + 0xcrqLFj7257MiyJFP+PPzXPLnq3cJ1sHi+v7j+Pf7y58U/9k3z4ZXv/4X5/H/ufXeN035OdIhHc + kGtc4WQLKBJFMatrXMeeklu4NuHg6eL6gvtBLuaX85vL+fxH17iWs1JWt/jz+ZhQ6clC68/CPQTK + 8Fsk/SxtIFhjx4OkPfSF24SxIw9Ld8ceooAn9EsHMUOIAldv385hxbyFpfsYCO5wD7+3kXJLzdJB + IBhRILNCh9uYN4CQolLBBKKo1FFWwBUPChoI6CmKWjDwGu7Yz+B9FiX0zTFVy53FWCrONcZTm2Ku + EYVS3ETOgNmDFvRRI2dM0HHBFHVvZT+QKJUMwm0k3c+WeZnfUSHAQiDcEWxpDz3HrALKMGRPxVj7 + E6LvBKInXFjs1Qx+ZRG7f41Rw+KMJK+Eyo48aECFnrhPdESnsSMY7eLEYnpqtO2eV36QNAisKEVa + S9W2wzSQzI6o61rBKSRKN6VtA6ZEeWO5DdSah+zrITmHVQHMZ3DLWejTYPnqvoUsnSfUsHSHfizg + IWqwbiH4uDPsK5QosObyCrk5A1CLfgXwTUtA9qLUCYw8JA8tp4S90Aw+GqFpMRF66wNCTyWyt1Jy + DNGENjCGSjCuCYQsU0eYY94kEgE/kEWaElzJ0VchbgKVI0NTfz2DnyDTCFTwvH2CI6hVcuBwoLB0 + gNYH4L7nokM2ZxmBMHSYq2naQqhkN8cCPObDbbVRhwqt01b3DN4RoN9xi0q+5pnQ//P3ikpHWVpr + wPcy9FQ6zD/AGPjApkQxabwjaDnvbDC+9Lvdd6jFdJwqqbpvZnBbosZPQwVzG0oUjWh6zjHUubNz + Laavz1kHYhvgQMu4nMZwcn3hjk3ZSLjNJNKAUFpfeMoRUzN5l4Sy1sdAecTi5YTmKwudXHnqxgSj + x10lDiNO7bBKTG31vJ3GlM7lv89AUm3RfDHa07PXTWFjldSRYh+4vLanDThNVh9iYuE+VDqV95B0 + MHMnzF5a7A+um0b+W2Ar1gD4Oq7TNP+fu+yPMhTytLbxi2owa0hUOdosZkAYuSQP42Fgrej/fljO + 3oWZe/mrcaLcPxZC4ewWjrJ/1KFkd9g4PhJukYeUGjfUL9Pi2cXcD/qovKUsbjGfN44HPV+6vnnz + 8vIvAAAA//8DAPgYWAT4BgAA + headers: + CF-Cache-Status: + - DYNAMIC + CF-RAY: + - 88e26f599eeec32d-EWR + Connection: + - keep-alive + Content-Encoding: + - gzip + Content-Type: + - application/json + Date: + - Mon, 03 Jun 2024 20:29:41 GMT + Server: + - cloudflare + Transfer-Encoding: + - chunked + anthropic-ratelimit-requests-limit: + - '5' + anthropic-ratelimit-requests-remaining: + - '0' + anthropic-ratelimit-requests-reset: + - '2024-06-03T20:29:57Z' + anthropic-ratelimit-tokens-limit: + - '10000' + anthropic-ratelimit-tokens-remaining: + - '9000' + 
anthropic-ratelimit-tokens-reset: + - '2024-06-03T20:29:57Z' + request-id: + - req_01ReKkyQv1Dz3rhDD1L4TWLC + retry-after: + - '16' + via: + - 1.1 google + x-cloud-trace-context: + - 2e8c9d4c044c2f2072b5c582d172abfa + status: + code: 200 + message: OK +version: 1 diff --git a/tests/contrib/anthropic/cassettes/anthropic_completion_async_multi_prompt.yaml b/tests/contrib/anthropic/cassettes/anthropic_completion_async_multi_prompt.yaml new file mode 100644 index 00000000000..bf50aa0baa1 --- /dev/null +++ b/tests/contrib/anthropic/cassettes/anthropic_completion_async_multi_prompt.yaml @@ -0,0 +1,86 @@ +interactions: +- request: + body: '{"max_tokens": 15, "messages": [{"role": "user", "content": [{"type": "text", + "text": "Hello, I am looking for information about some books!"}, {"type": "text", + "text": "Can you explain what Descartes meant by ''I think, therefore I am''?"}]}], + "model": "claude-3-opus-20240229"}' + headers: + accept: + - application/json + accept-encoding: + - gzip, deflate + anthropic-version: + - '2023-06-01' + connection: + - keep-alive + content-length: + - '277' + content-type: + - application/json + host: + - api.anthropic.com + user-agent: + - AsyncAnthropic/Python 0.28.0 + x-stainless-arch: + - arm64 + x-stainless-async: + - async:asyncio + x-stainless-lang: + - python + x-stainless-os: + - MacOS + x-stainless-package-version: + - 0.28.0 + x-stainless-runtime: + - CPython + x-stainless-runtime-version: + - 3.10.13 + method: POST + uri: https://api.anthropic.com/v1/messages + response: + body: + string: !!binary | + H4sIAAAAAAAAA0xPTUvDQBD9K8ucPGwgTWvVPeqpKMWAFdRKWJIxWbuZjTuz0BD63yXFgqcH74v3 + JnANGOi5rfJFuX0u293D+Pi+fbvZlavX9f33ugQNMg44u5DZtggaYvAzYZkdiyUBDX1o0IOB2tvU + YLbMwpA4K/JilRfFHWioAwmSgPmYLoWCxzl6BgMvHaqhi5ZR7WGjpHN00Eo6jPgVIqqNsv0e1FWI + rnVkvR+VI/VkxZGyDKdPDSxhqCJaDjTvtcdKwgGJ4U9i/ElINYKh5L2GdP5jJnA0JLmYzfJWQ0jy + n1pcn06/AAAA//8DAAb+bZQtAQAA + headers: + CF-Cache-Status: + - DYNAMIC + CF-RAY: + - 88e26f40cb7e0f5b-EWR + 
Connection: + - keep-alive + Content-Encoding: + - gzip + Content-Type: + - application/json + Date: + - Mon, 03 Jun 2024 20:29:20 GMT + Server: + - cloudflare + Transfer-Encoding: + - chunked + anthropic-ratelimit-requests-limit: + - '5' + anthropic-ratelimit-requests-remaining: + - '2' + anthropic-ratelimit-requests-reset: + - '2024-06-03T20:29:57Z' + anthropic-ratelimit-tokens-limit: + - '10000' + anthropic-ratelimit-tokens-remaining: + - '10000' + anthropic-ratelimit-tokens-reset: + - '2024-06-03T20:29:57Z' + request-id: + - req_01BS4oP1hUmcmcaiaLUCqYSG + via: + - 1.1 google + x-cloud-trace-context: + - ad90e6c237e5abdea060b5655b8f209e + status: + code: 200 + message: OK +version: 1 diff --git a/tests/contrib/anthropic/cassettes/anthropic_completion_async_multi_prompt_with_chat_history.yaml b/tests/contrib/anthropic/cassettes/anthropic_completion_async_multi_prompt_with_chat_history.yaml new file mode 100644 index 00000000000..524df951e99 --- /dev/null +++ b/tests/contrib/anthropic/cassettes/anthropic_completion_async_multi_prompt_with_chat_history.yaml @@ -0,0 +1,89 @@ +interactions: +- request: + body: '{"max_tokens": 30, "messages": [{"role": "user", "content": [{"type": "text", + "text": "Hello, Start all responses with your name Claude."}, {"type": "text", + "text": "End all responses with [COPY, CLAUDE OVER AND OUT!]"}]}, {"role": "assistant", + "content": "Claude: Sure! 
[COPY, CLAUDE OVER AND OUT!]"}, {"role": "user", "content": + [{"type": "text", "text": "Add the time and date to the beginning of your response + after your name."}, {"type": "text", "text": "Explain string theory succinctly + to a complete noob."}]}], "model": "claude-3-opus-20240229"}' + headers: + accept: + - application/json + accept-encoding: + - gzip, deflate + anthropic-version: + - '2023-06-01' + connection: + - keep-alive + content-length: + - '553' + content-type: + - application/json + host: + - api.anthropic.com + user-agent: + - AsyncAnthropic/Python 0.28.0 + x-stainless-arch: + - arm64 + x-stainless-async: + - async:asyncio + x-stainless-lang: + - python + x-stainless-os: + - MacOS + x-stainless-package-version: + - 0.28.0 + x-stainless-runtime: + - CPython + x-stainless-runtime-version: + - 3.10.13 + method: POST + uri: https://api.anthropic.com/v1/messages + response: + body: + string: !!binary | + H4sIAAAAAAAAA0yPX0sDMRDEv8qyzzlJcxXaPCrFP1BQLCiIlOVuvYamyZlsaM/S7y5XLfg0y+z8 + BuaIrkWLu9yt9aR+3S4W8zJ80/P+7ubx+q2/X1FChTL0PKY4Z+oYFaboR4NydlkoCCrcxZY9Wmw8 + lZaruop9yZXRZqqNmaPCJgbhIGjfj5dC4cOInsXi7Zm08CDgMjQlJQ7iB1htSsotDQqWNMBkpsBo + UwMJTLTVBp6WV/AiyYUOZMMxDSNOvzeLa8jDZ6Id72Paggt4+lCYJfbrxJRjGIfRYS1xyyHj3yvz + V+HQMNpQvFdYzsPtEV3oi1zCdjZVGIv8t2p9Ov0AAAD//wMAsZe/jFYBAAA= + headers: + CF-Cache-Status: + - DYNAMIC + CF-RAY: + - 88e26f4a3b9617a1-EWR + Connection: + - keep-alive + Content-Encoding: + - gzip + Content-Type: + - application/json + Date: + - Mon, 03 Jun 2024 20:29:22 GMT + Server: + - cloudflare + Transfer-Encoding: + - chunked + anthropic-ratelimit-requests-limit: + - '5' + anthropic-ratelimit-requests-remaining: + - '1' + anthropic-ratelimit-requests-reset: + - '2024-06-03T20:29:57Z' + anthropic-ratelimit-tokens-limit: + - '10000' + anthropic-ratelimit-tokens-remaining: + - '10000' + anthropic-ratelimit-tokens-reset: + - '2024-06-03T20:29:57Z' + request-id: + - req_01L9tqQ99Z6CGKeDbNAKigxE + via: + - 1.1 google + x-cloud-trace-context: + - 
5cf34ec34c4a793ebe5dbebdc03ab228 + status: + code: 200 + message: OK +version: 1 diff --git a/tests/contrib/anthropic/cassettes/anthropic_completion_async_stream.yaml b/tests/contrib/anthropic/cassettes/anthropic_completion_async_stream.yaml new file mode 100644 index 00000000000..5533c93e7d3 --- /dev/null +++ b/tests/contrib/anthropic/cassettes/anthropic_completion_async_stream.yaml @@ -0,0 +1,193 @@ +interactions: +- request: + body: '{"max_tokens": 15, "messages": [{"role": "user", "content": [{"type": "text", + "text": "Can you explain what Descartes meant by ''I think, therefore I am''?"}]}], + "model": "claude-3-opus-20240229", "stream": true}' + headers: + accept: + - application/json + accept-encoding: + - gzip, deflate + anthropic-version: + - '2023-06-01' + connection: + - keep-alive + content-length: + - '210' + content-type: + - application/json + host: + - api.anthropic.com + user-agent: + - AsyncAnthropic/Python 0.28.0 + x-stainless-arch: + - arm64 + x-stainless-async: + - async:asyncio + x-stainless-lang: + - python + x-stainless-os: + - MacOS + x-stainless-package-version: + - 0.28.0 + x-stainless-runtime: + - CPython + x-stainless-runtime-version: + - 3.10.13 + method: POST + uri: https://api.anthropic.com/v1/messages + response: + body: + string: 'event: message_start + + data: {"type":"message_start","message":{"id":"msg_01Si43rw1LcRZyVVjZUoMZPd","type":"message","role":"assistant","model":"claude-3-opus-20240229","content":[],"stop_reason":null,"stop_sequence":null,"usage":{"input_tokens":27,"output_tokens":1}} } + + + event: content_block_start + + data: {"type":"content_block_start","index":0,"content_block":{"type":"text","text":""} } + + + event: ping + + data: {"type": "ping"} + + + event: content_block_delta + + data: {"type":"content_block_delta","index":0,"delta":{"type":"text_delta","text":"The"} } + + + event: content_block_delta + + data: {"type":"content_block_delta","index":0,"delta":{"type":"text_delta","text":" + phrase"} } + + + 
event: content_block_delta + + data: {"type":"content_block_delta","index":0,"delta":{"type":"text_delta","text":" + \""} } + + + event: content_block_delta + + data: {"type":"content_block_delta","index":0,"delta":{"type":"text_delta","text":"I"} } + + + event: content_block_delta + + data: {"type":"content_block_delta","index":0,"delta":{"type":"text_delta","text":" + think"} } + + + event: content_block_delta + + data: {"type":"content_block_delta","index":0,"delta":{"type":"text_delta","text":","} } + + + event: content_block_delta + + data: {"type":"content_block_delta","index":0,"delta":{"type":"text_delta","text":" + therefore"} } + + + event: content_block_delta + + data: {"type":"content_block_delta","index":0,"delta":{"type":"text_delta","text":" + I"} } + + + event: content_block_delta + + data: {"type":"content_block_delta","index":0,"delta":{"type":"text_delta","text":" + am"} } + + + event: content_block_delta + + data: {"type":"content_block_delta","index":0,"delta":{"type":"text_delta","text":"\""} } + + + event: content_block_delta + + data: {"type":"content_block_delta","index":0,"delta":{"type":"text_delta","text":" + ("} } + + + event: content_block_delta + + data: {"type":"content_block_delta","index":0,"delta":{"type":"text_delta","text":"originally"} } + + + event: content_block_delta + + data: {"type":"content_block_delta","index":0,"delta":{"type":"text_delta","text":" + in"} } + + + event: content_block_delta + + data: {"type":"content_block_delta","index":0,"delta":{"type":"text_delta","text":" + Latin"} } + + + event: content_block_delta + + data: {"type":"content_block_delta","index":0,"delta":{"type":"text_delta","text":" + as"} } + + + event: content_block_stop + + data: {"type":"content_block_stop","index":0 } + + + event: message_delta + + data: {"type":"message_delta","delta":{"stop_reason":"max_tokens","stop_sequence":null},"usage":{"output_tokens":15} } + + + event: message_stop + + data: {"type":"message_stop" } + + + ' + 
headers: + CF-Cache-Status: + - DYNAMIC + CF-RAY: + - 88e26f28398d17b9-EWR + Cache-Control: + - no-cache + Connection: + - keep-alive + Content-Type: + - text/event-stream; charset=utf-8 + Date: + - Mon, 03 Jun 2024 20:29:18 GMT + Server: + - cloudflare + Transfer-Encoding: + - chunked + anthropic-ratelimit-requests-limit: + - '5' + anthropic-ratelimit-requests-remaining: + - '3' + anthropic-ratelimit-requests-reset: + - '2024-06-03T20:29:57Z' + anthropic-ratelimit-tokens-limit: + - '10000' + anthropic-ratelimit-tokens-remaining: + - '10000' + anthropic-ratelimit-tokens-reset: + - '2024-06-03T20:29:57Z' + request-id: + - req_01WnUgxExmDjUBGVtpwdGyWT + via: + - 1.1 google + status: + code: 200 + message: OK +version: 1 diff --git a/tests/contrib/anthropic/cassettes/anthropic_completion_error_async.yaml b/tests/contrib/anthropic/cassettes/anthropic_completion_error_async.yaml new file mode 100644 index 00000000000..84013aee4ee --- /dev/null +++ b/tests/contrib/anthropic/cassettes/anthropic_completion_error_async.yaml @@ -0,0 +1,67 @@ +interactions: +- request: + body: '{"max_tokens": 15, "messages": ["Invalid content"], "model": "claude-3-opus-20240229"}' + headers: + accept: + - application/json + accept-encoding: + - gzip, deflate + anthropic-version: + - '2023-06-01' + connection: + - keep-alive + content-length: + - '86' + content-type: + - application/json + host: + - api.anthropic.com + user-agent: + - AsyncAnthropic/Python 0.28.0 + x-stainless-arch: + - arm64 + x-stainless-async: + - async:asyncio + x-stainless-lang: + - python + x-stainless-os: + - MacOS + x-stainless-package-version: + - 0.28.0 + x-stainless-runtime: + - CPython + x-stainless-runtime-version: + - 3.10.13 + method: POST + uri: https://api.anthropic.com/v1/messages + response: + body: + string: '{"type":"error","error":{"type":"invalid_request_error","message":"messages.0: + Input does not match the expected shape."}}' + headers: + CF-Cache-Status: + - DYNAMIC + CF-RAY: + - 
88e26f1ce85b4379-EWR + Connection: + - keep-alive + Content-Length: + - '122' + Content-Type: + - application/json + Date: + - Mon, 03 Jun 2024 20:29:13 GMT + Server: + - cloudflare + request-id: + - req_014VdE8JFtyZZgtyAmYPD4pd + via: + - 1.1 google + x-cloud-trace-context: + - 99147895ac9c66c15e1de6063c141048 + x-should-retry: + - 'false' + status: + code: 400 + message: Bad Request +version: 1 diff --git a/tests/contrib/anthropic/conftest.py b/tests/contrib/anthropic/conftest.py index fe0010849e6..d5307714849 100644 --- a/tests/contrib/anthropic/conftest.py +++ b/tests/contrib/anthropic/conftest.py @@ -5,6 +5,7 @@ from ddtrace import Pin from ddtrace.contrib.anthropic.patch import patch from ddtrace.contrib.anthropic.patch import unpatch +from tests.contrib.anthropic.utils import get_request_vcr from tests.utils import DummyTracer from tests.utils import DummyWriter from tests.utils import override_config @@ -46,3 +47,8 @@ def anthropic(ddtrace_config_anthropic): yield anthropic unpatch() + + +@pytest.fixture(scope="session") +def request_vcr(): + yield get_request_vcr() diff --git a/tests/contrib/anthropic/test_anthropic.py b/tests/contrib/anthropic/test_anthropic.py index 4a1b4589270..bcd6cfa81f0 100644 --- a/tests/contrib/anthropic/test_anthropic.py +++ b/tests/contrib/anthropic/test_anthropic.py @@ -1,14 +1,8 @@ import pytest -from tests.contrib.anthropic.utils import get_request_vcr from tests.utils import override_global_config -@pytest.fixture(scope="session") -def request_vcr(): - yield get_request_vcr() - - def test_global_tags(ddtrace_config_anthropic, anthropic, request_vcr, mock_tracer): """ When the global config UST tags are set diff --git a/tests/contrib/anthropic/test_anthropic_async.py b/tests/contrib/anthropic/test_anthropic_async.py new file mode 100644 index 00000000000..71e652f1fad --- /dev/null +++ b/tests/contrib/anthropic/test_anthropic_async.py @@ -0,0 +1,142 @@ +import pytest + +from tests.utils import override_global_config + + 
+@pytest.mark.asyncio +async def test_global_tags_async(ddtrace_config_anthropic, anthropic, request_vcr, mock_tracer): + """ + When the global config UST tags are set + The service name should be used for all data + The env should be used for all data + The version should be used for all data + """ + llm = anthropic.AsyncAnthropic() + with override_global_config(dict(service="test-svc", env="staging", version="1234")): + cassette_name = "anthropic_completion_async_global_tags.yaml" + with request_vcr.use_cassette(cassette_name): + await llm.messages.create( + model="claude-3-opus-20240229", + max_tokens=1024, + messages=[{"role": "user", "content": "What does Nietzsche mean by 'God is dead'?"}], + ) + + span = mock_tracer.pop_traces()[0][0] + assert span.resource == "AsyncMessages.create" + assert span.service == "test-svc" + assert span.get_tag("env") == "staging" + assert span.get_tag("version") == "1234" + assert span.get_tag("anthropic.request.model") == "claude-3-opus-20240229" + assert span.get_tag("anthropic.request.api_key") == "...key>" + + +@pytest.mark.asyncio +# @pytest.mark.snapshot +async def test_anthropic_llm_async_basic(anthropic, request_vcr, snapshot_context): + with snapshot_context(): + llm = anthropic.AsyncAnthropic() + with request_vcr.use_cassette("anthropic_completion_async.yaml"): + await llm.messages.create( + model="claude-3-opus-20240229", + max_tokens=15, + messages=[ + { + "role": "user", + "content": [ + { + "type": "text", + "text": "Can you explain what Descartes meant by 'I think, therefore I am'?", + } + ], + } + ], + ) + + +@pytest.mark.asyncio +async def test_anthropic_llm_async_multiple_prompts_no_history(anthropic, request_vcr, snapshot_context): + with snapshot_context(): + llm = anthropic.AsyncAnthropic() + with request_vcr.use_cassette("anthropic_completion_async_multi_prompt.yaml"): + await llm.messages.create( + model="claude-3-opus-20240229", + max_tokens=15, + messages=[ + { + "role": "user", + "content": [ + {"type": 
"text", "text": "Hello, I am looking for information about some books!"}, + { + "type": "text", + "text": "Can you explain what Descartes meant by 'I think, therefore I am'?", + }, + ], + } + ], + ) + + +@pytest.mark.asyncio +async def test_anthropic_llm_async_multiple_prompts_with_chat_history(anthropic, request_vcr, snapshot_context): + with snapshot_context(): + llm = anthropic.AsyncAnthropic() + with request_vcr.use_cassette("anthropic_completion_async_multi_prompt_with_chat_history.yaml"): + await llm.messages.create( + model="claude-3-opus-20240229", + max_tokens=30, + messages=[ + { + "role": "user", + "content": [ + {"type": "text", "text": "Hello, Start all responses with your name Claude."}, + {"type": "text", "text": "End all responses with [COPY, CLAUDE OVER AND OUT!]"}, + ], + }, + {"role": "assistant", "content": "Claude: Sure! [COPY, CLAUDE OVER AND OUT!]"}, + { + "role": "user", + "content": [ + { + "type": "text", + "text": "Add the time and date to the beginning of your response after your name.", + }, + {"type": "text", "text": "Explain string theory succinctly to a complete noob."}, + ], + }, + ], + ) + + +@pytest.mark.asyncio +async def test_anthropic_llm_error_async(anthropic, request_vcr, snapshot_context): + with snapshot_context(): + llm = anthropic.AsyncAnthropic() + invalid_error = anthropic.BadRequestError + with pytest.raises(invalid_error): + with request_vcr.use_cassette("anthropic_completion_error_async.yaml"): + await llm.messages.create(model="claude-3-opus-20240229", max_tokens=15, messages=["Invalid content"]) + + +@pytest.mark.asyncio +async def test_anthropic_llm_async_stream(anthropic, request_vcr, snapshot_context): + with snapshot_context(ignores=["meta.error.stack"]): + llm = anthropic.AsyncAnthropic() + with request_vcr.use_cassette("anthropic_completion_async_stream.yaml"): + stream = await llm.messages.create( + model="claude-3-opus-20240229", + max_tokens=15, + messages=[ + { + "role": "user", + "content": [ + { + 
"type": "text", + "text": "Can you explain what Descartes meant by 'I think, therefore I am'?", + } + ], + }, + ], + stream=True, + ) + async for chunk in stream: + print(chunk.type) diff --git a/tests/snapshots/tests.contrib.anthropic.test_anthropic_async.test_anthropic_llm_async_basic.json b/tests/snapshots/tests.contrib.anthropic.test_anthropic_async.test_anthropic_llm_async_basic.json new file mode 100644 index 00000000000..04b3d28502d --- /dev/null +++ b/tests/snapshots/tests.contrib.anthropic.test_anthropic_async.test_anthropic_llm_async_basic.json @@ -0,0 +1,39 @@ +[[ + { + "name": "anthropic.request", + "service": "", + "resource": "AsyncMessages.create", + "trace_id": 0, + "span_id": 1, + "parent_id": 0, + "type": "", + "error": 0, + "meta": { + "_dd.p.dm": "-0", + "_dd.p.tid": "665e2c0700000000", + "anthropic.request.api_key": "...key>", + "anthropic.request.messages.0.content.0.text": "Can you explain what Descartes meant by 'I think, therefore I am'?", + "anthropic.request.messages.0.content.0.type": "text", + "anthropic.request.messages.0.role": "user", + "anthropic.request.model": "claude-3-opus-20240229", + "anthropic.request.parameters": "{\"model\": \"claude-3-opus-20240229\", \"max_tokens\": 15}", + "anthropic.response.completions.content.0.text": "The famous philosophical statement \"I think, therefore I am\" (originally in", + "anthropic.response.completions.content.0.type": "text", + "anthropic.response.completions.finish_reason": "max_tokens", + "anthropic.response.completions.role": "assistant", + "language": "python", + "runtime-id": "8d2ef62d83884add8d544970c41bf728" + }, + "metrics": { + "_dd.measured": 1, + "_dd.top_level": 1, + "_dd.tracer_kr": 1.0, + "_sampling_priority_v1": 1, + "anthropic.response.usage.input_tokens": 27, + "anthropic.response.usage.output_tokens": 15, + "anthropic.response.usage.total_tokens": 42, + "process_id": 29818 + }, + "duration": 1112700000, + "start": 1717447687466355000 + }]] diff --git 
a/tests/snapshots/tests.contrib.anthropic.test_anthropic_async.test_anthropic_llm_async_multiple_prompts_no_history.json b/tests/snapshots/tests.contrib.anthropic.test_anthropic_async.test_anthropic_llm_async_multiple_prompts_no_history.json new file mode 100644 index 00000000000..5a61f296563 --- /dev/null +++ b/tests/snapshots/tests.contrib.anthropic.test_anthropic_async.test_anthropic_llm_async_multiple_prompts_no_history.json @@ -0,0 +1,41 @@ +[[ + { + "name": "anthropic.request", + "service": "", + "resource": "AsyncMessages.create", + "trace_id": 0, + "span_id": 1, + "parent_id": 0, + "type": "", + "error": 0, + "meta": { + "_dd.p.dm": "-0", + "_dd.p.tid": "665e2bf800000000", + "anthropic.request.api_key": "...key>", + "anthropic.request.messages.0.content.0.text": "Hello, I am looking for information about some books!", + "anthropic.request.messages.0.content.0.type": "text", + "anthropic.request.messages.0.content.1.text": "Can you explain what Descartes meant by 'I think, therefore I am'?", + "anthropic.request.messages.0.content.1.type": "text", + "anthropic.request.messages.0.role": "user", + "anthropic.request.model": "claude-3-opus-20240229", + "anthropic.request.parameters": "{\"model\": \"claude-3-opus-20240229\", \"max_tokens\": 15}", + "anthropic.response.completions.content.0.text": "The phrase \"I think, therefore I am\" (originally in Latin as", + "anthropic.response.completions.content.0.type": "text", + "anthropic.response.completions.finish_reason": "max_tokens", + "anthropic.response.completions.role": "assistant", + "language": "python", + "runtime-id": "b9f16b2ca36b405485b2d3fb6a735bd0" + }, + "metrics": { + "_dd.measured": 1, + "_dd.top_level": 1, + "_dd.tracer_kr": 1.0, + "_sampling_priority_v1": 1, + "anthropic.response.usage.input_tokens": 38, + "anthropic.response.usage.output_tokens": 15, + "anthropic.response.usage.total_tokens": 53, + "process_id": 28997 + }, + "duration": 547612000, + "start": 1717447672414622000 + }]] diff --git 
a/tests/snapshots/tests.contrib.anthropic.test_anthropic_async.test_anthropic_llm_async_multiple_prompts_with_chat_history.json b/tests/snapshots/tests.contrib.anthropic.test_anthropic_async.test_anthropic_llm_async_multiple_prompts_with_chat_history.json new file mode 100644 index 00000000000..9f9643424c2 --- /dev/null +++ b/tests/snapshots/tests.contrib.anthropic.test_anthropic_async.test_anthropic_llm_async_multiple_prompts_with_chat_history.json @@ -0,0 +1,49 @@ +[[ + { + "name": "anthropic.request", + "service": "", + "resource": "AsyncMessages.create", + "trace_id": 0, + "span_id": 1, + "parent_id": 0, + "type": "", + "error": 0, + "meta": { + "_dd.p.dm": "-0", + "_dd.p.tid": "665e2be900000000", + "anthropic.request.api_key": "...key>", + "anthropic.request.messages.0.content.0.text": "Hello, Start all responses with your name Claude.", + "anthropic.request.messages.0.content.0.type": "text", + "anthropic.request.messages.0.content.1.text": "End all responses with [COPY, CLAUDE OVER AND OUT!]", + "anthropic.request.messages.0.content.1.type": "text", + "anthropic.request.messages.0.role": "user", + "anthropic.request.messages.1.content.0.text": "Claude: Sure! [COPY, CLAUDE OVER AND OUT!]", + "anthropic.request.messages.1.content.0.type": "text", + "anthropic.request.messages.1.role": "assistant", + "anthropic.request.messages.2.content.0.text": "Add the time and date to the beginning of your response after your name.", + "anthropic.request.messages.2.content.0.type": "text", + "anthropic.request.messages.2.content.1.text": "Explain string theory succinctly to a complete noob.", + "anthropic.request.messages.2.content.1.type": "text", + "anthropic.request.messages.2.role": "user", + "anthropic.request.model": "claude-3-opus-20240229", + "anthropic.request.parameters": "{\"model\": \"claude-3-opus-20240229\", \"max_tokens\": 30}", + "anthropic.response.completions.content.0.text": "Claude: It is currently Thursday, May 18, 2023 at 10:02 PM. 
String theory is a theoretical framework in", + "anthropic.response.completions.content.0.type": "text", + "anthropic.response.completions.finish_reason": "max_tokens", + "anthropic.response.completions.role": "assistant", + "language": "python", + "runtime-id": "c0229f435efe410daec373a127583690" + }, + "metrics": { + "_dd.measured": 1, + "_dd.top_level": 1, + "_dd.tracer_kr": 1.0, + "_sampling_priority_v1": 1, + "anthropic.response.usage.input_tokens": 84, + "anthropic.response.usage.output_tokens": 30, + "anthropic.response.usage.total_tokens": 114, + "process_id": 28083 + }, + "duration": 590200000, + "start": 1717447657168311000 + }]] diff --git a/tests/snapshots/tests.contrib.anthropic.test_anthropic_async.test_anthropic_llm_async_stream.json b/tests/snapshots/tests.contrib.anthropic.test_anthropic_async.test_anthropic_llm_async_stream.json new file mode 100644 index 00000000000..a7f0a6ed204 --- /dev/null +++ b/tests/snapshots/tests.contrib.anthropic.test_anthropic_async.test_anthropic_llm_async_stream.json @@ -0,0 +1,32 @@ +[[ + { + "name": "anthropic.request", + "service": "", + "resource": "AsyncMessages.create", + "trace_id": 0, + "span_id": 1, + "parent_id": 0, + "type": "", + "error": 0, + "meta": { + "_dd.p.dm": "-0", + "_dd.p.tid": "665e2bd900000000", + "anthropic.request.api_key": "...key>", + "anthropic.request.messages.0.content.0.text": "Can you explain what Descartes meant by 'I think, therefore I am'?", + "anthropic.request.messages.0.content.0.type": "text", + "anthropic.request.messages.0.role": "user", + "anthropic.request.model": "claude-3-opus-20240229", + "anthropic.request.parameters": "{\"model\": \"claude-3-opus-20240229\", \"max_tokens\": 15, \"stream\": true}", + "language": "python", + "runtime-id": "d4ce0d37e8c64f0aa013b92c180cbb42" + }, + "metrics": { + "_dd.measured": 1, + "_dd.top_level": 1, + "_dd.tracer_kr": 1.0, + "_sampling_priority_v1": 1, + "process_id": 27167 + }, + "duration": 1165397000, + "start": 1717447641142774000 + 
}]] diff --git a/tests/snapshots/tests.contrib.anthropic.test_anthropic_async.test_anthropic_llm_error_async.json b/tests/snapshots/tests.contrib.anthropic.test_anthropic_async.test_anthropic_llm_error_async.json new file mode 100644 index 00000000000..62e99f2c078 --- /dev/null +++ b/tests/snapshots/tests.contrib.anthropic.test_anthropic_async.test_anthropic_llm_error_async.json @@ -0,0 +1,32 @@ +[[ + { + "name": "anthropic.request", + "service": "", + "resource": "AsyncMessages.create", + "trace_id": 0, + "span_id": 1, + "parent_id": 0, + "type": "", + "error": 1, + "meta": { + "_dd.p.dm": "-0", + "_dd.p.tid": "665e30d300000000", + "anthropic.request.api_key": "...key>", + "anthropic.request.model": "claude-3-opus-20240229", + "anthropic.request.parameters": "{\"model\": \"claude-3-opus-20240229\", \"max_tokens\": 15}", + "error.message": "Error code: 400 - {'type': 'error', 'error': {'type': 'invalid_request_error', 'message': 'messages.0: Input does not match the expected shape.'}}", + "error.stack": "Traceback (most recent call last):\n File \"/Users/william.conti/Documents/dd-trace/dd-trace-py/ddtrace/contrib/anthropic/async_message.py\", line 72, in traced_async_chat_model_generate\n chat_completions = await func(*args, **kwargs)\n File \"/Users/william.conti/Documents/dd-trace/dd-trace-py/.riot/venv_py31013_mock_pytest_pytest-mock_coverage_pytest-cov_opentracing_hypothesis6451_vcrpy_pytest-asyncio_tiktoken_huggingface-hub_ai21_exceptiongroup_psutil_pytest-randomly_numexpr_anthropic/lib/python3.10/site-packages/anthropic/resources/messages.py\", line 1856, in create\n return await self._post(\n File \"/Users/william.conti/Documents/dd-trace/dd-trace-py/.riot/venv_py31013_mock_pytest_pytest-mock_coverage_pytest-cov_opentracing_hypothesis6451_vcrpy_pytest-asyncio_tiktoken_huggingface-hub_ai21_exceptiongroup_psutil_pytest-randomly_numexpr_anthropic/lib/python3.10/site-packages/anthropic/_base_client.py\", line 1789, in post\n return await self.request(cast_to, 
opts, stream=stream, stream_cls=stream_cls)\n File \"/Users/william.conti/Documents/dd-trace/dd-trace-py/.riot/venv_py31013_mock_pytest_pytest-mock_coverage_pytest-cov_opentracing_hypothesis6451_vcrpy_pytest-asyncio_tiktoken_huggingface-hub_ai21_exceptiongroup_psutil_pytest-randomly_numexpr_anthropic/lib/python3.10/site-packages/anthropic/_base_client.py\", line 1492, in request\n return await self._request(\n File \"/Users/william.conti/Documents/dd-trace/dd-trace-py/.riot/venv_py31013_mock_pytest_pytest-mock_coverage_pytest-cov_opentracing_hypothesis6451_vcrpy_pytest-asyncio_tiktoken_huggingface-hub_ai21_exceptiongroup_psutil_pytest-randomly_numexpr_anthropic/lib/python3.10/site-packages/anthropic/_base_client.py\", line 1583, in _request\n raise self._make_status_error_from_response(err.response) from None\nanthropic.BadRequestError: Error code: 400 - {'type': 'error', 'error': {'type': 'invalid_request_error', 'message': 'messages.0: Input does not match the expected shape.'}}\n", + "error.type": "anthropic.BadRequestError", + "language": "python", + "runtime-id": "11b1816282c84f6fa4d62f19c6833546" + }, + "metrics": { + "_dd.measured": 1, + "_dd.top_level": 1, + "_dd.tracer_kr": 1.0, + "_sampling_priority_v1": 1, + "process_id": 93267 + }, + "duration": 2707000, + "start": 1717448915658855000 + }]] From f093d538b414721cd69b93b4f36facd2a90ff41c Mon Sep 17 00:00:00 2001 From: William Conti Date: Mon, 3 Jun 2024 17:18:12 -0400 Subject: [PATCH 10/33] add release note --- releasenotes/notes/feat-anthropic-04a880a26ff44d9c.yaml | 6 ++++++ 1 file changed, 6 insertions(+) create mode 100644 releasenotes/notes/feat-anthropic-04a880a26ff44d9c.yaml diff --git a/releasenotes/notes/feat-anthropic-04a880a26ff44d9c.yaml b/releasenotes/notes/feat-anthropic-04a880a26ff44d9c.yaml new file mode 100644 index 00000000000..b0c9fed520d --- /dev/null +++ b/releasenotes/notes/feat-anthropic-04a880a26ff44d9c.yaml @@ -0,0 +1,6 @@ +--- +features: + - | + anthropic: This introduces tracing 
support for anthropic chat messages. + See `the docs `_ + for more information. \ No newline at end of file From 95dc7d4724d6b0e977674b7955306d6856165c70 Mon Sep 17 00:00:00 2001 From: William Conti Date: Mon, 3 Jun 2024 17:32:11 -0400 Subject: [PATCH 11/33] add async --- ddtrace/contrib/anthropic/__init__.py | 5 +- ddtrace/contrib/anthropic/async_message.py | 86 ---------------- ddtrace/contrib/anthropic/patch.py | 73 +++++++++++++- ddtrace/llmobs/_integrations/anthropic.py | 2 +- .../feat-anthropic-04a880a26ff44d9c.yaml | 2 +- ...nthropic_completion_async_global_tags.yaml | 98 ------------------- ...anthropic_completion_sync_global_tags.yaml | 95 ------------------ tests/contrib/anthropic/test_anthropic.py | 10 +- .../contrib/anthropic/test_anthropic_async.py | 10 +- ...st_anthropic.test_anthropic_llm_error.json | 2 +- ...est_anthropic.test_anthropic_llm_sync.json | 2 +- ...t_anthropic_llm_sync_multiple_prompts.json | 2 +- ...nc_multiple_prompts_with_chat_history.json | 2 +- ...hropic.test_anthropic_llm_sync_stream.json | 2 +- ..._async.test_anthropic_llm_async_basic.json | 2 +- ...llm_async_multiple_prompts_no_history.json | 2 +- ...nc_multiple_prompts_with_chat_history.json | 2 +- ...async.test_anthropic_llm_async_stream.json | 2 +- ..._async.test_anthropic_llm_error_async.json | 2 +- 19 files changed, 96 insertions(+), 305 deletions(-) delete mode 100644 ddtrace/contrib/anthropic/async_message.py delete mode 100644 tests/contrib/anthropic/cassettes/anthropic_completion_async_global_tags.yaml delete mode 100644 tests/contrib/anthropic/cassettes/anthropic_completion_sync_global_tags.yaml diff --git a/ddtrace/contrib/anthropic/__init__.py b/ddtrace/contrib/anthropic/__init__.py index 4be873eac84..7ffe1baf5c1 100644 --- a/ddtrace/contrib/anthropic/__init__.py +++ b/ddtrace/contrib/anthropic/__init__.py @@ -12,9 +12,8 @@ (beta) Prompt and Completion Sampling ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -The following data is collected in span tags with a default sampling 
rate of ``1.0``: - -- Prompt inputs and completions for the ``Messages.create`` endpoint. +Prompt texts and completion content for the ``Messages.create`` endpoint are collected in span tags with a default sampling rate of ``1.0``. +These tags will have truncation applied if the text exceeds the configured character limit. Enabling diff --git a/ddtrace/contrib/anthropic/async_message.py b/ddtrace/contrib/anthropic/async_message.py deleted file mode 100644 index ec5174af962..00000000000 --- a/ddtrace/contrib/anthropic/async_message.py +++ /dev/null @@ -1,86 +0,0 @@ -import json -import sys - -from ddtrace.contrib.trace_utils import with_traced_module -from ddtrace.internal.logger import get_logger -from ddtrace.internal.utils import get_argument_value - -from .utils import _extract_api_key -from .utils import handle_non_streamed_response - - -log = get_logger(__name__) - - -@with_traced_module -async def traced_async_chat_model_generate(anthropic, pin, func, instance, args, kwargs): - chat_messages = get_argument_value(args, kwargs, 0, "messages") - integration = anthropic._datadog_integration - - operation_name = func.__name__ - - span = integration.trace( - pin, - "%s.%s" % (instance.__class__.__name__, operation_name), - submit_to_llmobs=True, - interface_type="chat_model", - provider="anthropic", - model=kwargs.get("model", ""), - api_key=_extract_api_key(instance), - ) - - chat_completions = None - try: - for message_idx, message in enumerate(chat_messages): - if not isinstance(message, dict): - continue - if isinstance(message.get("content", None), str): - if integration.is_pc_sampled_span(span): - span.set_tag_str( - "anthropic.request.messages.%d.content.0.text" % (message_idx), - integration.trunc(message.get("content", "")), - ) - span.set_tag_str( - "anthropic.request.messages.%d.content.0.type" % (message_idx), - "text", - ) - elif isinstance(message.get("content", None), list): - for block_idx, block in enumerate(message.get("content", [])): - if 
integration.is_pc_sampled_span(span): - if block.get("type", None) == "text": - span.set_tag_str( - "anthropic.request.messages.%d.content.%d.text" % (message_idx, block_idx), - integration.trunc(str(block.get("text", ""))), - ) - elif block.get("type", None) == "image": - span.set_tag_str( - "anthropic.request.messages.%d.content.%d.text" % (message_idx, block_idx), - "([IMAGE DETECTED])", - ) - - span.set_tag_str( - "anthropic.request.messages.%d.content.%d.type" % (message_idx, block_idx), - block.get("type", "text"), - ) - span.set_tag_str( - "anthropic.request.messages.%d.role" % (message_idx), - message.get("role", ""), - ) - params_to_tag = {k: v for k, v in kwargs.items() if k != "messages"} - span.set_tag_str("anthropic.request.parameters", json.dumps(params_to_tag)) - - chat_completions = await func(*args, **kwargs) - - if isinstance(chat_completions, anthropic.AsyncStream) or isinstance( - chat_completions, anthropic.lib.streaming._messages.AsyncMessageStreamManager - ): - pass - else: - handle_non_streamed_response(integration, chat_completions, args, kwargs, span) - except Exception: - span.set_exc_info(*sys.exc_info()) - span.finish() - raise - finally: - span.finish() - return chat_completions diff --git a/ddtrace/contrib/anthropic/patch.py b/ddtrace/contrib/anthropic/patch.py index 9bb16358dcf..0c764ac6d2f 100644 --- a/ddtrace/contrib/anthropic/patch.py +++ b/ddtrace/contrib/anthropic/patch.py @@ -13,7 +13,6 @@ from ddtrace.llmobs._integrations import AnthropicIntegration from ddtrace.pin import Pin -from .async_message import traced_async_chat_model_generate from .utils import _extract_api_key from .utils import handle_non_streamed_response @@ -102,7 +101,79 @@ def traced_chat_model_generate(anthropic, pin, func, instance, args, kwargs): handle_non_streamed_response(integration, chat_completions, args, kwargs, span) except Exception: span.set_exc_info(*sys.exc_info()) + raise + finally: span.finish() + return chat_completions + + 
+@with_traced_module +async def traced_async_chat_model_generate(anthropic, pin, func, instance, args, kwargs): + chat_messages = get_argument_value(args, kwargs, 0, "messages") + integration = anthropic._datadog_integration + + operation_name = func.__name__ + + span = integration.trace( + pin, + "%s.%s" % (instance.__class__.__name__, operation_name), + submit_to_llmobs=True, + interface_type="chat_model", + provider="anthropic", + model=kwargs.get("model", ""), + api_key=_extract_api_key(instance), + ) + + chat_completions = None + try: + for message_idx, message in enumerate(chat_messages): + if not isinstance(message, dict): + continue + if isinstance(message.get("content", None), str): + if integration.is_pc_sampled_span(span): + span.set_tag_str( + "anthropic.request.messages.%d.content.0.text" % (message_idx), + integration.trunc(message.get("content", "")), + ) + span.set_tag_str( + "anthropic.request.messages.%d.content.0.type" % (message_idx), + "text", + ) + elif isinstance(message.get("content", None), list): + for block_idx, block in enumerate(message.get("content", [])): + if integration.is_pc_sampled_span(span): + if block.get("type", None) == "text": + span.set_tag_str( + "anthropic.request.messages.%d.content.%d.text" % (message_idx, block_idx), + integration.trunc(str(block.get("text", ""))), + ) + elif block.get("type", None) == "image": + span.set_tag_str( + "anthropic.request.messages.%d.content.%d.text" % (message_idx, block_idx), + "([IMAGE DETECTED])", + ) + + span.set_tag_str( + "anthropic.request.messages.%d.content.%d.type" % (message_idx, block_idx), + block.get("type", "text"), + ) + span.set_tag_str( + "anthropic.request.messages.%d.role" % (message_idx), + message.get("role", ""), + ) + params_to_tag = {k: v for k, v in kwargs.items() if k != "messages"} + span.set_tag_str("anthropic.request.parameters", json.dumps(params_to_tag)) + + chat_completions = await func(*args, **kwargs) + + if isinstance(chat_completions, 
anthropic.AsyncStream) or isinstance( + chat_completions, anthropic.lib.streaming._messages.AsyncMessageStreamManager + ): + pass + else: + handle_non_streamed_response(integration, chat_completions, args, kwargs, span) + except Exception: + span.set_exc_info(*sys.exc_info()) raise finally: span.finish() diff --git a/ddtrace/llmobs/_integrations/anthropic.py b/ddtrace/llmobs/_integrations/anthropic.py index cffb9c10996..5b18a43dd74 100644 --- a/ddtrace/llmobs/_integrations/anthropic.py +++ b/ddtrace/llmobs/_integrations/anthropic.py @@ -31,7 +31,7 @@ def _set_base_span_tags( span.set_tag_str(MODEL, model) if api_key is not None: if len(api_key) >= 4: - span.set_tag_str(API_KEY, f"...{str(api_key[-4:])}") + span.set_tag_str(API_KEY, f"sk-...{str(api_key[-4:])}") else: span.set_tag_str(API_KEY, api_key) diff --git a/releasenotes/notes/feat-anthropic-04a880a26ff44d9c.yaml b/releasenotes/notes/feat-anthropic-04a880a26ff44d9c.yaml index b0c9fed520d..1a7f7af527e 100644 --- a/releasenotes/notes/feat-anthropic-04a880a26ff44d9c.yaml +++ b/releasenotes/notes/feat-anthropic-04a880a26ff44d9c.yaml @@ -3,4 +3,4 @@ features: - | anthropic: This introduces tracing support for anthropic chat messages. See `the docs `_ - for more information. \ No newline at end of file + for more information. 
diff --git a/tests/contrib/anthropic/cassettes/anthropic_completion_async_global_tags.yaml b/tests/contrib/anthropic/cassettes/anthropic_completion_async_global_tags.yaml deleted file mode 100644 index b633b3c1487..00000000000 --- a/tests/contrib/anthropic/cassettes/anthropic_completion_async_global_tags.yaml +++ /dev/null @@ -1,98 +0,0 @@ -interactions: -- request: - body: '{"max_tokens": 1024, "messages": [{"role": "user", "content": "What does - Nietzsche mean by ''God is dead''?"}], "model": "claude-3-opus-20240229"}' - headers: - accept: - - application/json - accept-encoding: - - gzip, deflate - anthropic-version: - - '2023-06-01' - connection: - - keep-alive - content-length: - - '144' - content-type: - - application/json - host: - - api.anthropic.com - user-agent: - - AsyncAnthropic/Python 0.28.0 - x-stainless-arch: - - arm64 - x-stainless-async: - - async:asyncio - x-stainless-lang: - - python - x-stainless-os: - - MacOS - x-stainless-package-version: - - 0.28.0 - x-stainless-runtime: - - CPython - x-stainless-runtime-version: - - 3.10.13 - method: POST - uri: https://api.anthropic.com/v1/messages - response: - body: - string: !!binary | - H4sIAAAAAAAAA3SVwW4jNwyGX4XQpS0wMRInC2x9K3LIblsU2HTRHOoioEe0JVgjzoocT7xB3qRv - 0xcrqLFj7257MiyJFP+PPzXPLnq3cJ1sHi+v7j+Pf7y58U/9k3z4ZXv/4X5/H/ufXeN035OdIhHc - kGtc4WQLKBJFMatrXMeeklu4NuHg6eL6gvtBLuaX85vL+fxH17iWs1JWt/jz+ZhQ6clC68/CPQTK - 8Fsk/SxtIFhjx4OkPfSF24SxIw9Ld8ceooAn9EsHMUOIAldv385hxbyFpfsYCO5wD7+3kXJLzdJB - IBhRILNCh9uYN4CQolLBBKKo1FFWwBUPChoI6CmKWjDwGu7Yz+B9FiX0zTFVy53FWCrONcZTm2Ku - EYVS3ETOgNmDFvRRI2dM0HHBFHVvZT+QKJUMwm0k3c+WeZnfUSHAQiDcEWxpDz3HrALKMGRPxVj7 - E6LvBKInXFjs1Qx+ZRG7f41Rw+KMJK+Eyo48aECFnrhPdESnsSMY7eLEYnpqtO2eV36QNAisKEVa - S9W2wzSQzI6o61rBKSRKN6VtA6ZEeWO5DdSah+zrITmHVQHMZ3DLWejTYPnqvoUsnSfUsHSHfizg - IWqwbiH4uDPsK5QosObyCrk5A1CLfgXwTUtA9qLUCYw8JA8tp4S90Aw+GqFpMRF66wNCTyWyt1Jy - DNGENjCGSjCuCYQsU0eYY94kEgE/kEWaElzJ0VchbgKVI0NTfz2DnyDTCFTwvH2CI6hVcuBwoLB0 - 
gNYH4L7nokM2ZxmBMHSYq2naQqhkN8cCPObDbbVRhwqt01b3DN4RoN9xi0q+5pnQ//P3ikpHWVpr - wPcy9FQ6zD/AGPjApkQxabwjaDnvbDC+9Lvdd6jFdJwqqbpvZnBbosZPQwVzG0oUjWh6zjHUubNz - Laavz1kHYhvgQMu4nMZwcn3hjk3ZSLjNJNKAUFpfeMoRUzN5l4Sy1sdAecTi5YTmKwudXHnqxgSj - x10lDiNO7bBKTG31vJ3GlM7lv89AUm3RfDHa07PXTWFjldSRYh+4vLanDThNVh9iYuE+VDqV95B0 - MHMnzF5a7A+um0b+W2Ar1gD4Oq7TNP+fu+yPMhTytLbxi2owa0hUOdosZkAYuSQP42Fgrej/fljO - 3oWZe/mrcaLcPxZC4ewWjrJ/1KFkd9g4PhJukYeUGjfUL9Pi2cXcD/qovKUsbjGfN44HPV+6vnnz - 8vIvAAAA//8DAPgYWAT4BgAA - headers: - CF-Cache-Status: - - DYNAMIC - CF-RAY: - - 88e26f599eeec32d-EWR - Connection: - - keep-alive - Content-Encoding: - - gzip - Content-Type: - - application/json - Date: - - Mon, 03 Jun 2024 20:29:41 GMT - Server: - - cloudflare - Transfer-Encoding: - - chunked - anthropic-ratelimit-requests-limit: - - '5' - anthropic-ratelimit-requests-remaining: - - '0' - anthropic-ratelimit-requests-reset: - - '2024-06-03T20:29:57Z' - anthropic-ratelimit-tokens-limit: - - '10000' - anthropic-ratelimit-tokens-remaining: - - '9000' - anthropic-ratelimit-tokens-reset: - - '2024-06-03T20:29:57Z' - request-id: - - req_01ReKkyQv1Dz3rhDD1L4TWLC - retry-after: - - '16' - via: - - 1.1 google - x-cloud-trace-context: - - 2e8c9d4c044c2f2072b5c582d172abfa - status: - code: 200 - message: OK -version: 1 diff --git a/tests/contrib/anthropic/cassettes/anthropic_completion_sync_global_tags.yaml b/tests/contrib/anthropic/cassettes/anthropic_completion_sync_global_tags.yaml deleted file mode 100644 index ed4e63bcccd..00000000000 --- a/tests/contrib/anthropic/cassettes/anthropic_completion_sync_global_tags.yaml +++ /dev/null @@ -1,95 +0,0 @@ -interactions: -- request: - body: '{"max_tokens": 1024, "messages": [{"role": "user", "content": "What does - Nietzsche mean by ''God is dead''?"}], "model": "claude-3-opus-20240229"}' - headers: - accept: - - application/json - accept-encoding: - - gzip, deflate - anthropic-version: - - '2023-06-01' - connection: - - keep-alive - content-length: - - '144' - 
content-type: - - application/json - host: - - api.anthropic.com - user-agent: - - Anthropic/Python 0.28.0 - x-stainless-arch: - - arm64 - x-stainless-async: - - 'false' - x-stainless-lang: - - python - x-stainless-os: - - MacOS - x-stainless-package-version: - - 0.28.0 - x-stainless-runtime: - - CPython - x-stainless-runtime-version: - - 3.10.13 - method: POST - uri: https://api.anthropic.com/v1/messages - response: - body: - string: !!binary | - H4sIAAAAAAAAA3SV3W4bNxCFX2XAm6bAWrDlNE10G7duUKA3KRAgVWCMlqPlQFzOhjOUohp+k7xN - X6wgJcWKk1wJoubvfGdI3Tv2buFGHe4ur27k/fvX4/rq1WZVbl6Emxe/8u9eXOdsP1GNIlUcyHUu - S6wHqMpqmMx1bhRP0S1cH7F4uri+kKnoxfxy/vxyPn/lOtdLMkrmFv/cnwoafaqp7WPh3gVK8BeT - /at9IFjjKEXjHqYsfUQeycPS3YoHVvCEfumAEwRWWIlsYOn+DgS3uIe3PVPqaeng2dXLl/OfOwgE - O1RIYjDihtMACJGNMkZQQ6ORkgGupBhYIKBPrFZrgKzhVvwM3iQ1Qv+lVC9jzamlJLUcT33kVA84 - rWM5ZWeKPLAkwOTBMno2loQRRskY2fZVxDtSo5xApWey/WyZlulP2sMknExrREmecmXtawerqqeQ - UWlRY69mZ+BWFJm25MECHuT8liIPwSjVkbt2lFnbeEp9iZhhChxFZQpM2rVZtVI0XnMP6LeY+gZJ - IaCHWKsL4FE01Qnt1Hpdv1WfmuJA8DpkVmNMB82wzjjSTvKm6ZzPvrFVyzCQmj4qOAenUvKB7aHc - FmMhbd1GwqMDEHlNHewC96EZNmSpDH397eRJdxAjasCmMMmOcquTKVKTXKPrZj/x5vqcN+ahPNJm - rQX1zPqisEa2ADspsbLDI7yJMouvkYkDR9axDky5dvW8ZV8w6jFLLZdhiFQz13wmtY47lTyJnlzg - DJG3pG3S5zP44+lKhDJiqps3FjXoM6FRAyC79AOa1QNc6ZOlLgpDYV9BdUe3TwwOU2f6WDgTIGS6 - oFoaq4m1xPeuQpv4lzO2P9WLlnqarKUEgqX77/OK8khJ+7B0IBmW7m2ZKI+Ylq5ZXftvWY+NvmIZ - BPo2mWwp9zLSF/Sw2h9QHO5XpfgExxnlulxt2DcJSBuW7tvHSQFhJMMpSOb+B09NHzANtWdCK/lx - ras/pwuUiDysJX+lxQQMNwSZdJKkvOKWU8O+o+PMS4Sd5OiPq3buw6Oth4tcX0yIkgbKEKRu4Q73 - M/fwoXNqMt1lQpXkFo6Sv7OSkzv+oPSxPYBukUqMnSvtf2Nx7zhNxe5MNpTULebzzkmx86Prq/nD - w/8AAAD//wMA/u+dGZYGAAA= - headers: - CF-Cache-Status: - - DYNAMIC - CF-RAY: - - 88e24c811f331865-EWR - Connection: - - keep-alive - Content-Encoding: - - gzip - Content-Type: - - application/json - Date: - - Mon, 03 Jun 2024 20:05:50 GMT - Server: - - cloudflare - Transfer-Encoding: - - chunked - anthropic-ratelimit-requests-limit: - - '5' - 
anthropic-ratelimit-requests-remaining: - - '3' - anthropic-ratelimit-requests-reset: - - '2024-06-03T20:05:57Z' - anthropic-ratelimit-tokens-limit: - - '10000' - anthropic-ratelimit-tokens-remaining: - - '10000' - anthropic-ratelimit-tokens-reset: - - '2024-06-03T20:05:57Z' - request-id: - - req_01RiCD4awdkdHENeXbiiJ3qF - via: - - 1.1 google - x-cloud-trace-context: - - 9dc6c7c173695d452740285b9cc1bd66 - status: - code: 200 - message: OK -version: 1 diff --git a/tests/contrib/anthropic/test_anthropic.py b/tests/contrib/anthropic/test_anthropic.py index bcd6cfa81f0..2ac27b1dfc7 100644 --- a/tests/contrib/anthropic/test_anthropic.py +++ b/tests/contrib/anthropic/test_anthropic.py @@ -12,11 +12,11 @@ def test_global_tags(ddtrace_config_anthropic, anthropic, request_vcr, mock_trac """ llm = anthropic.Anthropic() with override_global_config(dict(service="test-svc", env="staging", version="1234")): - cassette_name = "anthropic_completion_sync_global_tags.yaml" + cassette_name = "anthropic_completion_sync.yaml" with request_vcr.use_cassette(cassette_name): llm.messages.create( model="claude-3-opus-20240229", - max_tokens=1024, + max_tokens=15, messages=[{"role": "user", "content": "What does Nietzsche mean by 'God is dead'?"}], ) @@ -26,7 +26,7 @@ def test_global_tags(ddtrace_config_anthropic, anthropic, request_vcr, mock_trac assert span.get_tag("env") == "staging" assert span.get_tag("version") == "1234" assert span.get_tag("anthropic.request.model") == "claude-3-opus-20240229" - assert span.get_tag("anthropic.request.api_key") == "...key>" + assert span.get_tag("anthropic.request.api_key") == "sk-...key>" # @pytest.mark.snapshot(ignores=["metrics.anthropic.tokens.total_cost", "resource"]) @@ -129,5 +129,5 @@ def test_anthropic_llm_sync_stream(anthropic, request_vcr): ], stream=True, ) - for chunk in stream: - print(chunk.type) + for _ in stream: + pass diff --git a/tests/contrib/anthropic/test_anthropic_async.py b/tests/contrib/anthropic/test_anthropic_async.py index 
71e652f1fad..f56576e1684 100644 --- a/tests/contrib/anthropic/test_anthropic_async.py +++ b/tests/contrib/anthropic/test_anthropic_async.py @@ -13,11 +13,11 @@ async def test_global_tags_async(ddtrace_config_anthropic, anthropic, request_vc """ llm = anthropic.AsyncAnthropic() with override_global_config(dict(service="test-svc", env="staging", version="1234")): - cassette_name = "anthropic_completion_async_global_tags.yaml" + cassette_name = "anthropic_completion_async.yaml" with request_vcr.use_cassette(cassette_name): await llm.messages.create( model="claude-3-opus-20240229", - max_tokens=1024, + max_tokens=15, messages=[{"role": "user", "content": "What does Nietzsche mean by 'God is dead'?"}], ) @@ -27,7 +27,7 @@ async def test_global_tags_async(ddtrace_config_anthropic, anthropic, request_vc assert span.get_tag("env") == "staging" assert span.get_tag("version") == "1234" assert span.get_tag("anthropic.request.model") == "claude-3-opus-20240229" - assert span.get_tag("anthropic.request.api_key") == "...key>" + assert span.get_tag("anthropic.request.api_key") == "sk-...key>" @pytest.mark.asyncio @@ -138,5 +138,5 @@ async def test_anthropic_llm_async_stream(anthropic, request_vcr, snapshot_conte ], stream=True, ) - async for chunk in stream: - print(chunk.type) + async for _ in stream: + pass diff --git a/tests/snapshots/tests.contrib.anthropic.test_anthropic.test_anthropic_llm_error.json b/tests/snapshots/tests.contrib.anthropic.test_anthropic.test_anthropic_llm_error.json index d07fddd5a4d..f6f2993d8d6 100644 --- a/tests/snapshots/tests.contrib.anthropic.test_anthropic.test_anthropic_llm_error.json +++ b/tests/snapshots/tests.contrib.anthropic.test_anthropic.test_anthropic_llm_error.json @@ -11,7 +11,7 @@ "meta": { "_dd.p.dm": "-0", "_dd.p.tid": "665e221e00000000", - "anthropic.request.api_key": "...key>", + "anthropic.request.api_key": "sk-...key>", "anthropic.request.model": "claude-3-opus-20240229", "anthropic.request.parameters": "{\"model\": 
\"claude-3-opus-20240229\", \"max_tokens\": 15}", "error.message": "Error code: 400 - {'type': 'error', 'error': {'type': 'invalid_request_error', 'message': 'messages.0: Input does not match the expected shape.'}}", diff --git a/tests/snapshots/tests.contrib.anthropic.test_anthropic.test_anthropic_llm_sync.json b/tests/snapshots/tests.contrib.anthropic.test_anthropic.test_anthropic_llm_sync.json index 19bd3106442..32fca96e31c 100644 --- a/tests/snapshots/tests.contrib.anthropic.test_anthropic.test_anthropic_llm_sync.json +++ b/tests/snapshots/tests.contrib.anthropic.test_anthropic.test_anthropic_llm_sync.json @@ -11,7 +11,7 @@ "meta": { "_dd.p.dm": "-0", "_dd.p.tid": "665e221e00000000", - "anthropic.request.api_key": "...key>", + "anthropic.request.api_key": "sk-...key>", "anthropic.request.messages.0.content.0.text": "Can you explain what Descartes meant by 'I think, therefore I am'?", "anthropic.request.messages.0.content.0.type": "text", "anthropic.request.messages.0.role": "user", diff --git a/tests/snapshots/tests.contrib.anthropic.test_anthropic.test_anthropic_llm_sync_multiple_prompts.json b/tests/snapshots/tests.contrib.anthropic.test_anthropic.test_anthropic_llm_sync_multiple_prompts.json index 49a77f4302b..09d86cdd1de 100644 --- a/tests/snapshots/tests.contrib.anthropic.test_anthropic.test_anthropic_llm_sync_multiple_prompts.json +++ b/tests/snapshots/tests.contrib.anthropic.test_anthropic.test_anthropic_llm_sync_multiple_prompts.json @@ -11,7 +11,7 @@ "meta": { "_dd.p.dm": "-0", "_dd.p.tid": "665e222000000000", - "anthropic.request.api_key": "...key>", + "anthropic.request.api_key": "sk-...key>", "anthropic.request.messages.0.content.0.text": "Hello, I am looking for information about some books!", "anthropic.request.messages.0.content.0.type": "text", "anthropic.request.messages.0.content.1.text": "Can you explain what Descartes meant by 'I think, therefore I am'?", diff --git 
a/tests/snapshots/tests.contrib.anthropic.test_anthropic.test_anthropic_llm_sync_multiple_prompts_with_chat_history.json b/tests/snapshots/tests.contrib.anthropic.test_anthropic.test_anthropic_llm_sync_multiple_prompts_with_chat_history.json index 71ce518d882..71bb629c7ab 100644 --- a/tests/snapshots/tests.contrib.anthropic.test_anthropic.test_anthropic_llm_sync_multiple_prompts_with_chat_history.json +++ b/tests/snapshots/tests.contrib.anthropic.test_anthropic.test_anthropic_llm_sync_multiple_prompts_with_chat_history.json @@ -11,7 +11,7 @@ "meta": { "_dd.p.dm": "-0", "_dd.p.tid": "665e220a00000000", - "anthropic.request.api_key": "...key>", + "anthropic.request.api_key": "sk-...key>", "anthropic.request.messages.0.content.0.text": "Hello, Start all responses with your name Claude.", "anthropic.request.messages.0.content.0.type": "text", "anthropic.request.messages.0.content.1.text": "End all responses with [COPY, CLAUDE OVER AND OUT!]", diff --git a/tests/snapshots/tests.contrib.anthropic.test_anthropic.test_anthropic_llm_sync_stream.json b/tests/snapshots/tests.contrib.anthropic.test_anthropic.test_anthropic_llm_sync_stream.json index 4ccb4dce60a..1db5f3ca452 100644 --- a/tests/snapshots/tests.contrib.anthropic.test_anthropic.test_anthropic_llm_sync_stream.json +++ b/tests/snapshots/tests.contrib.anthropic.test_anthropic.test_anthropic_llm_sync_stream.json @@ -11,7 +11,7 @@ "meta": { "_dd.p.dm": "-0", "_dd.p.tid": "665e221c00000000", - "anthropic.request.api_key": "...key>", + "anthropic.request.api_key": "sk-...key>", "anthropic.request.messages.0.content.0.text": "Can you explain what Descartes meant by 'I think, therefore I am'?", "anthropic.request.messages.0.content.0.type": "text", "anthropic.request.messages.0.role": "user", diff --git a/tests/snapshots/tests.contrib.anthropic.test_anthropic_async.test_anthropic_llm_async_basic.json b/tests/snapshots/tests.contrib.anthropic.test_anthropic_async.test_anthropic_llm_async_basic.json index 
04b3d28502d..27698130a1b 100644 --- a/tests/snapshots/tests.contrib.anthropic.test_anthropic_async.test_anthropic_llm_async_basic.json +++ b/tests/snapshots/tests.contrib.anthropic.test_anthropic_async.test_anthropic_llm_async_basic.json @@ -11,7 +11,7 @@ "meta": { "_dd.p.dm": "-0", "_dd.p.tid": "665e2c0700000000", - "anthropic.request.api_key": "...key>", + "anthropic.request.api_key": "sk-...key>", "anthropic.request.messages.0.content.0.text": "Can you explain what Descartes meant by 'I think, therefore I am'?", "anthropic.request.messages.0.content.0.type": "text", "anthropic.request.messages.0.role": "user", diff --git a/tests/snapshots/tests.contrib.anthropic.test_anthropic_async.test_anthropic_llm_async_multiple_prompts_no_history.json b/tests/snapshots/tests.contrib.anthropic.test_anthropic_async.test_anthropic_llm_async_multiple_prompts_no_history.json index 5a61f296563..cb1f1c01df5 100644 --- a/tests/snapshots/tests.contrib.anthropic.test_anthropic_async.test_anthropic_llm_async_multiple_prompts_no_history.json +++ b/tests/snapshots/tests.contrib.anthropic.test_anthropic_async.test_anthropic_llm_async_multiple_prompts_no_history.json @@ -11,7 +11,7 @@ "meta": { "_dd.p.dm": "-0", "_dd.p.tid": "665e2bf800000000", - "anthropic.request.api_key": "...key>", + "anthropic.request.api_key": "sk-...key>", "anthropic.request.messages.0.content.0.text": "Hello, I am looking for information about some books!", "anthropic.request.messages.0.content.0.type": "text", "anthropic.request.messages.0.content.1.text": "Can you explain what Descartes meant by 'I think, therefore I am'?", diff --git a/tests/snapshots/tests.contrib.anthropic.test_anthropic_async.test_anthropic_llm_async_multiple_prompts_with_chat_history.json b/tests/snapshots/tests.contrib.anthropic.test_anthropic_async.test_anthropic_llm_async_multiple_prompts_with_chat_history.json index 9f9643424c2..cc88e38f4ee 100644 --- 
a/tests/snapshots/tests.contrib.anthropic.test_anthropic_async.test_anthropic_llm_async_multiple_prompts_with_chat_history.json +++ b/tests/snapshots/tests.contrib.anthropic.test_anthropic_async.test_anthropic_llm_async_multiple_prompts_with_chat_history.json @@ -11,7 +11,7 @@ "meta": { "_dd.p.dm": "-0", "_dd.p.tid": "665e2be900000000", - "anthropic.request.api_key": "...key>", + "anthropic.request.api_key": "sk-...key>", "anthropic.request.messages.0.content.0.text": "Hello, Start all responses with your name Claude.", "anthropic.request.messages.0.content.0.type": "text", "anthropic.request.messages.0.content.1.text": "End all responses with [COPY, CLAUDE OVER AND OUT!]", diff --git a/tests/snapshots/tests.contrib.anthropic.test_anthropic_async.test_anthropic_llm_async_stream.json b/tests/snapshots/tests.contrib.anthropic.test_anthropic_async.test_anthropic_llm_async_stream.json index a7f0a6ed204..469af165d6e 100644 --- a/tests/snapshots/tests.contrib.anthropic.test_anthropic_async.test_anthropic_llm_async_stream.json +++ b/tests/snapshots/tests.contrib.anthropic.test_anthropic_async.test_anthropic_llm_async_stream.json @@ -11,7 +11,7 @@ "meta": { "_dd.p.dm": "-0", "_dd.p.tid": "665e2bd900000000", - "anthropic.request.api_key": "...key>", + "anthropic.request.api_key": "sk-...key>", "anthropic.request.messages.0.content.0.text": "Can you explain what Descartes meant by 'I think, therefore I am'?", "anthropic.request.messages.0.content.0.type": "text", "anthropic.request.messages.0.role": "user", diff --git a/tests/snapshots/tests.contrib.anthropic.test_anthropic_async.test_anthropic_llm_error_async.json b/tests/snapshots/tests.contrib.anthropic.test_anthropic_async.test_anthropic_llm_error_async.json index 62e99f2c078..5439214068f 100644 --- a/tests/snapshots/tests.contrib.anthropic.test_anthropic_async.test_anthropic_llm_error_async.json +++ b/tests/snapshots/tests.contrib.anthropic.test_anthropic_async.test_anthropic_llm_error_async.json @@ -11,7 +11,7 @@ 
"meta": { "_dd.p.dm": "-0", "_dd.p.tid": "665e30d300000000", - "anthropic.request.api_key": "...key>", + "anthropic.request.api_key": "sk-...key>", "anthropic.request.model": "claude-3-opus-20240229", "anthropic.request.parameters": "{\"model\": \"claude-3-opus-20240229\", \"max_tokens\": 15}", "error.message": "Error code: 400 - {'type': 'error', 'error': {'type': 'invalid_request_error', 'message': 'messages.0: Input does not match the expected shape.'}}", From e2b294fdefa19f346f14afaad40c5bf0eb3df299 Mon Sep 17 00:00:00 2001 From: William Conti Date: Mon, 3 Jun 2024 19:05:32 -0400 Subject: [PATCH 12/33] add async llm tags --- ddtrace/contrib/anthropic/patch.py | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/ddtrace/contrib/anthropic/patch.py b/ddtrace/contrib/anthropic/patch.py index 64e77e9db89..8877607bc87 100644 --- a/ddtrace/contrib/anthropic/patch.py +++ b/ddtrace/contrib/anthropic/patch.py @@ -101,6 +101,8 @@ def traced_chat_model_generate(anthropic, pin, func, instance, args, kwargs): handle_non_streamed_response(integration, chat_completions, args, kwargs, span) except Exception: span.set_exc_info(*sys.exc_info()) + if integration.is_pc_sampled_llmobs(span): + integration.llmobs_set_tags(span=span, resp=chat_completions, args=args, kwargs=kwargs) raise finally: if integration.is_pc_sampled_llmobs(span): @@ -177,8 +179,13 @@ async def traced_async_chat_model_generate(anthropic, pin, func, instance, args, handle_non_streamed_response(integration, chat_completions, args, kwargs, span) except Exception: span.set_exc_info(*sys.exc_info()) + if integration.is_pc_sampled_llmobs(span): + integration.llmobs_set_tags(span=span, resp=chat_completions, args=args, kwargs=kwargs) raise finally: + if integration.is_pc_sampled_llmobs(span): + integration.llmobs_set_tags(span=span, resp=chat_completions, args=args, kwargs=kwargs) + span.finish() return chat_completions From 9ab3ce9aa8fc3cabd618820ebbae355f957c64d3 Mon Sep 17 00:00:00 2001 From: William Conti 
Date: Mon, 3 Jun 2024 19:48:19 -0400 Subject: [PATCH 13/33] add streaming --- ddtrace/contrib/anthropic/_streaming.py | 322 ++++++++++++++++++ ddtrace/contrib/anthropic/patch.py | 46 ++- ddtrace/llmobs/_integrations/anthropic.py | 18 +- ...hropic_completion_async_stream_helper.yaml | 195 +++++++++++ ...thropic_completion_sync_stream_helper.yaml | 195 +++++++++++ tests/contrib/anthropic/conftest.py | 1 + tests/contrib/anthropic/test_anthropic.py | 18 + .../contrib/anthropic/test_anthropic_async.py | 21 +- ...hropic.test_anthropic_llm_sync_stream.json | 19 +- ...test_anthropic_llm_sync_stream_helper.json | 39 +++ ...async.test_anthropic_llm_async_stream.json | 19 +- ...est_anthropic_llm_async_stream_helper.json | 39 +++ 12 files changed, 893 insertions(+), 39 deletions(-) create mode 100644 ddtrace/contrib/anthropic/_streaming.py create mode 100644 tests/contrib/anthropic/cassettes/anthropic_completion_async_stream_helper.yaml create mode 100644 tests/contrib/anthropic/cassettes/anthropic_completion_sync_stream_helper.yaml create mode 100644 tests/snapshots/tests.contrib.anthropic.test_anthropic.test_anthropic_llm_sync_stream_helper.json create mode 100644 tests/snapshots/tests.contrib.anthropic.test_anthropic_async.test_anthropic_llm_async_stream_helper.json diff --git a/ddtrace/contrib/anthropic/_streaming.py b/ddtrace/contrib/anthropic/_streaming.py new file mode 100644 index 00000000000..5bd7ccdb1e6 --- /dev/null +++ b/ddtrace/contrib/anthropic/_streaming.py @@ -0,0 +1,322 @@ +import sys +from typing import Dict +from typing import Tuple + +from ddtrace.internal.logger import get_logger +from ddtrace.vendor import wrapt + +from .utils import _get_attr + + +log = get_logger(__name__) + + +def handle_streamed_response(integration, resp, args, kwargs, span): + if _is_stream(resp): + return TracedAnthropicStream(resp, integration, span, args, kwargs) + elif _is_async_stream(resp): + return TracedAnthropicAsyncStream(resp, integration, span, args, kwargs) + elif 
_is_stream_manager(resp): + return TracedAnthropicStreamManager(resp, integration, span, args, kwargs) + elif _is_async_stream_manager(resp): + return TracedAnthropicAsyncStreamManager(resp, integration, span, args, kwargs) + + +class BaseTracedAnthropicStream(wrapt.ObjectProxy): + def __init__(self, wrapped, integration, span, args, kwargs): + super().__init__(wrapped) + n = kwargs.get("n", 1) or 1 + self._dd_span = span + self._streamed_chunks = [[] for _ in range(n)] + self._dd_integration = integration + self._kwargs = kwargs + self._args = args + + +class TracedAnthropicStream(BaseTracedAnthropicStream): + def __enter__(self): + self.__wrapped__.__enter__() + return self + + def __exit__(self, exc_type, exc_val, exc_tb): + self.__wrapped__.__exit__(exc_type, exc_val, exc_tb) + + def __iter__(self): + return self + + def __next__(self): + try: + chunk = self.__wrapped__.__next__() + self._streamed_chunks.append(chunk) + return chunk + except StopIteration: + _process_finished_stream( + self._dd_integration, self._dd_span, self._args, self._kwargs, self._streamed_chunks + ) + self._dd_span.finish() + raise + except Exception: + self._dd_span.set_exc_info(*sys.exc_info()) + self._dd_span.finish() + raise + + def _text_stream(self): + for chunk in self: + if chunk.type == "content_block_delta" and chunk.delta.type == "text_delta": + yield chunk.delta.text + + +class TracedAnthropicAsyncStream(BaseTracedAnthropicStream): + async def __aenter__(self): + await self.__wrapped__.__aenter__() + return self + + async def __aexit__(self, exc_type, exc_val, exc_tb): + await self.__wrapped__.__aexit__(exc_type, exc_val, exc_tb) + + def __aiter__(self): + return self + + async def __anext__(self): + try: + chunk = await self.__wrapped__.__anext__() + self._streamed_chunks.append(chunk) + return chunk + except StopAsyncIteration: + _process_finished_stream( + self._dd_integration, + self._dd_span, + self._args, + self._kwargs, + self._streamed_chunks, + ) + 
self._dd_span.finish() + raise + except Exception: + self._dd_span.set_exc_info(*sys.exc_info()) + self._dd_span.finish() + raise + + async def _text_stream(self): + async for chunk in self: + if chunk.type == "content_block_delta" and chunk.delta.type == "text_delta": + yield chunk.delta.text + + +class TracedAnthropicStreamManager(BaseTracedAnthropicStream): + def __enter__(self): + stream = self.__wrapped__.__enter__() + traced_stream = TracedAnthropicStream( + stream, + self._dd_integration, + self._dd_span, + self._args, + self._kwargs, + ) + traced_stream.text_stream = traced_stream._text_stream() + return traced_stream + + def __exit__(self, exc_type, exc_val, exc_tb): + self.__wrapped__.__exit__(exc_type, exc_val, exc_tb) + + +class TracedAnthropicAsyncStreamManager(BaseTracedAnthropicStream): + async def __aenter__(self): + stream = await self.__wrapped__.__aenter__() + traced_stream = TracedAnthropicAsyncStream( + stream, + self._dd_integration, + self._dd_span, + self._args, + self._kwargs, + ) + traced_stream.text_stream = traced_stream._text_stream() + return traced_stream + + async def __aexit__(self, exc_type, exc_val, exc_tb): + await self.__wrapped__.__aexit__(exc_type, exc_val, exc_tb) + + +def _process_finished_stream(integration, span, args, kwargs, streamed_chunks): + # builds the response message given streamed chunks and sets according span tags + resp_message = {} + try: + resp_message = _construct_message(streamed_chunks) + + if integration.is_pc_sampled_span(span): + _tag_streamed_chat_completion_response(integration, span, resp_message) + if integration.is_pc_sampled_llmobs(span): + integration.llmobs_set_tags( + span=span, + resp=resp_message, + args=args, + kwargs=kwargs, + ) + except Exception: + log.warning("Error processing streamed completion/chat response.", exc_info=True) + + +def _construct_message(streamed_chunks): + """Iteratively build up a response message from streamed chunks. 
+ + The resulting message dictionary is of form: + {"content": [{"type": [TYPE], "text": "[TEXT]"}], "role": "...", "finish_reason": "...", "usage": ...} + """ + message = {"content": []} + for chunk in streamed_chunks: + message = _extract_from_chunk(chunk, message) + + if "finish_reason" in message: + return message + return message + + +def _extract_from_chunk(chunk, message={}) -> Tuple[Dict[str, str], bool]: + """Constructs a chat message dictionary from streamed chunks given chunk type""" + TRANSFORMATIONS_BY_BLOCK_TYPE = { + "message_start": _on_message_start_chunk, + "content_block_start": _on_content_block_start_chunk, + "content_block_delta": _on_content_block_delta_chunk, + "message_delta": _on_message_delta_chunk, + } + chunk_type = getattr(chunk, "type", "") + transformation = TRANSFORMATIONS_BY_BLOCK_TYPE.get(chunk_type) + if transformation is not None: + message = transformation(chunk, message) + + return message + + +def _on_message_start_chunk(chunk, message): + # this is the starting chunk of the message + if getattr(chunk, "type", "") != "message_start": + return message + + chunk_message = getattr(chunk, "message", "") + if chunk_message: + content_text = "" + contents = getattr(chunk.message, "content", []) + for content in contents: + if content.type == "text": + content_text += content.text + content_type = "text" + elif content.type == "image": + content_text = "([IMAGE DETECTED])" + content_type = "image" + message["content"].append({"text": content_text, "type": content_type}) + + chunk_role = getattr(chunk_message, "role", "") + chunk_usage = getattr(chunk_message, "usage", "") + chunk_finish_reason = getattr(chunk_message, "stop_reason", "") + if chunk_role: + message["role"] = chunk_role + if chunk_usage: + message["usage"] = {} + message["usage"]["input_tokens"] = getattr(chunk_usage, "input_tokens", 0) + message["usage"]["output_tokens"] = getattr(chunk_usage, "output_tokens", 0) + if chunk_finish_reason: + message["finish_reason"] = 
chunk_finish_reason + return message + + +def _on_content_block_start_chunk(chunk, message): + # this is the start to a message.content block (possibly 1 of several content blocks) + if getattr(chunk, "type", "") != "content_block_start": + return message + + message["content"].append({"type": "text", "text": ""}) + return message + + +def _on_content_block_delta_chunk(chunk, message): + # delta events contain new content for the current message.content block + if getattr(chunk, "type", "") != "content_block_delta": + return message + + delta_block = getattr(chunk, "delta", "") + chunk_content = getattr(delta_block, "text", "") + if chunk_content: + message["content"][-1]["text"] += chunk_content + return message + + +def _on_message_delta_chunk(chunk, message): + # message delta events signal the end of the message + if getattr(chunk, "type", "") != "message_delta": + return message + + delta_block = getattr(chunk, "delta", "") + chunk_finish_reason = getattr(delta_block, "stop_reason", "") + if chunk_finish_reason: + message["finish_reason"] = chunk_finish_reason + message["content"][-1]["text"] = message["content"][-1]["text"].strip() + + chunk_usage = getattr(chunk, "usage", {}) + if chunk_usage: + message_usage = message.get("usage", {"output_tokens": 0, "input_tokens": 0}) + message_usage["output_tokens"] += getattr(chunk_usage, "output_tokens", 0) + message_usage["input_tokens"] += getattr(chunk_usage, "input_tokens", 0) + message["usage"] = message_usage + + return message + + +# To-Do: Handle error blocks appropriately +# def _on_error_chunk(chunk, message): +# # this is the start to a message.content block (possibly 1 of several content blocks) +# if getattr(chunk, "type", "") != "error": +# return message + +# message["content"].append({"type": "text", "text": ""}) +# return message + + +def _tag_streamed_chat_completion_response(integration, span, message): + """Tagging logic for streamed chat completions.""" + if message is None: + return + for idx, 
block in enumerate(message["content"]): + span.set_tag_str("anthropic.response.completions.content.%d.type" % idx, str(integration.trunc(block["type"]))) + span.set_tag_str("anthropic.response.completions.content.%d.text" % idx, str(integration.trunc(block["text"]))) + span.set_tag_str("anthropic.response.completions.role", str(message["role"])) + if message.get("finish_reason") is not None: + span.set_tag_str("anthropic.response.completions.finish_reason", str(message["finish_reason"])) + + usage = _get_attr(message, "usage", {}) + integration.record_usage(span, usage) + + +def _is_stream(resp): + # type: (...) -> bool + import anthropic + + if hasattr(anthropic, "Stream") and isinstance(resp, anthropic.Stream): + return True + return False + + +def _is_async_stream(resp): + # type: (...) -> bool + import anthropic + + if hasattr(anthropic, "AsyncStream") and isinstance(resp, anthropic.AsyncStream): + return True + return False + + +def _is_stream_manager(resp): + # type: (...) -> bool + import anthropic + + if hasattr(anthropic, "MessageStreamManager") and isinstance(resp, anthropic.MessageStreamManager): + return True + return False + + +def _is_async_stream_manager(resp): + # type: (...) 
-> bool + import anthropic + + if hasattr(anthropic, "AsyncMessageStreamManager") and isinstance(resp, anthropic.AsyncMessageStreamManager): + return True + return False diff --git a/ddtrace/contrib/anthropic/patch.py b/ddtrace/contrib/anthropic/patch.py index 8877607bc87..1e4bcbacfe2 100644 --- a/ddtrace/contrib/anthropic/patch.py +++ b/ddtrace/contrib/anthropic/patch.py @@ -13,6 +13,7 @@ from ddtrace.llmobs._integrations import AnthropicIntegration from ddtrace.pin import Pin +from ._streaming import handle_streamed_response from .utils import _extract_api_key from .utils import handle_non_streamed_response @@ -38,8 +39,9 @@ def get_version(): def traced_chat_model_generate(anthropic, pin, func, instance, args, kwargs): chat_messages = get_argument_value(args, kwargs, 0, "messages") integration = anthropic._datadog_integration + stream = False - operation_name = func.__name__ + operation_name = "stream" if "stream" in kwargs else func.__name__ span = integration.trace( pin, @@ -93,10 +95,13 @@ def traced_chat_model_generate(anthropic, pin, func, instance, args, kwargs): chat_completions = func(*args, **kwargs) - if isinstance(chat_completions, anthropic.Stream) or isinstance( - chat_completions, anthropic.lib.streaming._messages.MessageStreamManager + if ( + isinstance(chat_completions, anthropic.Stream) + or isinstance(chat_completions, anthropic.lib.streaming._messages.MessageStreamManager) + or isinstance(chat_completions, anthropic.lib.streaming._messages.AsyncMessageStreamManager) ): - pass + stream = True + return handle_streamed_response(integration, chat_completions, args, kwargs, span) else: handle_non_streamed_response(integration, chat_completions, args, kwargs, span) except Exception: @@ -105,10 +110,11 @@ def traced_chat_model_generate(anthropic, pin, func, instance, args, kwargs): integration.llmobs_set_tags(span=span, resp=chat_completions, args=args, kwargs=kwargs) raise finally: - if integration.is_pc_sampled_llmobs(span): - 
integration.llmobs_set_tags(span=span, resp=chat_completions, args=args, kwargs=kwargs) - - span.finish() + # we don't want to finish the span if it is a stream as it will get finished once the iterator is exhausted + if not stream: + if integration.is_pc_sampled_llmobs(span): + integration.llmobs_set_tags(span=span, resp=chat_completions, args=args, kwargs=kwargs) + span.finish() return chat_completions @@ -116,8 +122,9 @@ def traced_chat_model_generate(anthropic, pin, func, instance, args, kwargs): async def traced_async_chat_model_generate(anthropic, pin, func, instance, args, kwargs): chat_messages = get_argument_value(args, kwargs, 0, "messages") integration = anthropic._datadog_integration + stream = False - operation_name = func.__name__ + operation_name = "stream" if "stream" in kwargs else func.__name__ span = integration.trace( pin, @@ -171,10 +178,9 @@ async def traced_async_chat_model_generate(anthropic, pin, func, instance, args, chat_completions = await func(*args, **kwargs) - if isinstance(chat_completions, anthropic.AsyncStream) or isinstance( - chat_completions, anthropic.lib.streaming._messages.AsyncMessageStreamManager - ): - pass + if isinstance(chat_completions, anthropic.AsyncStream): + stream = True + return handle_streamed_response(integration, chat_completions, args, kwargs, span) else: handle_non_streamed_response(integration, chat_completions, args, kwargs, span) except Exception: @@ -183,10 +189,11 @@ async def traced_async_chat_model_generate(anthropic, pin, func, instance, args, integration.llmobs_set_tags(span=span, resp=chat_completions, args=args, kwargs=kwargs) raise finally: - if integration.is_pc_sampled_llmobs(span): - integration.llmobs_set_tags(span=span, resp=chat_completions, args=args, kwargs=kwargs) - - span.finish() + # we don't want to finish the span if it is a stream as it will get finished once the iterator is exhausted + if not stream: + if integration.is_pc_sampled_llmobs(span): + 
integration.llmobs_set_tags(span=span, resp=chat_completions, args=args, kwargs=kwargs) + span.finish() return chat_completions @@ -201,7 +208,10 @@ def patch(): anthropic._datadog_integration = integration wrap("anthropic", "resources.messages.Messages.create", traced_chat_model_generate(anthropic)) + wrap("anthropic", "resources.messages.Messages.stream", traced_chat_model_generate(anthropic)) wrap("anthropic", "resources.messages.AsyncMessages.create", traced_async_chat_model_generate(anthropic)) + # AsyncMessages.stream is a sync function + wrap("anthropic", "resources.messages.AsyncMessages.stream", traced_chat_model_generate(anthropic)) def unpatch(): @@ -211,6 +221,8 @@ def unpatch(): anthropic._datadog_patch = False unwrap(anthropic.resources.messages.Messages, "create") + unwrap(anthropic.resources.messages.Messages, "stream") unwrap(anthropic.resources.messages.AsyncMessages, "create") + unwrap(anthropic.resources.messages.AsyncMessages, "stream") delattr(anthropic, "_datadog_integration") diff --git a/ddtrace/llmobs/_integrations/anthropic.py b/ddtrace/llmobs/_integrations/anthropic.py index 1b9a271d4f6..c2fd6dd88c8 100644 --- a/ddtrace/llmobs/_integrations/anthropic.py +++ b/ddtrace/llmobs/_integrations/anthropic.py @@ -76,7 +76,7 @@ def llmobs_set_tags( output_messages = self._extract_output_message(resp) span.set_tag_str(OUTPUT_MESSAGES, json.dumps(output_messages)) - span.set_tag_str(METRICS, json.dumps(_get_llmobs_metrics_tags(span))) + span.set_tag_str(METRICS, json.dumps(AnthropicIntegration._get_llmobs_metrics_tags(span))) def _extract_input_message(self, messages): """Extract input messages from the stored prompt. 
@@ -127,7 +127,7 @@ def _extract_output_message(self, response): if isinstance(text, str): output_messages.append({"content": self.trunc(text), "role": role}) return output_messages - + def record_usage(self, span: Span, usage: Dict[str, Any]) -> None: if not usage: return @@ -140,10 +140,10 @@ def record_usage(self, span: Span, usage: Dict[str, Any]) -> None: if input_tokens is not None and output_tokens is not None: span.set_metric("anthropic.response.usage.total_tokens", input_tokens + output_tokens) - -def _get_llmobs_metrics_tags(span): - return { - "input_tokens": span.get_metric("anthropic.response.usage.input_tokens"), - "output_tokens": span.get_metric("anthropic.response.usage.output_tokens"), - "total_tokens": span.get_metric("anthropic.response.usage.total_tokens"), - } + @classmethod + def _get_llmobs_metrics_tags(cls, span): + return { + "input_tokens": span.get_metric("anthropic.response.usage.input_tokens"), + "output_tokens": span.get_metric("anthropic.response.usage.output_tokens"), + "total_tokens": span.get_metric("anthropic.response.usage.total_tokens"), + } diff --git a/tests/contrib/anthropic/cassettes/anthropic_completion_async_stream_helper.yaml b/tests/contrib/anthropic/cassettes/anthropic_completion_async_stream_helper.yaml new file mode 100644 index 00000000000..531a058d414 --- /dev/null +++ b/tests/contrib/anthropic/cassettes/anthropic_completion_async_stream_helper.yaml @@ -0,0 +1,195 @@ +interactions: +- request: + body: '{"max_tokens": 15, "messages": [{"role": "user", "content": "Can you explain + what Descartes meant by ''I think, therefore I am''?"}], "model": "claude-3-opus-20240229", + "stream": true}' + headers: + accept: + - application/json + accept-encoding: + - gzip, deflate + anthropic-version: + - '2023-06-01' + connection: + - keep-alive + content-length: + - '182' + content-type: + - application/json + host: + - api.anthropic.com + user-agent: + - AsyncAnthropic/Python 0.28.0 + x-stainless-arch: + - arm64 + 
x-stainless-async: + - async:asyncio + x-stainless-lang: + - python + x-stainless-os: + - MacOS + x-stainless-package-version: + - 0.28.0 + x-stainless-runtime: + - CPython + x-stainless-runtime-version: + - 3.10.13 + x-stainless-stream-helper: + - messages + method: POST + uri: https://api.anthropic.com/v1/messages + response: + body: + string: 'event: message_start + + data: {"type":"message_start","message":{"id":"msg_01NuXdck4ZpJDQsVrGiSfXKj","type":"message","role":"assistant","model":"claude-3-opus-20240229","content":[],"stop_reason":null,"stop_sequence":null,"usage":{"input_tokens":27,"output_tokens":1}} } + + + event: content_block_start + + data: {"type":"content_block_start","index":0,"content_block":{"type":"text","text":""} } + + + event: ping + + data: {"type": "ping"} + + + event: content_block_delta + + data: {"type":"content_block_delta","index":0,"delta":{"type":"text_delta","text":"The"} } + + + event: content_block_delta + + data: {"type":"content_block_delta","index":0,"delta":{"type":"text_delta","text":" + phrase"} } + + + event: content_block_delta + + data: {"type":"content_block_delta","index":0,"delta":{"type":"text_delta","text":" + \""} } + + + event: content_block_delta + + data: {"type":"content_block_delta","index":0,"delta":{"type":"text_delta","text":"I"} } + + + event: content_block_delta + + data: {"type":"content_block_delta","index":0,"delta":{"type":"text_delta","text":" + think"} } + + + event: content_block_delta + + data: {"type":"content_block_delta","index":0,"delta":{"type":"text_delta","text":","} } + + + event: content_block_delta + + data: {"type":"content_block_delta","index":0,"delta":{"type":"text_delta","text":" + therefore"} } + + + event: content_block_delta + + data: {"type":"content_block_delta","index":0,"delta":{"type":"text_delta","text":" + I"} } + + + event: content_block_delta + + data: {"type":"content_block_delta","index":0,"delta":{"type":"text_delta","text":" + am"} } + + + event: content_block_delta 
+ + data: {"type":"content_block_delta","index":0,"delta":{"type":"text_delta","text":"\""} } + + + event: content_block_delta + + data: {"type":"content_block_delta","index":0,"delta":{"type":"text_delta","text":" + ("} } + + + event: content_block_delta + + data: {"type":"content_block_delta","index":0,"delta":{"type":"text_delta","text":"originally"} } + + + event: content_block_delta + + data: {"type":"content_block_delta","index":0,"delta":{"type":"text_delta","text":" + in"} } + + + event: content_block_delta + + data: {"type":"content_block_delta","index":0,"delta":{"type":"text_delta","text":" + Latin"} } + + + event: content_block_delta + + data: {"type":"content_block_delta","index":0,"delta":{"type":"text_delta","text":" + as"} } + + + event: content_block_stop + + data: {"type":"content_block_stop","index":0 } + + + event: message_delta + + data: {"type":"message_delta","delta":{"stop_reason":"max_tokens","stop_sequence":null},"usage":{"output_tokens":15} } + + + event: message_stop + + data: {"type":"message_stop" } + + + ' + headers: + CF-Cache-Status: + - DYNAMIC + CF-RAY: + - 88e380a02a84726b-EWR + Cache-Control: + - no-cache + Connection: + - keep-alive + Content-Type: + - text/event-stream; charset=utf-8 + Date: + - Mon, 03 Jun 2024 23:35:57 GMT + Server: + - cloudflare + Transfer-Encoding: + - chunked + anthropic-ratelimit-requests-limit: + - '5' + anthropic-ratelimit-requests-remaining: + - '4' + anthropic-ratelimit-requests-reset: + - '2024-06-03T23:35:57Z' + anthropic-ratelimit-tokens-limit: + - '10000' + anthropic-ratelimit-tokens-remaining: + - '10000' + anthropic-ratelimit-tokens-reset: + - '2024-06-03T23:35:57Z' + request-id: + - req_018CVoMUAAn8vhLNvTRkmB98 + via: + - 1.1 google + status: + code: 200 + message: OK +version: 1 diff --git a/tests/contrib/anthropic/cassettes/anthropic_completion_sync_stream_helper.yaml b/tests/contrib/anthropic/cassettes/anthropic_completion_sync_stream_helper.yaml new file mode 100644 index 
00000000000..d87a6dabdb1 --- /dev/null +++ b/tests/contrib/anthropic/cassettes/anthropic_completion_sync_stream_helper.yaml @@ -0,0 +1,195 @@ +interactions: +- request: + body: '{"max_tokens": 15, "messages": [{"role": "user", "content": "Can you explain + what Descartes meant by ''I think, therefore I am''?"}], "model": "claude-3-opus-20240229", + "stream": true}' + headers: + accept: + - application/json + accept-encoding: + - gzip, deflate + anthropic-version: + - '2023-06-01' + connection: + - keep-alive + content-length: + - '182' + content-type: + - application/json + host: + - api.anthropic.com + user-agent: + - Anthropic/Python 0.28.0 + x-stainless-arch: + - arm64 + x-stainless-async: + - 'false' + x-stainless-lang: + - python + x-stainless-os: + - MacOS + x-stainless-package-version: + - 0.28.0 + x-stainless-runtime: + - CPython + x-stainless-runtime-version: + - 3.10.13 + x-stainless-stream-helper: + - messages + method: POST + uri: https://api.anthropic.com/v1/messages + response: + body: + string: 'event: message_start + + data: {"type":"message_start","message":{"id":"msg_017z3e6QB2VQhUBqF9zuLmiK","type":"message","role":"assistant","model":"claude-3-opus-20240229","content":[],"stop_reason":null,"stop_sequence":null,"usage":{"input_tokens":27,"output_tokens":1}} } + + + event: content_block_start + + data: {"type":"content_block_start","index":0,"content_block":{"type":"text","text":""} } + + + event: ping + + data: {"type": "ping"} + + + event: content_block_delta + + data: {"type":"content_block_delta","index":0,"delta":{"type":"text_delta","text":"The"} } + + + event: content_block_delta + + data: {"type":"content_block_delta","index":0,"delta":{"type":"text_delta","text":" + famous"} } + + + event: content_block_delta + + data: {"type":"content_block_delta","index":0,"delta":{"type":"text_delta","text":" + philosophical"} } + + + event: content_block_delta + + data: {"type":"content_block_delta","index":0,"delta":{"type":"text_delta","text":" + 
statement"} } + + + event: content_block_delta + + data: {"type":"content_block_delta","index":0,"delta":{"type":"text_delta","text":" + \""} } + + + event: content_block_delta + + data: {"type":"content_block_delta","index":0,"delta":{"type":"text_delta","text":"I"} } + + + event: content_block_delta + + data: {"type":"content_block_delta","index":0,"delta":{"type":"text_delta","text":" + think"} } + + + event: content_block_delta + + data: {"type":"content_block_delta","index":0,"delta":{"type":"text_delta","text":","} } + + + event: content_block_delta + + data: {"type":"content_block_delta","index":0,"delta":{"type":"text_delta","text":" + therefore"} } + + + event: content_block_delta + + data: {"type":"content_block_delta","index":0,"delta":{"type":"text_delta","text":" + I"} } + + + event: content_block_delta + + data: {"type":"content_block_delta","index":0,"delta":{"type":"text_delta","text":" + am"} } + + + event: content_block_delta + + data: {"type":"content_block_delta","index":0,"delta":{"type":"text_delta","text":"\""} } + + + event: content_block_delta + + data: {"type":"content_block_delta","index":0,"delta":{"type":"text_delta","text":" + ("}} + + + event: content_block_delta + + data: {"type":"content_block_delta","index":0,"delta":{"type":"text_delta","text":"originally"} } + + + event: content_block_delta + + data: {"type":"content_block_delta","index":0,"delta":{"type":"text_delta","text":" + in"} } + + + event: content_block_stop + + data: {"type":"content_block_stop","index":0} + + + event: message_delta + + data: {"type":"message_delta","delta":{"stop_reason":"max_tokens","stop_sequence":null},"usage":{"output_tokens":15} } + + + event: message_stop + + data: {"type":"message_stop" } + + + ' + headers: + CF-Cache-Status: + - DYNAMIC + CF-RAY: + - 88e3651e9ad342c3-EWR + Cache-Control: + - no-cache + Connection: + - keep-alive + Content-Type: + - text/event-stream; charset=utf-8 + Date: + - Mon, 03 Jun 2024 23:17:10 GMT + Server: + - 
cloudflare + Transfer-Encoding: + - chunked + anthropic-ratelimit-requests-limit: + - '5' + anthropic-ratelimit-requests-remaining: + - '4' + anthropic-ratelimit-requests-reset: + - '2024-06-03T23:17:57Z' + anthropic-ratelimit-tokens-limit: + - '10000' + anthropic-ratelimit-tokens-remaining: + - '10000' + anthropic-ratelimit-tokens-reset: + - '2024-06-03T23:17:57Z' + request-id: + - req_01DAYFsZKJLWzyfyT5rtXYVt + via: + - 1.1 google + status: + code: 200 + message: OK +version: 1 diff --git a/tests/contrib/anthropic/conftest.py b/tests/contrib/anthropic/conftest.py index cc37b34678d..788328f21a6 100644 --- a/tests/contrib/anthropic/conftest.py +++ b/tests/contrib/anthropic/conftest.py @@ -44,6 +44,7 @@ def mock_tracer(ddtrace_global_config, anthropic): LLMObs.disable() LLMObs.enable(_tracer=mock_tracer, integrations_enabled=False) yield mock_tracer + LLMObs.disable() @pytest.fixture diff --git a/tests/contrib/anthropic/test_anthropic.py b/tests/contrib/anthropic/test_anthropic.py index 2ac27b1dfc7..a4caa4b725c 100644 --- a/tests/contrib/anthropic/test_anthropic.py +++ b/tests/contrib/anthropic/test_anthropic.py @@ -131,3 +131,21 @@ def test_anthropic_llm_sync_stream(anthropic, request_vcr): ) for _ in stream: pass + + +@pytest.mark.snapshot() +def test_anthropic_llm_sync_stream_helper(anthropic, request_vcr): + llm = anthropic.Anthropic() + with request_vcr.use_cassette("anthropic_completion_sync_stream_helper.yaml"): + with llm.messages.stream( + max_tokens=15, + messages=[ + { + "role": "user", + "content": "Can you explain what Descartes meant by 'I think, therefore I am'?", + } + ], + model="claude-3-opus-20240229", + ) as stream: + for _ in stream.text_stream: + pass diff --git a/tests/contrib/anthropic/test_anthropic_async.py b/tests/contrib/anthropic/test_anthropic_async.py index f56576e1684..b894d504119 100644 --- a/tests/contrib/anthropic/test_anthropic_async.py +++ b/tests/contrib/anthropic/test_anthropic_async.py @@ -119,7 +119,7 @@ async def 
test_anthropic_llm_error_async(anthropic, request_vcr, snapshot_contex @pytest.mark.asyncio async def test_anthropic_llm_async_stream(anthropic, request_vcr, snapshot_context): - with snapshot_context(ignores=["meta.error.stack"]): + with snapshot_context(): llm = anthropic.AsyncAnthropic() with request_vcr.use_cassette("anthropic_completion_async_stream.yaml"): stream = await llm.messages.create( @@ -140,3 +140,22 @@ async def test_anthropic_llm_async_stream(anthropic, request_vcr, snapshot_conte ) async for _ in stream: pass + + +@pytest.mark.asyncio +async def test_anthropic_llm_async_stream_helper(anthropic, request_vcr, snapshot_context): + with snapshot_context(): + llm = anthropic.AsyncAnthropic() + with request_vcr.use_cassette("anthropic_completion_async_stream_helper.yaml"): + async with llm.messages.stream( + max_tokens=15, + messages=[ + { + "role": "user", + "content": "Can you explain what Descartes meant by 'I think, therefore I am'?", + } + ], + model="claude-3-opus-20240229", + ) as stream: + async for _ in stream.text_stream: + pass diff --git a/tests/snapshots/tests.contrib.anthropic.test_anthropic.test_anthropic_llm_sync_stream.json b/tests/snapshots/tests.contrib.anthropic.test_anthropic.test_anthropic_llm_sync_stream.json index 1db5f3ca452..198cbfde52c 100644 --- a/tests/snapshots/tests.contrib.anthropic.test_anthropic.test_anthropic_llm_sync_stream.json +++ b/tests/snapshots/tests.contrib.anthropic.test_anthropic.test_anthropic_llm_sync_stream.json @@ -2,7 +2,7 @@ { "name": "anthropic.request", "service": "", - "resource": "Messages.create", + "resource": "Messages.stream", "trace_id": 0, "span_id": 1, "parent_id": 0, @@ -10,23 +10,30 @@ "error": 0, "meta": { "_dd.p.dm": "-0", - "_dd.p.tid": "665e221c00000000", + "_dd.p.tid": "665e4ebb00000000", "anthropic.request.api_key": "sk-...key>", "anthropic.request.messages.0.content.0.text": "Can you explain what Descartes meant by 'I think, therefore I am'?", 
"anthropic.request.messages.0.content.0.type": "text", "anthropic.request.messages.0.role": "user", "anthropic.request.model": "claude-3-opus-20240229", "anthropic.request.parameters": "{\"model\": \"claude-3-opus-20240229\", \"max_tokens\": 15, \"stream\": true}", + "anthropic.response.completions.content.0.text": "The phrase \"I think, therefore I am\" (originally in Latin as", + "anthropic.response.completions.content.0.type": "text", + "anthropic.response.completions.finish_reason": "max_tokens", + "anthropic.response.completions.role": "assistant", "language": "python", - "runtime-id": "b52cab756a314569a6d74fe80724c91a" + "runtime-id": "6513257167e243f6aae19abf6f061700" }, "metrics": { "_dd.measured": 1, "_dd.top_level": 1, "_dd.tracer_kr": 1.0, "_sampling_priority_v1": 1, - "process_id": 95434 + "anthropic.response.usage.input_tokens": 27, + "anthropic.response.usage.output_tokens": 16, + "anthropic.response.usage.total_tokens": 43, + "process_id": 33643 }, - "duration": 1912334000, - "start": 1717445148270890000 + "duration": 10432000, + "start": 1717456571355149000 }]] diff --git a/tests/snapshots/tests.contrib.anthropic.test_anthropic.test_anthropic_llm_sync_stream_helper.json b/tests/snapshots/tests.contrib.anthropic.test_anthropic.test_anthropic_llm_sync_stream_helper.json new file mode 100644 index 00000000000..069078aa916 --- /dev/null +++ b/tests/snapshots/tests.contrib.anthropic.test_anthropic.test_anthropic_llm_sync_stream_helper.json @@ -0,0 +1,39 @@ +[[ + { + "name": "anthropic.request", + "service": "", + "resource": "Messages.stream", + "trace_id": 0, + "span_id": 1, + "parent_id": 0, + "type": "", + "error": 0, + "meta": { + "_dd.p.dm": "-0", + "_dd.p.tid": "665e4ef200000000", + "anthropic.request.api_key": "sk-...key>", + "anthropic.request.messages.0.content.0.text": "Can you explain what Descartes meant by 'I think, therefore I am'?", + "anthropic.request.messages.0.content.0.type": "text", + "anthropic.request.messages.0.role": "user", + 
"anthropic.request.model": "claude-3-opus-20240229", + "anthropic.request.parameters": "{\"max_tokens\": 15, \"model\": \"claude-3-opus-20240229\"}", + "anthropic.response.completions.content.0.text": "The famous philosophical statement \"I think, therefore I am\" (originally in", + "anthropic.response.completions.content.0.type": "text", + "anthropic.response.completions.finish_reason": "max_tokens", + "anthropic.response.completions.role": "assistant", + "language": "python", + "runtime-id": "e0f085664f904f43864b4e295d95052b" + }, + "metrics": { + "_dd.measured": 1, + "_dd.top_level": 1, + "_dd.tracer_kr": 1.0, + "_sampling_priority_v1": 1, + "anthropic.response.usage.input_tokens": 27, + "anthropic.response.usage.output_tokens": 16, + "anthropic.response.usage.total_tokens": 43, + "process_id": 36523 + }, + "duration": 1474332000, + "start": 1717456626825122000 + }]] diff --git a/tests/snapshots/tests.contrib.anthropic.test_anthropic_async.test_anthropic_llm_async_stream.json b/tests/snapshots/tests.contrib.anthropic.test_anthropic_async.test_anthropic_llm_async_stream.json index 469af165d6e..b5f067eaaa7 100644 --- a/tests/snapshots/tests.contrib.anthropic.test_anthropic_async.test_anthropic_llm_async_stream.json +++ b/tests/snapshots/tests.contrib.anthropic.test_anthropic_async.test_anthropic_llm_async_stream.json @@ -2,7 +2,7 @@ { "name": "anthropic.request", "service": "", - "resource": "AsyncMessages.create", + "resource": "AsyncMessages.stream", "trace_id": 0, "span_id": 1, "parent_id": 0, @@ -10,23 +10,30 @@ "error": 0, "meta": { "_dd.p.dm": "-0", - "_dd.p.tid": "665e2bd900000000", + "_dd.p.tid": "665e4f4000000000", "anthropic.request.api_key": "sk-...key>", "anthropic.request.messages.0.content.0.text": "Can you explain what Descartes meant by 'I think, therefore I am'?", "anthropic.request.messages.0.content.0.type": "text", "anthropic.request.messages.0.role": "user", "anthropic.request.model": "claude-3-opus-20240229", "anthropic.request.parameters": 
"{\"model\": \"claude-3-opus-20240229\", \"max_tokens\": 15, \"stream\": true}", + "anthropic.response.completions.content.0.text": "The phrase \"I think, therefore I am\" (originally in Latin as", + "anthropic.response.completions.content.0.type": "text", + "anthropic.response.completions.finish_reason": "max_tokens", + "anthropic.response.completions.role": "assistant", "language": "python", - "runtime-id": "d4ce0d37e8c64f0aa013b92c180cbb42" + "runtime-id": "bae10fd37b8b4c7f850bd5d262a75d78" }, "metrics": { "_dd.measured": 1, "_dd.top_level": 1, "_dd.tracer_kr": 1.0, "_sampling_priority_v1": 1, - "process_id": 27167 + "anthropic.response.usage.input_tokens": 27, + "anthropic.response.usage.output_tokens": 16, + "anthropic.response.usage.total_tokens": 43, + "process_id": 40600 }, - "duration": 1165397000, - "start": 1717447641142774000 + "duration": 34082000, + "start": 1717456704161290000 }]] diff --git a/tests/snapshots/tests.contrib.anthropic.test_anthropic_async.test_anthropic_llm_async_stream_helper.json b/tests/snapshots/tests.contrib.anthropic.test_anthropic_async.test_anthropic_llm_async_stream_helper.json new file mode 100644 index 00000000000..bdaf0439fd1 --- /dev/null +++ b/tests/snapshots/tests.contrib.anthropic.test_anthropic_async.test_anthropic_llm_async_stream_helper.json @@ -0,0 +1,39 @@ +[[ + { + "name": "anthropic.request", + "service": "", + "resource": "AsyncMessages.stream", + "trace_id": 0, + "span_id": 1, + "parent_id": 0, + "type": "", + "error": 0, + "meta": { + "_dd.p.dm": "-0", + "_dd.p.tid": "665e542e00000000", + "anthropic.request.api_key": "sk-...key>", + "anthropic.request.messages.0.content.0.text": "Can you explain what Descartes meant by 'I think, therefore I am'?", + "anthropic.request.messages.0.content.0.type": "text", + "anthropic.request.messages.0.role": "user", + "anthropic.request.model": "claude-3-opus-20240229", + "anthropic.request.parameters": "{\"max_tokens\": 15, \"model\": \"claude-3-opus-20240229\"}", + 
"anthropic.response.completions.content.0.text": "The phrase \"I think, therefore I am\" (originally in Latin as", + "anthropic.response.completions.content.0.type": "text", + "anthropic.response.completions.finish_reason": "max_tokens", + "anthropic.response.completions.role": "assistant", + "language": "python", + "runtime-id": "f59aff6a77ac4933a3d59e282a8126b2" + }, + "metrics": { + "_dd.measured": 1, + "_dd.top_level": 1, + "_dd.tracer_kr": 1.0, + "_sampling_priority_v1": 1, + "anthropic.response.usage.input_tokens": 27, + "anthropic.response.usage.output_tokens": 16, + "anthropic.response.usage.total_tokens": 43, + "process_id": 5638 + }, + "duration": 7087734000, + "start": 1717457966661153000 + }]] From 50d1906097cac33310e583f09b03a4e63af12beb Mon Sep 17 00:00:00 2001 From: William Conti Date: Tue, 4 Jun 2024 10:45:00 -0400 Subject: [PATCH 14/33] fix async error code --- tests/contrib/anthropic/test_anthropic_async.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/contrib/anthropic/test_anthropic_async.py b/tests/contrib/anthropic/test_anthropic_async.py index f56576e1684..5b723e0fc0c 100644 --- a/tests/contrib/anthropic/test_anthropic_async.py +++ b/tests/contrib/anthropic/test_anthropic_async.py @@ -109,7 +109,7 @@ async def test_anthropic_llm_async_multiple_prompts_with_chat_history(anthropic, @pytest.mark.asyncio async def test_anthropic_llm_error_async(anthropic, request_vcr, snapshot_context): - with snapshot_context(): + with snapshot_context(ignores=["meta.error.stack"]): llm = anthropic.AsyncAnthropic() invalid_error = anthropic.BadRequestError with pytest.raises(invalid_error): @@ -119,7 +119,7 @@ async def test_anthropic_llm_error_async(anthropic, request_vcr, snapshot_contex @pytest.mark.asyncio async def test_anthropic_llm_async_stream(anthropic, request_vcr, snapshot_context): - with snapshot_context(ignores=["meta.error.stack"]): + with snapshot_context(): llm = anthropic.AsyncAnthropic() with 
request_vcr.use_cassette("anthropic_completion_async_stream.yaml"): stream = await llm.messages.create( From 3773cfa0246f7217093701bda70d92db3be44d5d Mon Sep 17 00:00:00 2001 From: William Conti Date: Tue, 4 Jun 2024 10:58:38 -0400 Subject: [PATCH 15/33] add more tests --- tests/contrib/anthropic/conftest.py | 2 -- tests/contrib/anthropic/test_anthropic.py | 6 ++++++ tests/contrib/anthropic/test_anthropic_async.py | 6 ++++++ 3 files changed, 12 insertions(+), 2 deletions(-) diff --git a/tests/contrib/anthropic/conftest.py b/tests/contrib/anthropic/conftest.py index 788328f21a6..ad3b49dfdad 100644 --- a/tests/contrib/anthropic/conftest.py +++ b/tests/contrib/anthropic/conftest.py @@ -14,8 +14,6 @@ from tests.utils import override_env from tests.utils import override_global_config -from .utils import get_request_vcr - @pytest.fixture def ddtrace_config_anthropic(): diff --git a/tests/contrib/anthropic/test_anthropic.py b/tests/contrib/anthropic/test_anthropic.py index a4caa4b725c..311bbdb17ba 100644 --- a/tests/contrib/anthropic/test_anthropic.py +++ b/tests/contrib/anthropic/test_anthropic.py @@ -149,3 +149,9 @@ def test_anthropic_llm_sync_stream_helper(anthropic, request_vcr): ) as stream: for _ in stream.text_stream: pass + + message = stream.get_final_message() + assert message is not None + + message = stream.get_final_text() + assert message is not None diff --git a/tests/contrib/anthropic/test_anthropic_async.py b/tests/contrib/anthropic/test_anthropic_async.py index b894d504119..fc16fa6fdb4 100644 --- a/tests/contrib/anthropic/test_anthropic_async.py +++ b/tests/contrib/anthropic/test_anthropic_async.py @@ -159,3 +159,9 @@ async def test_anthropic_llm_async_stream_helper(anthropic, request_vcr, snapsho ) as stream: async for _ in stream.text_stream: pass + + message = await stream.get_final_message() + assert message is not None + + message = await stream.get_final_text() + assert message is not None From a5ffc9c088a4f25fa0a0cbcdea47d5ba91c38f36 Mon Sep 17 
00:00:00 2001 From: William Conti Date: Tue, 4 Jun 2024 11:31:47 -0400 Subject: [PATCH 16/33] fix riotfile --- riotfile.py | 20 ++++++++++---------- 1 file changed, 10 insertions(+), 10 deletions(-) diff --git a/riotfile.py b/riotfile.py index 1b0f89590ce..19125b00c73 100644 --- a/riotfile.py +++ b/riotfile.py @@ -2516,16 +2516,6 @@ def select_pys(min_version=MIN_PYTHON_VERSION, max_version=MAX_PYTHON_VERSION): "cohere": latest, } ), - Venv( - name="anthropic", - command="pytest {cmdargs} tests/contrib/anthropic", - pys=select_pys(min_version="3.7", max_version="3.11"), - pkgs={ - "pytest-asyncio": latest, - "vcrpy": latest, - "anthropic": latest, - }, - ), Venv( pkgs={ "langchain": latest, @@ -2543,6 +2533,16 @@ def select_pys(min_version=MIN_PYTHON_VERSION, max_version=MAX_PYTHON_VERSION): ), ], ), + Venv( + name="anthropic", + command="pytest {cmdargs} tests/contrib/anthropic", + pys=select_pys(min_version="3.7", max_version="3.11"), + pkgs={ + "pytest-asyncio": latest, + "vcrpy": latest, + "anthropic": latest, + }, + ), Venv( name="logbook", pys=select_pys(), From 569bdcec2423bf862ea5a707b330ffa99a8043be Mon Sep 17 00:00:00 2001 From: William Conti Date: Tue, 4 Jun 2024 13:52:30 -0400 Subject: [PATCH 17/33] add tools and refactor PR --- ddtrace/contrib/anthropic/__init__.py | 3 +- ddtrace/contrib/anthropic/patch.py | 21 +- riotfile.py | 2 +- .../cassettes/anthropic_completion_async.yaml | 85 ----- ...thropic_completion_async_multi_prompt.yaml | 86 ----- ..._async_multi_prompt_with_chat_history.yaml | 89 ------ .../anthropic_completion_async_stream.yaml | 193 ------------ .../anthropic_completion_error_async.yaml | 67 ---- ...=> anthropic_completion_multi_prompt.yaml} | 0 ...etion_multi_prompt_with_chat_history.yaml} | 0 ....yaml => anthropic_completion_stream.yaml} | 0 .../cassettes/anthropic_completion_sync.yaml | 85 ----- tests/contrib/anthropic/test_anthropic.py | 297 +++++++++++++++++- .../contrib/anthropic/test_anthropic_async.py | 142 --------- 
.../contrib/anthropic/test_anthropic_patch.py | 3 + tests/contrib/anthropic/utils.py | 43 ++- ...ic.test_anthropic.test_anthropic_llm.json} | 10 +- ...t_anthropic.test_anthropic_llm_basic.json} | 12 +- ....test_anthropic_llm_multiple_prompts.json} | 10 +- ...opic_llm_multiple_prompts_no_history.json} | 10 +- ...m_multiple_prompts_with_chat_history.json} | 10 +- ..._anthropic.test_anthropic_llm_stream.json} | 10 +- ...st_anthropic.test_anthropic_llm_tools.json | 35 +++ ...nc_multiple_prompts_with_chat_history.json | 49 --- ...async.test_anthropic_llm_async_stream.json | 32 -- ..._async.test_anthropic_llm_error_async.json | 32 -- 26 files changed, 407 insertions(+), 919 deletions(-) delete mode 100644 tests/contrib/anthropic/cassettes/anthropic_completion_async.yaml delete mode 100644 tests/contrib/anthropic/cassettes/anthropic_completion_async_multi_prompt.yaml delete mode 100644 tests/contrib/anthropic/cassettes/anthropic_completion_async_multi_prompt_with_chat_history.yaml delete mode 100644 tests/contrib/anthropic/cassettes/anthropic_completion_async_stream.yaml delete mode 100644 tests/contrib/anthropic/cassettes/anthropic_completion_error_async.yaml rename tests/contrib/anthropic/cassettes/{anthropic_completion_sync_multi_prompt.yaml => anthropic_completion_multi_prompt.yaml} (100%) rename tests/contrib/anthropic/cassettes/{anthropic_completion_sync_multi_prompt_with_chat_history.yaml => anthropic_completion_multi_prompt_with_chat_history.yaml} (100%) rename tests/contrib/anthropic/cassettes/{anthropic_completion_sync_stream.yaml => anthropic_completion_stream.yaml} (100%) delete mode 100644 tests/contrib/anthropic/cassettes/anthropic_completion_sync.yaml delete mode 100644 tests/contrib/anthropic/test_anthropic_async.py rename tests/snapshots/{tests.contrib.anthropic.test_anthropic.test_anthropic_llm_sync.json => tests.contrib.anthropic.test_anthropic.test_anthropic_llm.json} (87%) rename 
tests/snapshots/{tests.contrib.anthropic.test_anthropic_async.test_anthropic_llm_async_basic.json => tests.contrib.anthropic.test_anthropic.test_anthropic_llm_basic.json} (78%) rename tests/snapshots/{tests.contrib.anthropic.test_anthropic.test_anthropic_llm_sync_multiple_prompts.json => tests.contrib.anthropic.test_anthropic.test_anthropic_llm_multiple_prompts.json} (89%) rename tests/snapshots/{tests.contrib.anthropic.test_anthropic_async.test_anthropic_llm_async_multiple_prompts_no_history.json => tests.contrib.anthropic.test_anthropic.test_anthropic_llm_multiple_prompts_no_history.json} (89%) rename tests/snapshots/{tests.contrib.anthropic.test_anthropic.test_anthropic_llm_sync_multiple_prompts_with_chat_history.json => tests.contrib.anthropic.test_anthropic.test_anthropic_llm_multiple_prompts_with_chat_history.json} (92%) rename tests/snapshots/{tests.contrib.anthropic.test_anthropic.test_anthropic_llm_sync_stream.json => tests.contrib.anthropic.test_anthropic.test_anthropic_llm_stream.json} (82%) create mode 100644 tests/snapshots/tests.contrib.anthropic.test_anthropic.test_anthropic_llm_tools.json delete mode 100644 tests/snapshots/tests.contrib.anthropic.test_anthropic_async.test_anthropic_llm_async_multiple_prompts_with_chat_history.json delete mode 100644 tests/snapshots/tests.contrib.anthropic.test_anthropic_async.test_anthropic_llm_async_stream.json delete mode 100644 tests/snapshots/tests.contrib.anthropic.test_anthropic_async.test_anthropic_llm_error_async.json diff --git a/ddtrace/contrib/anthropic/__init__.py b/ddtrace/contrib/anthropic/__init__.py index 7ffe1baf5c1..420358f1178 100644 --- a/ddtrace/contrib/anthropic/__init__.py +++ b/ddtrace/contrib/anthropic/__init__.py @@ -22,8 +22,7 @@ The Anthropic integration is enabled automatically when you use :ref:`ddtrace-run` or :ref:`import ddtrace.auto`. -Note that these commands also enable the ``requests`` and ``aiohttp`` -integrations which trace HTTP requests from the Anthropic library. 
+Note that these commands also enable the ``httpx`` integration which traces HTTP requests from the Anthropic library. Alternatively, use :func:`patch() ` to manually enable the Anthropic integration:: diff --git a/ddtrace/contrib/anthropic/patch.py b/ddtrace/contrib/anthropic/patch.py index 0c764ac6d2f..f5c701e96fa 100644 --- a/ddtrace/contrib/anthropic/patch.py +++ b/ddtrace/contrib/anthropic/patch.py @@ -14,6 +14,7 @@ from ddtrace.pin import Pin from .utils import _extract_api_key +from .utils import _get_attr from .utils import handle_non_streamed_response @@ -69,12 +70,12 @@ def traced_chat_model_generate(anthropic, pin, func, instance, args, kwargs): elif isinstance(message.get("content", None), list): for block_idx, block in enumerate(message.get("content", [])): if integration.is_pc_sampled_span(span): - if block.get("type", None) == "text": + if _get_attr(block, "type", None) == "text": span.set_tag_str( "anthropic.request.messages.%d.content.%d.text" % (message_idx, block_idx), - integration.trunc(str(block.get("text", ""))), + integration.trunc(str(_get_attr(block, "text", ""))), ) - elif block.get("type", None) == "image": + elif _get_attr(block, "type", None) == "image": span.set_tag_str( "anthropic.request.messages.%d.content.%d.text" % (message_idx, block_idx), "([IMAGE DETECTED])", @@ -82,13 +83,13 @@ def traced_chat_model_generate(anthropic, pin, func, instance, args, kwargs): span.set_tag_str( "anthropic.request.messages.%d.content.%d.type" % (message_idx, block_idx), - block.get("type", "text"), + _get_attr(block, "type", "text"), ) span.set_tag_str( "anthropic.request.messages.%d.role" % (message_idx), message.get("role", ""), ) - params_to_tag = {k: v for k, v in kwargs.items() if k != "messages"} + params_to_tag = {k: v for k, v in kwargs.items() if k not in ["messages", "model", "tools"]} span.set_tag_str("anthropic.request.parameters", json.dumps(params_to_tag)) chat_completions = func(*args, **kwargs) @@ -142,12 +143,12 @@ async def
traced_async_chat_model_generate(anthropic, pin, func, instance, args, elif isinstance(message.get("content", None), list): for block_idx, block in enumerate(message.get("content", [])): if integration.is_pc_sampled_span(span): - if block.get("type", None) == "text": + if _get_attr(block, "type", None) == "text": span.set_tag_str( "anthropic.request.messages.%d.content.%d.text" % (message_idx, block_idx), - integration.trunc(str(block.get("text", ""))), + integration.trunc(str(_get_attr(block, "text", ""))), ) - elif block.get("type", None) == "image": + elif _get_attr(block, "type", None) == "image": span.set_tag_str( "anthropic.request.messages.%d.content.%d.text" % (message_idx, block_idx), "([IMAGE DETECTED])", @@ -155,13 +156,13 @@ async def traced_async_chat_model_generate(anthropic, pin, func, instance, args, span.set_tag_str( "anthropic.request.messages.%d.content.%d.type" % (message_idx, block_idx), - block.get("type", "text"), + _get_attr(block, "type", "text"), ) span.set_tag_str( "anthropic.request.messages.%d.role" % (message_idx), message.get("role", ""), ) - params_to_tag = {k: v for k, v in kwargs.items() if k != "messages"} + params_to_tag = {k: v for k, v in kwargs.items() if k not in ["messages", "model", "tools"]} span.set_tag_str("anthropic.request.parameters", json.dumps(params_to_tag)) chat_completions = await func(*args, **kwargs) diff --git a/riotfile.py b/riotfile.py index 19125b00c73..842f0c1cefe 100644 --- a/riotfile.py +++ b/riotfile.py @@ -2536,7 +2536,7 @@ def select_pys(min_version=MIN_PYTHON_VERSION, max_version=MAX_PYTHON_VERSION): Venv( name="anthropic", command="pytest {cmdargs} tests/contrib/anthropic", - pys=select_pys(min_version="3.7", max_version="3.11"), + pys=select_pys(min_version="3.7", max_version="3.12"), pkgs={ "pytest-asyncio": latest, "vcrpy": latest, diff --git a/tests/contrib/anthropic/cassettes/anthropic_completion_async.yaml b/tests/contrib/anthropic/cassettes/anthropic_completion_async.yaml deleted file mode 
100644 index fe442975553..00000000000 --- a/tests/contrib/anthropic/cassettes/anthropic_completion_async.yaml +++ /dev/null @@ -1,85 +0,0 @@ -interactions: -- request: - body: '{"max_tokens": 15, "messages": [{"role": "user", "content": [{"type": "text", - "text": "Can you explain what Descartes meant by ''I think, therefore I am''?"}]}], - "model": "claude-3-opus-20240229"}' - headers: - accept: - - application/json - accept-encoding: - - gzip, deflate - anthropic-version: - - '2023-06-01' - connection: - - keep-alive - content-length: - - '194' - content-type: - - application/json - host: - - api.anthropic.com - user-agent: - - AsyncAnthropic/Python 0.28.0 - x-stainless-arch: - - arm64 - x-stainless-async: - - async:asyncio - x-stainless-lang: - - python - x-stainless-os: - - MacOS - x-stainless-package-version: - - 0.28.0 - x-stainless-runtime: - - CPython - x-stainless-runtime-version: - - 3.10.13 - method: POST - uri: https://api.anthropic.com/v1/messages - response: - body: - string: !!binary | - H4sIAAAAAAAAA0xPTUvDQBD9K8ucPGwgjYq4Z1Ea8BCIh2IlLMk0u3SzEzOz2Dbkv0uKBU8P3hfv - zeA7MDBw3+Sb+tVd+pfqXL59DGW5q3521Xt9AQ1yHnF1IbPtETRMFFbCMnsWGwU0DNRhAANtsKnD - 7D6jMXFW5MVDXhTPoKGlKBgFzOd8KxQ8rdErGKgdqoMdKLEanQ/ENDrf2qBYrOCAUdQetkqcj0et - xOGEB5pQbZUd9qDuaPK9jzaEs/IRli8NLDQ2E1qmuM63p0boiJHhT2L8ThhbBBNTCBrS9Z6Zwccx - yc1siicNlOQ/tXlcll8AAAD//wMAZbFxUjwBAAA= - headers: - CF-Cache-Status: - - DYNAMIC - CF-RAY: - - 88e26f1e58dac404-EWR - Connection: - - keep-alive - Content-Encoding: - - gzip - Content-Type: - - application/json - Date: - - Mon, 03 Jun 2024 20:29:14 GMT - Server: - - cloudflare - Transfer-Encoding: - - chunked - anthropic-ratelimit-requests-limit: - - '5' - anthropic-ratelimit-requests-remaining: - - '4' - anthropic-ratelimit-requests-reset: - - '2024-06-03T20:29:57Z' - anthropic-ratelimit-tokens-limit: - - '10000' - anthropic-ratelimit-tokens-remaining: - - '10000' - anthropic-ratelimit-tokens-reset: - - '2024-06-03T20:29:57Z' - request-id: - - 
req_01N5Z3LdCjQJJK8Y1PMWwNKE - via: - - 1.1 google - x-cloud-trace-context: - - 55482147ed863c2794cecea1f2d77645 - status: - code: 200 - message: OK -version: 1 diff --git a/tests/contrib/anthropic/cassettes/anthropic_completion_async_multi_prompt.yaml b/tests/contrib/anthropic/cassettes/anthropic_completion_async_multi_prompt.yaml deleted file mode 100644 index bf50aa0baa1..00000000000 --- a/tests/contrib/anthropic/cassettes/anthropic_completion_async_multi_prompt.yaml +++ /dev/null @@ -1,86 +0,0 @@ -interactions: -- request: - body: '{"max_tokens": 15, "messages": [{"role": "user", "content": [{"type": "text", - "text": "Hello, I am looking for information about some books!"}, {"type": "text", - "text": "Can you explain what Descartes meant by ''I think, therefore I am''?"}]}], - "model": "claude-3-opus-20240229"}' - headers: - accept: - - application/json - accept-encoding: - - gzip, deflate - anthropic-version: - - '2023-06-01' - connection: - - keep-alive - content-length: - - '277' - content-type: - - application/json - host: - - api.anthropic.com - user-agent: - - AsyncAnthropic/Python 0.28.0 - x-stainless-arch: - - arm64 - x-stainless-async: - - async:asyncio - x-stainless-lang: - - python - x-stainless-os: - - MacOS - x-stainless-package-version: - - 0.28.0 - x-stainless-runtime: - - CPython - x-stainless-runtime-version: - - 3.10.13 - method: POST - uri: https://api.anthropic.com/v1/messages - response: - body: - string: !!binary | - H4sIAAAAAAAAA0xPTUvDQBD9K8ucPGwgTWvVPeqpKMWAFdRKWJIxWbuZjTuz0BD63yXFgqcH74v3 - JnANGOi5rfJFuX0u293D+Pi+fbvZlavX9f33ugQNMg44u5DZtggaYvAzYZkdiyUBDX1o0IOB2tvU - YLbMwpA4K/JilRfFHWioAwmSgPmYLoWCxzl6BgMvHaqhi5ZR7WGjpHN00Eo6jPgVIqqNsv0e1FWI - rnVkvR+VI/VkxZGyDKdPDSxhqCJaDjTvtcdKwgGJ4U9i/ElINYKh5L2GdP5jJnA0JLmYzfJWQ0jy - n1pcn06/AAAA//8DAAb+bZQtAQAA - headers: - CF-Cache-Status: - - DYNAMIC - CF-RAY: - - 88e26f40cb7e0f5b-EWR - Connection: - - keep-alive - Content-Encoding: - - gzip - Content-Type: - - application/json - Date: - - 
Mon, 03 Jun 2024 20:29:20 GMT - Server: - - cloudflare - Transfer-Encoding: - - chunked - anthropic-ratelimit-requests-limit: - - '5' - anthropic-ratelimit-requests-remaining: - - '2' - anthropic-ratelimit-requests-reset: - - '2024-06-03T20:29:57Z' - anthropic-ratelimit-tokens-limit: - - '10000' - anthropic-ratelimit-tokens-remaining: - - '10000' - anthropic-ratelimit-tokens-reset: - - '2024-06-03T20:29:57Z' - request-id: - - req_01BS4oP1hUmcmcaiaLUCqYSG - via: - - 1.1 google - x-cloud-trace-context: - - ad90e6c237e5abdea060b5655b8f209e - status: - code: 200 - message: OK -version: 1 diff --git a/tests/contrib/anthropic/cassettes/anthropic_completion_async_multi_prompt_with_chat_history.yaml b/tests/contrib/anthropic/cassettes/anthropic_completion_async_multi_prompt_with_chat_history.yaml deleted file mode 100644 index 524df951e99..00000000000 --- a/tests/contrib/anthropic/cassettes/anthropic_completion_async_multi_prompt_with_chat_history.yaml +++ /dev/null @@ -1,89 +0,0 @@ -interactions: -- request: - body: '{"max_tokens": 30, "messages": [{"role": "user", "content": [{"type": "text", - "text": "Hello, Start all responses with your name Claude."}, {"type": "text", - "text": "End all responses with [COPY, CLAUDE OVER AND OUT!]"}]}, {"role": "assistant", - "content": "Claude: Sure! 
[COPY, CLAUDE OVER AND OUT!]"}, {"role": "user", "content": - [{"type": "text", "text": "Add the time and date to the beginning of your response - after your name."}, {"type": "text", "text": "Explain string theory succinctly - to a complete noob."}]}], "model": "claude-3-opus-20240229"}' - headers: - accept: - - application/json - accept-encoding: - - gzip, deflate - anthropic-version: - - '2023-06-01' - connection: - - keep-alive - content-length: - - '553' - content-type: - - application/json - host: - - api.anthropic.com - user-agent: - - AsyncAnthropic/Python 0.28.0 - x-stainless-arch: - - arm64 - x-stainless-async: - - async:asyncio - x-stainless-lang: - - python - x-stainless-os: - - MacOS - x-stainless-package-version: - - 0.28.0 - x-stainless-runtime: - - CPython - x-stainless-runtime-version: - - 3.10.13 - method: POST - uri: https://api.anthropic.com/v1/messages - response: - body: - string: !!binary | - H4sIAAAAAAAAA0yPX0sDMRDEv8qyzzlJcxXaPCrFP1BQLCiIlOVuvYamyZlsaM/S7y5XLfg0y+z8 - BuaIrkWLu9yt9aR+3S4W8zJ80/P+7ubx+q2/X1FChTL0PKY4Z+oYFaboR4NydlkoCCrcxZY9Wmw8 - lZaruop9yZXRZqqNmaPCJgbhIGjfj5dC4cOInsXi7Zm08CDgMjQlJQ7iB1htSsotDQqWNMBkpsBo - UwMJTLTVBp6WV/AiyYUOZMMxDSNOvzeLa8jDZ6Id72Paggt4+lCYJfbrxJRjGIfRYS1xyyHj3yvz - V+HQMNpQvFdYzsPtEV3oi1zCdjZVGIv8t2p9Ov0AAAD//wMAsZe/jFYBAAA= - headers: - CF-Cache-Status: - - DYNAMIC - CF-RAY: - - 88e26f4a3b9617a1-EWR - Connection: - - keep-alive - Content-Encoding: - - gzip - Content-Type: - - application/json - Date: - - Mon, 03 Jun 2024 20:29:22 GMT - Server: - - cloudflare - Transfer-Encoding: - - chunked - anthropic-ratelimit-requests-limit: - - '5' - anthropic-ratelimit-requests-remaining: - - '1' - anthropic-ratelimit-requests-reset: - - '2024-06-03T20:29:57Z' - anthropic-ratelimit-tokens-limit: - - '10000' - anthropic-ratelimit-tokens-remaining: - - '10000' - anthropic-ratelimit-tokens-reset: - - '2024-06-03T20:29:57Z' - request-id: - - req_01L9tqQ99Z6CGKeDbNAKigxE - via: - - 1.1 google - x-cloud-trace-context: - - 
5cf34ec34c4a793ebe5dbebdc03ab228 - status: - code: 200 - message: OK -version: 1 diff --git a/tests/contrib/anthropic/cassettes/anthropic_completion_async_stream.yaml b/tests/contrib/anthropic/cassettes/anthropic_completion_async_stream.yaml deleted file mode 100644 index 5533c93e7d3..00000000000 --- a/tests/contrib/anthropic/cassettes/anthropic_completion_async_stream.yaml +++ /dev/null @@ -1,193 +0,0 @@ -interactions: -- request: - body: '{"max_tokens": 15, "messages": [{"role": "user", "content": [{"type": "text", - "text": "Can you explain what Descartes meant by ''I think, therefore I am''?"}]}], - "model": "claude-3-opus-20240229", "stream": true}' - headers: - accept: - - application/json - accept-encoding: - - gzip, deflate - anthropic-version: - - '2023-06-01' - connection: - - keep-alive - content-length: - - '210' - content-type: - - application/json - host: - - api.anthropic.com - user-agent: - - AsyncAnthropic/Python 0.28.0 - x-stainless-arch: - - arm64 - x-stainless-async: - - async:asyncio - x-stainless-lang: - - python - x-stainless-os: - - MacOS - x-stainless-package-version: - - 0.28.0 - x-stainless-runtime: - - CPython - x-stainless-runtime-version: - - 3.10.13 - method: POST - uri: https://api.anthropic.com/v1/messages - response: - body: - string: 'event: message_start - - data: {"type":"message_start","message":{"id":"msg_01Si43rw1LcRZyVVjZUoMZPd","type":"message","role":"assistant","model":"claude-3-opus-20240229","content":[],"stop_reason":null,"stop_sequence":null,"usage":{"input_tokens":27,"output_tokens":1}} } - - - event: content_block_start - - data: {"type":"content_block_start","index":0,"content_block":{"type":"text","text":""} } - - - event: ping - - data: {"type": "ping"} - - - event: content_block_delta - - data: {"type":"content_block_delta","index":0,"delta":{"type":"text_delta","text":"The"} } - - - event: content_block_delta - - data: {"type":"content_block_delta","index":0,"delta":{"type":"text_delta","text":" - phrase"} } - 
- - event: content_block_delta - - data: {"type":"content_block_delta","index":0,"delta":{"type":"text_delta","text":" - \""} } - - - event: content_block_delta - - data: {"type":"content_block_delta","index":0,"delta":{"type":"text_delta","text":"I"} } - - - event: content_block_delta - - data: {"type":"content_block_delta","index":0,"delta":{"type":"text_delta","text":" - think"} } - - - event: content_block_delta - - data: {"type":"content_block_delta","index":0,"delta":{"type":"text_delta","text":","} } - - - event: content_block_delta - - data: {"type":"content_block_delta","index":0,"delta":{"type":"text_delta","text":" - therefore"} } - - - event: content_block_delta - - data: {"type":"content_block_delta","index":0,"delta":{"type":"text_delta","text":" - I"} } - - - event: content_block_delta - - data: {"type":"content_block_delta","index":0,"delta":{"type":"text_delta","text":" - am"} } - - - event: content_block_delta - - data: {"type":"content_block_delta","index":0,"delta":{"type":"text_delta","text":"\""} } - - - event: content_block_delta - - data: {"type":"content_block_delta","index":0,"delta":{"type":"text_delta","text":" - ("} } - - - event: content_block_delta - - data: {"type":"content_block_delta","index":0,"delta":{"type":"text_delta","text":"originally"} } - - - event: content_block_delta - - data: {"type":"content_block_delta","index":0,"delta":{"type":"text_delta","text":" - in"} } - - - event: content_block_delta - - data: {"type":"content_block_delta","index":0,"delta":{"type":"text_delta","text":" - Latin"} } - - - event: content_block_delta - - data: {"type":"content_block_delta","index":0,"delta":{"type":"text_delta","text":" - as"} } - - - event: content_block_stop - - data: {"type":"content_block_stop","index":0 } - - - event: message_delta - - data: {"type":"message_delta","delta":{"stop_reason":"max_tokens","stop_sequence":null},"usage":{"output_tokens":15} } - - - event: message_stop - - data: {"type":"message_stop" } - - - ' - 
headers: - CF-Cache-Status: - - DYNAMIC - CF-RAY: - - 88e26f28398d17b9-EWR - Cache-Control: - - no-cache - Connection: - - keep-alive - Content-Type: - - text/event-stream; charset=utf-8 - Date: - - Mon, 03 Jun 2024 20:29:18 GMT - Server: - - cloudflare - Transfer-Encoding: - - chunked - anthropic-ratelimit-requests-limit: - - '5' - anthropic-ratelimit-requests-remaining: - - '3' - anthropic-ratelimit-requests-reset: - - '2024-06-03T20:29:57Z' - anthropic-ratelimit-tokens-limit: - - '10000' - anthropic-ratelimit-tokens-remaining: - - '10000' - anthropic-ratelimit-tokens-reset: - - '2024-06-03T20:29:57Z' - request-id: - - req_01WnUgxExmDjUBGVtpwdGyWT - via: - - 1.1 google - status: - code: 200 - message: OK -version: 1 diff --git a/tests/contrib/anthropic/cassettes/anthropic_completion_error_async.yaml b/tests/contrib/anthropic/cassettes/anthropic_completion_error_async.yaml deleted file mode 100644 index 84013aee4ee..00000000000 --- a/tests/contrib/anthropic/cassettes/anthropic_completion_error_async.yaml +++ /dev/null @@ -1,67 +0,0 @@ -interactions: -- request: - body: '{"max_tokens": 15, "messages": ["Invalid content"], "model": "claude-3-opus-20240229"}' - headers: - accept: - - application/json - accept-encoding: - - gzip, deflate - anthropic-version: - - '2023-06-01' - connection: - - keep-alive - content-length: - - '86' - content-type: - - application/json - host: - - api.anthropic.com - user-agent: - - AsyncAnthropic/Python 0.28.0 - x-stainless-arch: - - arm64 - x-stainless-async: - - async:asyncio - x-stainless-lang: - - python - x-stainless-os: - - MacOS - x-stainless-package-version: - - 0.28.0 - x-stainless-runtime: - - CPython - x-stainless-runtime-version: - - 3.10.13 - method: POST - uri: https://api.anthropic.com/v1/messages - response: - body: - string: '{"type":"error","error":{"type":"invalid_request_error","message":"messages.0: - Input does not match the expected shape."}}' - headers: - CF-Cache-Status: - - DYNAMIC - CF-RAY: - - 
88e26f1ce85b4379-EWR - Connection: - - keep-alive - Content-Length: - - '122' - Content-Type: - - application/json - Date: - - Mon, 03 Jun 2024 20:29:13 GMT - Server: - - cloudflare - request-id: - - req_014VdE8JFtyZZgtyAmYPD4pd - via: - - 1.1 google - x-cloud-trace-context: - - 99147895ac9c66c15e1de6063c141048 - x-should-retry: - - 'false' - status: - code: 400 - message: Bad Request -version: 1 diff --git a/tests/contrib/anthropic/cassettes/anthropic_completion_sync_multi_prompt.yaml b/tests/contrib/anthropic/cassettes/anthropic_completion_multi_prompt.yaml similarity index 100% rename from tests/contrib/anthropic/cassettes/anthropic_completion_sync_multi_prompt.yaml rename to tests/contrib/anthropic/cassettes/anthropic_completion_multi_prompt.yaml diff --git a/tests/contrib/anthropic/cassettes/anthropic_completion_sync_multi_prompt_with_chat_history.yaml b/tests/contrib/anthropic/cassettes/anthropic_completion_multi_prompt_with_chat_history.yaml similarity index 100% rename from tests/contrib/anthropic/cassettes/anthropic_completion_sync_multi_prompt_with_chat_history.yaml rename to tests/contrib/anthropic/cassettes/anthropic_completion_multi_prompt_with_chat_history.yaml diff --git a/tests/contrib/anthropic/cassettes/anthropic_completion_sync_stream.yaml b/tests/contrib/anthropic/cassettes/anthropic_completion_stream.yaml similarity index 100% rename from tests/contrib/anthropic/cassettes/anthropic_completion_sync_stream.yaml rename to tests/contrib/anthropic/cassettes/anthropic_completion_stream.yaml diff --git a/tests/contrib/anthropic/cassettes/anthropic_completion_sync.yaml b/tests/contrib/anthropic/cassettes/anthropic_completion_sync.yaml deleted file mode 100644 index 247fd016a79..00000000000 --- a/tests/contrib/anthropic/cassettes/anthropic_completion_sync.yaml +++ /dev/null @@ -1,85 +0,0 @@ -interactions: -- request: - body: '{"max_tokens": 15, "messages": [{"role": "user", "content": [{"type": "text", - "text": "Can you explain what Descartes meant by 
''I think, therefore I am''?"}]}], - "model": "claude-3-opus-20240229"}' - headers: - accept: - - application/json - accept-encoding: - - gzip, deflate - anthropic-version: - - '2023-06-01' - connection: - - keep-alive - content-length: - - '194' - content-type: - - application/json - host: - - api.anthropic.com - user-agent: - - Anthropic/Python 0.28.0 - x-stainless-arch: - - arm64 - x-stainless-async: - - 'false' - x-stainless-lang: - - python - x-stainless-os: - - MacOS - x-stainless-package-version: - - 0.28.0 - x-stainless-runtime: - - CPython - x-stainless-runtime-version: - - 3.10.13 - method: POST - uri: https://api.anthropic.com/v1/messages - response: - body: - string: !!binary | - H4sIAAAAAAAAA0xPy2rDMBD8FbGnHmRw3EeormkPKT2FQilNMSLeSiLyytGumgTjfy8ODfQ0MC9m - RggdGOjZtfXi+egPzebnyb24j2W/csfN6v3BgwY5Dzi7kNk6BA05xZmwzIHFkoCGPnUYwcAu2tJh - dVuloXDV1M1d3TSPoGGXSJAEzOd4LRQ8zdELGHjzqAafLaPawlqJD7TXSjxm/E4Z1VrZfgvqJuXg - AtkYzyqQerUSSFmG6UsDSxrajJYTzXvtqZW0R2L4kxgPBWmHYKjEqKFc/pgRAg1FrmbTLDWkIv+p - xf00/QIAAP//AwAjDM/sLQEAAA== - headers: - CF-Cache-Status: - - DYNAMIC - CF-RAY: - - 88e24ceedab20cb0-EWR - Connection: - - keep-alive - Content-Encoding: - - gzip - Content-Type: - - application/json - Date: - - Mon, 03 Jun 2024 20:05:54 GMT - Server: - - cloudflare - Transfer-Encoding: - - chunked - anthropic-ratelimit-requests-limit: - - '5' - anthropic-ratelimit-requests-remaining: - - '1' - anthropic-ratelimit-requests-reset: - - '2024-06-03T20:05:57Z' - anthropic-ratelimit-tokens-limit: - - '10000' - anthropic-ratelimit-tokens-remaining: - - '9000' - anthropic-ratelimit-tokens-reset: - - '2024-06-03T20:05:57Z' - request-id: - - req_01APGLDxmWmg64SznQbxJTHy - via: - - 1.1 google - x-cloud-trace-context: - - 0c2fa5913c47bc6b0a3e8a2661af4a7b - status: - code: 200 - message: OK -version: 1 diff --git a/tests/contrib/anthropic/test_anthropic.py b/tests/contrib/anthropic/test_anthropic.py index 2ac27b1dfc7..859ae0dbdc6 100644 --- 
a/tests/contrib/anthropic/test_anthropic.py +++ b/tests/contrib/anthropic/test_anthropic.py @@ -2,6 +2,9 @@ from tests.utils import override_global_config +from .utils import process_tool_call +from .utils import tools + def test_global_tags(ddtrace_config_anthropic, anthropic, request_vcr, mock_tracer): """ @@ -12,7 +15,7 @@ def test_global_tags(ddtrace_config_anthropic, anthropic, request_vcr, mock_trac """ llm = anthropic.Anthropic() with override_global_config(dict(service="test-svc", env="staging", version="1234")): - cassette_name = "anthropic_completion_sync.yaml" + cassette_name = "anthropic_completion.yaml" with request_vcr.use_cassette(cassette_name): llm.messages.create( model="claude-3-opus-20240229", @@ -29,11 +32,13 @@ def test_global_tags(ddtrace_config_anthropic, anthropic, request_vcr, mock_trac assert span.get_tag("anthropic.request.api_key") == "sk-...key>" -# @pytest.mark.snapshot(ignores=["metrics.anthropic.tokens.total_cost", "resource"]) -@pytest.mark.snapshot() +@pytest.mark.snapshot( + token="tests.contrib.anthropic.test_anthropic.test_anthropic_llm", + ignores=["resource"] +) def test_anthropic_llm_sync(anthropic, request_vcr): llm = anthropic.Anthropic() - with request_vcr.use_cassette("anthropic_completion_sync.yaml"): + with request_vcr.use_cassette("anthropic_completion.yaml"): llm.messages.create( model="claude-3-opus-20240229", max_tokens=15, @@ -51,10 +56,13 @@ def test_anthropic_llm_sync(anthropic, request_vcr): ) -@pytest.mark.snapshot() +@pytest.mark.snapshot( + token="tests.contrib.anthropic.test_anthropic.test_anthropic_llm_multiple_prompts", + ignores=["resource"] +) def test_anthropic_llm_sync_multiple_prompts(anthropic, request_vcr): llm = anthropic.Anthropic() - with request_vcr.use_cassette("anthropic_completion_sync_multi_prompt.yaml"): + with request_vcr.use_cassette("anthropic_completion_multi_prompt.yaml"): llm.messages.create( model="claude-3-opus-20240229", max_tokens=15, @@ -70,10 +78,13 @@ def 
test_anthropic_llm_sync_multiple_prompts(anthropic, request_vcr): ) -@pytest.mark.snapshot() +@pytest.mark.snapshot( + token="tests.contrib.anthropic.test_anthropic.test_anthropic_llm_multiple_prompts_with_chat_history", + ignores=["resource"] +) def test_anthropic_llm_sync_multiple_prompts_with_chat_history(anthropic, request_vcr): llm = anthropic.Anthropic() - with request_vcr.use_cassette("anthropic_completion_sync_multi_prompt_with_chat_history.yaml"): + with request_vcr.use_cassette("anthropic_completion_multi_prompt_with_chat_history.yaml"): llm.messages.create( model="claude-3-opus-20240229", max_tokens=30, @@ -100,7 +111,10 @@ def test_anthropic_llm_sync_multiple_prompts_with_chat_history(anthropic, reques ) -@pytest.mark.snapshot(ignores=["meta.error.stack"]) +@pytest.mark.snapshot( + ignores=["meta.error.stack", "resource"], + token="tests.contrib.anthropic.test_anthropic.test_anthropic_llm_error" +) def test_anthropic_llm_error(anthropic, request_vcr): llm = anthropic.Anthropic() invalid_error = anthropic.BadRequestError @@ -109,10 +123,13 @@ def test_anthropic_llm_error(anthropic, request_vcr): llm.messages.create(model="claude-3-opus-20240229", max_tokens=15, messages=["Invalid content"]) -@pytest.mark.snapshot() +@pytest.mark.snapshot( + token="tests.contrib.anthropic.test_anthropic.test_anthropic_llm_stream", + ignores=["resource"] +) def test_anthropic_llm_sync_stream(anthropic, request_vcr): llm = anthropic.Anthropic() - with request_vcr.use_cassette("anthropic_completion_sync_stream.yaml"): + with request_vcr.use_cassette("anthropic_completion_stream.yaml"): stream = llm.messages.create( model="claude-3-opus-20240229", max_tokens=15, @@ -131,3 +148,261 @@ def test_anthropic_llm_sync_stream(anthropic, request_vcr): ) for _ in stream: pass + + +@pytest.mark.snapshot( + token="tests.contrib.anthropic.test_anthropic.test_anthropic_llm_tools", + ignores=["resource"] +) +def test_anthropic_llm_sync_tools(anthropic, request_vcr): + llm = 
anthropic.Anthropic() + with request_vcr.use_cassette("anthropic_completion_tools.yaml"): + message = llm.messages.create( + model="claude-3-opus-20240229", + max_tokens=200, + messages=[{"role": "user", "content": "What is the result of 1,984,135 * 9,343,116?"}], + tools=tools, + ) + + if message.stop_reason == "tool_use": + tool_use = next(block for block in message.content if block.type == "tool_use") + tool_name = tool_use.name + tool_input = tool_use.input + + tool_result = process_tool_call(tool_name, tool_input) + + response = llm.messages.create( + model="claude-3-opus-20240229", + max_tokens=500, + messages=[ + {"role": "user", "content": "What is the result of 1,984,135 * 9,343,116?"}, + {"role": "assistant", "content": message.content}, + { + "role": "user", + "content": [ + { + "type": "tool_result", + "tool_use_id": tool_use.id, + "content": tool_result, + } + ], + }, + ], + tools=tools, + ) + else: + response = message + + final_response = next( + (block.text for block in response.content if hasattr(block, "text")), + None, + ) + assert final_response is not None + assert getattr(final_response, "content") is not None + + +# Async tests + + +@pytest.mark.asyncio +async def test_global_tags_async(ddtrace_config_anthropic, anthropic, request_vcr, mock_tracer): + """ + When the global config UST tags are set + The service name should be used for all data + The env should be used for all data + The version should be used for all data + """ + llm = anthropic.AsyncAnthropic() + with override_global_config(dict(service="test-svc", env="staging", version="1234")): + cassette_name = "anthropic_completion.yaml" + with request_vcr.use_cassette(cassette_name): + await llm.messages.create( + model="claude-3-opus-20240229", + max_tokens=15, + messages=[{"role": "user", "content": "What does Nietzsche mean by 'God is dead'?"}], + ) + + span = mock_tracer.pop_traces()[0][0] + assert span.resource == "AsyncMessages.create" + assert span.service == "test-svc" + assert 
span.get_tag("env") == "staging" + assert span.get_tag("version") == "1234" + assert span.get_tag("anthropic.request.model") == "claude-3-opus-20240229" + assert span.get_tag("anthropic.request.api_key") == "sk-...key>" + + +@pytest.mark.asyncio +async def test_anthropic_llm_async_basic(anthropic, request_vcr, snapshot_context): + with snapshot_context( + token="tests.contrib.anthropic.test_anthropic.test_anthropic_llm_basic", + ignores=["resource"] + ): + llm = anthropic.AsyncAnthropic() + with request_vcr.use_cassette("anthropic_completion.yaml"): + await llm.messages.create( + model="claude-3-opus-20240229", + max_tokens=15, + messages=[ + { + "role": "user", + "content": [ + { + "type": "text", + "text": "Can you explain what Descartes meant by 'I think, therefore I am'?", + } + ], + } + ], + ) + + +@pytest.mark.asyncio +async def test_anthropic_llm_async_multiple_prompts_no_history(anthropic, request_vcr, snapshot_context): + with snapshot_context( + token="tests.contrib.anthropic.test_anthropic.test_anthropic_llm_multiple_prompts_no_history", + ignores=["resource"] + ): + llm = anthropic.AsyncAnthropic() + with request_vcr.use_cassette("anthropic_completion_multi_prompt.yaml"): + await llm.messages.create( + model="claude-3-opus-20240229", + max_tokens=15, + messages=[ + { + "role": "user", + "content": [ + {"type": "text", "text": "Hello, I am looking for information about some books!"}, + { + "type": "text", + "text": "Can you explain what Descartes meant by 'I think, therefore I am'?", + }, + ], + } + ], + ) + + +@pytest.mark.asyncio +async def test_anthropic_llm_async_multiple_prompts_with_chat_history(anthropic, request_vcr, snapshot_context): + with snapshot_context( + token="tests.contrib.anthropic.test_anthropic.test_anthropic_llm_multiple_prompts_with_chat_history", + ignores=["resource"] + ): + llm = anthropic.AsyncAnthropic() + with request_vcr.use_cassette("anthropic_completion_multi_prompt_with_chat_history.yaml"): + await llm.messages.create( + 
model="claude-3-opus-20240229", + max_tokens=30, + messages=[ + { + "role": "user", + "content": [ + {"type": "text", "text": "Hello, Start all responses with your name Claude."}, + {"type": "text", "text": "End all responses with [COPY, CLAUDE OVER AND OUT!]"}, + ], + }, + {"role": "assistant", "content": "Claude: Sure! [COPY, CLAUDE OVER AND OUT!]"}, + { + "role": "user", + "content": [ + { + "type": "text", + "text": "Add the time and date to the beginning of your response after your name.", + }, + {"type": "text", "text": "Explain string theory succinctly to a complete noob."}, + ], + }, + ], + ) + + +@pytest.mark.asyncio +async def test_anthropic_llm_error_async(anthropic, request_vcr, snapshot_context): + with snapshot_context( + ignores=["meta.error.stack", "resource"], + token="tests.contrib.anthropic.test_anthropic.test_anthropic_llm_error" + ): + llm = anthropic.AsyncAnthropic() + invalid_error = anthropic.BadRequestError + with pytest.raises(invalid_error): + with request_vcr.use_cassette("anthropic_completion_error.yaml"): + await llm.messages.create(model="claude-3-opus-20240229", max_tokens=15, messages=["Invalid content"]) + + +@pytest.mark.asyncio +async def test_anthropic_llm_async_stream(anthropic, request_vcr, snapshot_context): + with snapshot_context( + token="tests.contrib.anthropic.test_anthropic.test_anthropic_llm_stream", + ignores=["resource"] + ): + llm = anthropic.AsyncAnthropic() + with request_vcr.use_cassette("anthropic_completion_stream.yaml"): + stream = await llm.messages.create( + model="claude-3-opus-20240229", + max_tokens=15, + messages=[ + { + "role": "user", + "content": [ + { + "type": "text", + "text": "Can you explain what Descartes meant by 'I think, therefore I am'?", + } + ], + }, + ], + stream=True, + ) + async for _ in stream: + pass + + +@pytest.mark.snapshot( + token="tests.contrib.anthropic.test_anthropic.test_anthropic_llm_tools", + ignores=["resource"] +) +async def test_anthropic_llm_async_tools(anthropic, 
request_vcr): + llm = anthropic.Anthropic() + with request_vcr.use_cassette("anthropic_completion_tools.yaml"): + message = await llm.messages.create( + model="claude-3-opus-20240229", + max_tokens=200, + messages=[{"role": "user", "content": "What is the result of 1,984,135 * 9,343,116?"}], + tools=tools, + ) + + if message.stop_reason == "tool_use": + tool_use = next(block for block in message.content if block.type == "tool_use") + tool_name = tool_use.name + tool_input = tool_use.input + + tool_result = process_tool_call(tool_name, tool_input) + + response = await llm.messages.create( + model="claude-3-opus-20240229", + max_tokens=500, + messages=[ + {"role": "user", "content": "What is the result of 1,984,135 * 9,343,116?"}, + {"role": "assistant", "content": message.content}, + { + "role": "user", + "content": [ + { + "type": "tool_result", + "tool_use_id": tool_use.id, + "content": tool_result, + } + ], + }, + ], + tools=tools, + ) + else: + response = message + + final_response = next( + (block.text for block in response.content if hasattr(block, "text")), + None, + ) + assert final_response is not None + assert getattr(final_response, "content") is not None diff --git a/tests/contrib/anthropic/test_anthropic_async.py b/tests/contrib/anthropic/test_anthropic_async.py deleted file mode 100644 index 5b723e0fc0c..00000000000 --- a/tests/contrib/anthropic/test_anthropic_async.py +++ /dev/null @@ -1,142 +0,0 @@ -import pytest - -from tests.utils import override_global_config - - -@pytest.mark.asyncio -async def test_global_tags_async(ddtrace_config_anthropic, anthropic, request_vcr, mock_tracer): - """ - When the global config UST tags are set - The service name should be used for all data - The env should be used for all data - The version should be used for all data - """ - llm = anthropic.AsyncAnthropic() - with override_global_config(dict(service="test-svc", env="staging", version="1234")): - cassette_name = "anthropic_completion_async.yaml" - with 
request_vcr.use_cassette(cassette_name): - await llm.messages.create( - model="claude-3-opus-20240229", - max_tokens=15, - messages=[{"role": "user", "content": "What does Nietzsche mean by 'God is dead'?"}], - ) - - span = mock_tracer.pop_traces()[0][0] - assert span.resource == "AsyncMessages.create" - assert span.service == "test-svc" - assert span.get_tag("env") == "staging" - assert span.get_tag("version") == "1234" - assert span.get_tag("anthropic.request.model") == "claude-3-opus-20240229" - assert span.get_tag("anthropic.request.api_key") == "sk-...key>" - - -@pytest.mark.asyncio -# @pytest.mark.snapshot -async def test_anthropic_llm_async_basic(anthropic, request_vcr, snapshot_context): - with snapshot_context(): - llm = anthropic.AsyncAnthropic() - with request_vcr.use_cassette("anthropic_completion_async.yaml"): - await llm.messages.create( - model="claude-3-opus-20240229", - max_tokens=15, - messages=[ - { - "role": "user", - "content": [ - { - "type": "text", - "text": "Can you explain what Descartes meant by 'I think, therefore I am'?", - } - ], - } - ], - ) - - -@pytest.mark.asyncio -async def test_anthropic_llm_async_multiple_prompts_no_history(anthropic, request_vcr, snapshot_context): - with snapshot_context(): - llm = anthropic.AsyncAnthropic() - with request_vcr.use_cassette("anthropic_completion_async_multi_prompt.yaml"): - await llm.messages.create( - model="claude-3-opus-20240229", - max_tokens=15, - messages=[ - { - "role": "user", - "content": [ - {"type": "text", "text": "Hello, I am looking for information about some books!"}, - { - "type": "text", - "text": "Can you explain what Descartes meant by 'I think, therefore I am'?", - }, - ], - } - ], - ) - - -@pytest.mark.asyncio -async def test_anthropic_llm_async_multiple_prompts_with_chat_history(anthropic, request_vcr, snapshot_context): - with snapshot_context(): - llm = anthropic.AsyncAnthropic() - with 
request_vcr.use_cassette("anthropic_completion_async_multi_prompt_with_chat_history.yaml"): - await llm.messages.create( - model="claude-3-opus-20240229", - max_tokens=30, - messages=[ - { - "role": "user", - "content": [ - {"type": "text", "text": "Hello, Start all responses with your name Claude."}, - {"type": "text", "text": "End all responses with [COPY, CLAUDE OVER AND OUT!]"}, - ], - }, - {"role": "assistant", "content": "Claude: Sure! [COPY, CLAUDE OVER AND OUT!]"}, - { - "role": "user", - "content": [ - { - "type": "text", - "text": "Add the time and date to the beginning of your response after your name.", - }, - {"type": "text", "text": "Explain string theory succinctly to a complete noob."}, - ], - }, - ], - ) - - -@pytest.mark.asyncio -async def test_anthropic_llm_error_async(anthropic, request_vcr, snapshot_context): - with snapshot_context(ignores=["meta.error.stack"]): - llm = anthropic.AsyncAnthropic() - invalid_error = anthropic.BadRequestError - with pytest.raises(invalid_error): - with request_vcr.use_cassette("anthropic_completion_error_async.yaml"): - await llm.messages.create(model="claude-3-opus-20240229", max_tokens=15, messages=["Invalid content"]) - - -@pytest.mark.asyncio -async def test_anthropic_llm_async_stream(anthropic, request_vcr, snapshot_context): - with snapshot_context(): - llm = anthropic.AsyncAnthropic() - with request_vcr.use_cassette("anthropic_completion_async_stream.yaml"): - stream = await llm.messages.create( - model="claude-3-opus-20240229", - max_tokens=15, - messages=[ - { - "role": "user", - "content": [ - { - "type": "text", - "text": "Can you explain what Descartes meant by 'I think, therefore I am'?", - } - ], - }, - ], - stream=True, - ) - async for _ in stream: - pass diff --git a/tests/contrib/anthropic/test_anthropic_patch.py b/tests/contrib/anthropic/test_anthropic_patch.py index a5732bf5902..52675cc1341 100644 --- a/tests/contrib/anthropic/test_anthropic_patch.py +++ 
b/tests/contrib/anthropic/test_anthropic_patch.py @@ -13,9 +13,12 @@ class TestAnthropicPatch(PatchTestCase.Base): def assert_module_patched(self, anthropic): self.assert_wrapped(anthropic.resources.messages.Messages.create) + self.assert_wrapped(anthropic.resources.messages.AsyncMessages.create) def assert_not_module_patched(self, anthropic): self.assert_not_wrapped(anthropic.resources.messages.Messages.create) + self.assert_not_wrapped(anthropic.resources.messages.AsyncMessages.create) def assert_not_module_double_patched(self, anthropic): self.assert_not_double_wrapped(anthropic.resources.messages.Messages.create) + self.assert_not_double_wrapped(anthropic.resources.messages.AsyncMessages.create) diff --git a/tests/contrib/anthropic/utils.py b/tests/contrib/anthropic/utils.py index c47812650cd..21a41dcdba7 100644 --- a/tests/contrib/anthropic/utils.py +++ b/tests/contrib/anthropic/utils.py @@ -1,12 +1,9 @@ import os +import re import vcr -def iswrapped(obj): - return hasattr(obj, "__dd_wrapped__") - - # VCR is used to capture and store network requests made to Anthropic. # This is done to avoid making real calls to the API which could introduce # flakiness and cost. 
@@ -28,3 +25,41 @@ def get_request_vcr(): # Ignore requests to the agent ignore_localhost=True, ) + + +# Anthropic Tools + + +def calculate(expression): + # Remove any non-digit or non-operator characters from the expression + expression = re.sub(r'[^0-9+\-*/().]', '', expression) + + try: + # Evaluate the expression using the built-in eval() function + result = eval(expression) + return str(result) + except (SyntaxError, ZeroDivisionError, NameError, TypeError, OverflowError): + return "Error: Invalid expression" + + +tools = [ + { + "name": "calculator", + "description": "A simple calculator that performs basic arithmetic operations.", + "input_schema": { + "type": "object", + "properties": { + "expression": { + "type": "string", + "description": "The mathematical expression to evaluate (e.g., '2 + 3 * 4')." + } + }, + "required": ["expression"] + } + } +] + + +def process_tool_call(tool_name, tool_input): + if tool_name == "calculator": + return calculate(tool_input["expression"]) diff --git a/tests/snapshots/tests.contrib.anthropic.test_anthropic.test_anthropic_llm_sync.json b/tests/snapshots/tests.contrib.anthropic.test_anthropic.test_anthropic_llm.json similarity index 87% rename from tests/snapshots/tests.contrib.anthropic.test_anthropic.test_anthropic_llm_sync.json rename to tests/snapshots/tests.contrib.anthropic.test_anthropic.test_anthropic_llm.json index 32fca96e31c..d63f47cbb04 100644 --- a/tests/snapshots/tests.contrib.anthropic.test_anthropic.test_anthropic_llm_sync.json +++ b/tests/snapshots/tests.contrib.anthropic.test_anthropic.test_anthropic_llm.json @@ -10,7 +10,7 @@ "error": 0, "meta": { "_dd.p.dm": "-0", - "_dd.p.tid": "665e221e00000000", + "_dd.p.tid": "665f496b00000000", "anthropic.request.api_key": "sk-...key>", "anthropic.request.messages.0.content.0.text": "Can you explain what Descartes meant by 'I think, therefore I am'?", "anthropic.request.messages.0.content.0.type": "text", @@ -22,7 +22,7 @@ 
"anthropic.response.completions.finish_reason": "max_tokens", "anthropic.response.completions.role": "assistant", "language": "python", - "runtime-id": "b52cab756a314569a6d74fe80724c91a" + "runtime-id": "75b37cae2dc24d8190d27bcb14d4d263" }, "metrics": { "_dd.measured": 1, @@ -32,8 +32,8 @@ "anthropic.response.usage.input_tokens": 27, "anthropic.response.usage.output_tokens": 15, "anthropic.response.usage.total_tokens": 42, - "process_id": 95434 + "process_id": 62674 }, - "duration": 1633425000, - "start": 1717445150472691000 + "duration": 2476000, + "start": 1717520747849359000 }]] diff --git a/tests/snapshots/tests.contrib.anthropic.test_anthropic_async.test_anthropic_llm_async_basic.json b/tests/snapshots/tests.contrib.anthropic.test_anthropic.test_anthropic_llm_basic.json similarity index 78% rename from tests/snapshots/tests.contrib.anthropic.test_anthropic_async.test_anthropic_llm_async_basic.json rename to tests/snapshots/tests.contrib.anthropic.test_anthropic.test_anthropic_llm_basic.json index 27698130a1b..336ebf85a03 100644 --- a/tests/snapshots/tests.contrib.anthropic.test_anthropic_async.test_anthropic_llm_async_basic.json +++ b/tests/snapshots/tests.contrib.anthropic.test_anthropic.test_anthropic_llm_basic.json @@ -10,19 +10,19 @@ "error": 0, "meta": { "_dd.p.dm": "-0", - "_dd.p.tid": "665e2c0700000000", + "_dd.p.tid": "665f496c00000000", "anthropic.request.api_key": "sk-...key>", "anthropic.request.messages.0.content.0.text": "Can you explain what Descartes meant by 'I think, therefore I am'?", "anthropic.request.messages.0.content.0.type": "text", "anthropic.request.messages.0.role": "user", "anthropic.request.model": "claude-3-opus-20240229", "anthropic.request.parameters": "{\"model\": \"claude-3-opus-20240229\", \"max_tokens\": 15}", - "anthropic.response.completions.content.0.text": "The famous philosophical statement \"I think, therefore I am\" (originally in", + "anthropic.response.completions.content.0.text": "The phrase \"I think, therefore I 
am\" (originally in Latin as", "anthropic.response.completions.content.0.type": "text", "anthropic.response.completions.finish_reason": "max_tokens", "anthropic.response.completions.role": "assistant", "language": "python", - "runtime-id": "8d2ef62d83884add8d544970c41bf728" + "runtime-id": "75b37cae2dc24d8190d27bcb14d4d263" }, "metrics": { "_dd.measured": 1, @@ -32,8 +32,8 @@ "anthropic.response.usage.input_tokens": 27, "anthropic.response.usage.output_tokens": 15, "anthropic.response.usage.total_tokens": 42, - "process_id": 29818 + "process_id": 62674 }, - "duration": 1112700000, - "start": 1717447687466355000 + "duration": 2247000, + "start": 1717520748016945000 }]] diff --git a/tests/snapshots/tests.contrib.anthropic.test_anthropic.test_anthropic_llm_sync_multiple_prompts.json b/tests/snapshots/tests.contrib.anthropic.test_anthropic.test_anthropic_llm_multiple_prompts.json similarity index 89% rename from tests/snapshots/tests.contrib.anthropic.test_anthropic.test_anthropic_llm_sync_multiple_prompts.json rename to tests/snapshots/tests.contrib.anthropic.test_anthropic.test_anthropic_llm_multiple_prompts.json index 09d86cdd1de..eb1807f0fcc 100644 --- a/tests/snapshots/tests.contrib.anthropic.test_anthropic.test_anthropic_llm_sync_multiple_prompts.json +++ b/tests/snapshots/tests.contrib.anthropic.test_anthropic.test_anthropic_llm_multiple_prompts.json @@ -10,7 +10,7 @@ "error": 0, "meta": { "_dd.p.dm": "-0", - "_dd.p.tid": "665e222000000000", + "_dd.p.tid": "665f496b00000000", "anthropic.request.api_key": "sk-...key>", "anthropic.request.messages.0.content.0.text": "Hello, I am looking for information about some books!", "anthropic.request.messages.0.content.0.type": "text", @@ -24,7 +24,7 @@ "anthropic.response.completions.finish_reason": "max_tokens", "anthropic.response.completions.role": "assistant", "language": "python", - "runtime-id": "b52cab756a314569a6d74fe80724c91a" + "runtime-id": "75b37cae2dc24d8190d27bcb14d4d263" }, "metrics": { "_dd.measured": 1, @@ 
-34,8 +34,8 @@ "anthropic.response.usage.input_tokens": 38, "anthropic.response.usage.output_tokens": 15, "anthropic.response.usage.total_tokens": 53, - "process_id": 95434 + "process_id": 62674 }, - "duration": 1951110000, - "start": 1717445152164436000 + "duration": 2793000, + "start": 1717520747889584000 }]] diff --git a/tests/snapshots/tests.contrib.anthropic.test_anthropic_async.test_anthropic_llm_async_multiple_prompts_no_history.json b/tests/snapshots/tests.contrib.anthropic.test_anthropic.test_anthropic_llm_multiple_prompts_no_history.json similarity index 89% rename from tests/snapshots/tests.contrib.anthropic.test_anthropic_async.test_anthropic_llm_async_multiple_prompts_no_history.json rename to tests/snapshots/tests.contrib.anthropic.test_anthropic.test_anthropic_llm_multiple_prompts_no_history.json index cb1f1c01df5..5ffb3fa9431 100644 --- a/tests/snapshots/tests.contrib.anthropic.test_anthropic_async.test_anthropic_llm_async_multiple_prompts_no_history.json +++ b/tests/snapshots/tests.contrib.anthropic.test_anthropic.test_anthropic_llm_multiple_prompts_no_history.json @@ -10,7 +10,7 @@ "error": 0, "meta": { "_dd.p.dm": "-0", - "_dd.p.tid": "665e2bf800000000", + "_dd.p.tid": "665f496c00000000", "anthropic.request.api_key": "sk-...key>", "anthropic.request.messages.0.content.0.text": "Hello, I am looking for information about some books!", "anthropic.request.messages.0.content.0.type": "text", @@ -24,7 +24,7 @@ "anthropic.response.completions.finish_reason": "max_tokens", "anthropic.response.completions.role": "assistant", "language": "python", - "runtime-id": "b9f16b2ca36b405485b2d3fb6a735bd0" + "runtime-id": "75b37cae2dc24d8190d27bcb14d4d263" }, "metrics": { "_dd.measured": 1, @@ -34,8 +34,8 @@ "anthropic.response.usage.input_tokens": 38, "anthropic.response.usage.output_tokens": 15, "anthropic.response.usage.total_tokens": 53, - "process_id": 28997 + "process_id": 62674 }, - "duration": 547612000, - "start": 1717447672414622000 + "duration": 2652000, 
+ "start": 1717520748050099000 }]] diff --git a/tests/snapshots/tests.contrib.anthropic.test_anthropic.test_anthropic_llm_sync_multiple_prompts_with_chat_history.json b/tests/snapshots/tests.contrib.anthropic.test_anthropic.test_anthropic_llm_multiple_prompts_with_chat_history.json similarity index 92% rename from tests/snapshots/tests.contrib.anthropic.test_anthropic.test_anthropic_llm_sync_multiple_prompts_with_chat_history.json rename to tests/snapshots/tests.contrib.anthropic.test_anthropic.test_anthropic_llm_multiple_prompts_with_chat_history.json index 71bb629c7ab..f7fc39fcf24 100644 --- a/tests/snapshots/tests.contrib.anthropic.test_anthropic.test_anthropic_llm_sync_multiple_prompts_with_chat_history.json +++ b/tests/snapshots/tests.contrib.anthropic.test_anthropic.test_anthropic_llm_multiple_prompts_with_chat_history.json @@ -10,7 +10,7 @@ "error": 0, "meta": { "_dd.p.dm": "-0", - "_dd.p.tid": "665e220a00000000", + "_dd.p.tid": "665f496b00000000", "anthropic.request.api_key": "sk-...key>", "anthropic.request.messages.0.content.0.text": "Hello, Start all responses with your name Claude.", "anthropic.request.messages.0.content.0.type": "text", @@ -32,7 +32,7 @@ "anthropic.response.completions.finish_reason": "max_tokens", "anthropic.response.completions.role": "assistant", "language": "python", - "runtime-id": "b52cab756a314569a6d74fe80724c91a" + "runtime-id": "75b37cae2dc24d8190d27bcb14d4d263" }, "metrics": { "_dd.measured": 1, @@ -42,8 +42,8 @@ "anthropic.response.usage.input_tokens": 84, "anthropic.response.usage.output_tokens": 30, "anthropic.response.usage.total_tokens": 114, - "process_id": 95434 + "process_id": 62674 }, - "duration": 2371348000, - "start": 1717445130515094000 + "duration": 3568000, + "start": 1717520747916500000 }]] diff --git a/tests/snapshots/tests.contrib.anthropic.test_anthropic.test_anthropic_llm_sync_stream.json b/tests/snapshots/tests.contrib.anthropic.test_anthropic.test_anthropic_llm_stream.json similarity index 82% rename 
from tests/snapshots/tests.contrib.anthropic.test_anthropic.test_anthropic_llm_sync_stream.json rename to tests/snapshots/tests.contrib.anthropic.test_anthropic.test_anthropic_llm_stream.json index 1db5f3ca452..46c95fc19ba 100644 --- a/tests/snapshots/tests.contrib.anthropic.test_anthropic.test_anthropic_llm_sync_stream.json +++ b/tests/snapshots/tests.contrib.anthropic.test_anthropic.test_anthropic_llm_stream.json @@ -10,7 +10,7 @@ "error": 0, "meta": { "_dd.p.dm": "-0", - "_dd.p.tid": "665e221c00000000", + "_dd.p.tid": "665f496b00000000", "anthropic.request.api_key": "sk-...key>", "anthropic.request.messages.0.content.0.text": "Can you explain what Descartes meant by 'I think, therefore I am'?", "anthropic.request.messages.0.content.0.type": "text", @@ -18,15 +18,15 @@ "anthropic.request.model": "claude-3-opus-20240229", "anthropic.request.parameters": "{\"model\": \"claude-3-opus-20240229\", \"max_tokens\": 15, \"stream\": true}", "language": "python", - "runtime-id": "b52cab756a314569a6d74fe80724c91a" + "runtime-id": "75b37cae2dc24d8190d27bcb14d4d263" }, "metrics": { "_dd.measured": 1, "_dd.top_level": 1, "_dd.tracer_kr": 1.0, "_sampling_priority_v1": 1, - "process_id": 95434 + "process_id": 62674 }, - "duration": 1912334000, - "start": 1717445148270890000 + "duration": 2040000, + "start": 1717520747965547000 }]] diff --git a/tests/snapshots/tests.contrib.anthropic.test_anthropic.test_anthropic_llm_tools.json b/tests/snapshots/tests.contrib.anthropic.test_anthropic.test_anthropic_llm_tools.json new file mode 100644 index 00000000000..ec7a852ed1c --- /dev/null +++ b/tests/snapshots/tests.contrib.anthropic.test_anthropic.test_anthropic_llm_tools.json @@ -0,0 +1,35 @@ +[[ + { + "name": "anthropic.request", + "service": "", + "resource": "Messages.create", + "trace_id": 0, + "span_id": 1, + "parent_id": 0, + "type": "", + "error": 1, + "meta": { + "_dd.p.dm": "-0", + "_dd.p.tid": "665f53c900000000", + "anthropic.request.api_key": "sk-...vAAA", + 
"anthropic.request.messages.0.content.0.text": "What is the result of 1,984,135 * 9,343,116?", + "anthropic.request.messages.0.content.0.type": "text", + "anthropic.request.messages.0.role": "user", + "anthropic.request.model": "claude-3-opus-20240229", + "anthropic.request.parameters": "{\"max_tokens\": 200}", + "error.message": "Messages.create() got an unexpected keyword argument 'tools'", + "error.stack": "Traceback (most recent call last):\n File \"/Users/william.conti/Documents/dd-trace/dd-trace-py/ddtrace/contrib/anthropic/patch.py\", line 96, in traced_chat_model_generate\n chat_completions = func(*args, **kwargs)\n File \"/Users/william.conti/Documents/dd-trace/dd-trace-py/.riot/venv_py31013_mock_pytest_pytest-mock_coverage_pytest-cov_opentracing_hypothesis6451_pytest-asyncio_vcrpy_anthropic/lib/python3.10/site-packages/anthropic/_utils/_utils.py\", line 277, in wrapper\n return func(*args, **kwargs)\nTypeError: Messages.create() got an unexpected keyword argument 'tools'\n", + "error.type": "builtins.TypeError", + "language": "python", + "runtime-id": "d480688f154a40d5a088457aabfa9956" + }, + "metrics": { + "_dd.measured": 1, + "_dd.top_level": 1, + "_dd.tracer_kr": 1.0, + "_sampling_priority_v1": 1, + "process_id": 7834 + }, + "duration": 1141864000, + "start": 1717523401024586000 + }]] diff --git a/tests/snapshots/tests.contrib.anthropic.test_anthropic_async.test_anthropic_llm_async_multiple_prompts_with_chat_history.json b/tests/snapshots/tests.contrib.anthropic.test_anthropic_async.test_anthropic_llm_async_multiple_prompts_with_chat_history.json deleted file mode 100644 index cc88e38f4ee..00000000000 --- a/tests/snapshots/tests.contrib.anthropic.test_anthropic_async.test_anthropic_llm_async_multiple_prompts_with_chat_history.json +++ /dev/null @@ -1,49 +0,0 @@ -[[ - { - "name": "anthropic.request", - "service": "", - "resource": "AsyncMessages.create", - "trace_id": 0, - "span_id": 1, - "parent_id": 0, - "type": "", - "error": 0, - "meta": { - 
"_dd.p.dm": "-0", - "_dd.p.tid": "665e2be900000000", - "anthropic.request.api_key": "sk-...key>", - "anthropic.request.messages.0.content.0.text": "Hello, Start all responses with your name Claude.", - "anthropic.request.messages.0.content.0.type": "text", - "anthropic.request.messages.0.content.1.text": "End all responses with [COPY, CLAUDE OVER AND OUT!]", - "anthropic.request.messages.0.content.1.type": "text", - "anthropic.request.messages.0.role": "user", - "anthropic.request.messages.1.content.0.text": "Claude: Sure! [COPY, CLAUDE OVER AND OUT!]", - "anthropic.request.messages.1.content.0.type": "text", - "anthropic.request.messages.1.role": "assistant", - "anthropic.request.messages.2.content.0.text": "Add the time and date to the beginning of your response after your name.", - "anthropic.request.messages.2.content.0.type": "text", - "anthropic.request.messages.2.content.1.text": "Explain string theory succinctly to a complete noob.", - "anthropic.request.messages.2.content.1.type": "text", - "anthropic.request.messages.2.role": "user", - "anthropic.request.model": "claude-3-opus-20240229", - "anthropic.request.parameters": "{\"model\": \"claude-3-opus-20240229\", \"max_tokens\": 30}", - "anthropic.response.completions.content.0.text": "Claude: It is currently Thursday, May 18, 2023 at 10:02 PM. 
String theory is a theoretical framework in", - "anthropic.response.completions.content.0.type": "text", - "anthropic.response.completions.finish_reason": "max_tokens", - "anthropic.response.completions.role": "assistant", - "language": "python", - "runtime-id": "c0229f435efe410daec373a127583690" - }, - "metrics": { - "_dd.measured": 1, - "_dd.top_level": 1, - "_dd.tracer_kr": 1.0, - "_sampling_priority_v1": 1, - "anthropic.response.usage.input_tokens": 84, - "anthropic.response.usage.output_tokens": 30, - "anthropic.response.usage.total_tokens": 114, - "process_id": 28083 - }, - "duration": 590200000, - "start": 1717447657168311000 - }]] diff --git a/tests/snapshots/tests.contrib.anthropic.test_anthropic_async.test_anthropic_llm_async_stream.json b/tests/snapshots/tests.contrib.anthropic.test_anthropic_async.test_anthropic_llm_async_stream.json deleted file mode 100644 index 469af165d6e..00000000000 --- a/tests/snapshots/tests.contrib.anthropic.test_anthropic_async.test_anthropic_llm_async_stream.json +++ /dev/null @@ -1,32 +0,0 @@ -[[ - { - "name": "anthropic.request", - "service": "", - "resource": "AsyncMessages.create", - "trace_id": 0, - "span_id": 1, - "parent_id": 0, - "type": "", - "error": 0, - "meta": { - "_dd.p.dm": "-0", - "_dd.p.tid": "665e2bd900000000", - "anthropic.request.api_key": "sk-...key>", - "anthropic.request.messages.0.content.0.text": "Can you explain what Descartes meant by 'I think, therefore I am'?", - "anthropic.request.messages.0.content.0.type": "text", - "anthropic.request.messages.0.role": "user", - "anthropic.request.model": "claude-3-opus-20240229", - "anthropic.request.parameters": "{\"model\": \"claude-3-opus-20240229\", \"max_tokens\": 15, \"stream\": true}", - "language": "python", - "runtime-id": "d4ce0d37e8c64f0aa013b92c180cbb42" - }, - "metrics": { - "_dd.measured": 1, - "_dd.top_level": 1, - "_dd.tracer_kr": 1.0, - "_sampling_priority_v1": 1, - "process_id": 27167 - }, - "duration": 1165397000, - "start": 
1717447641142774000 - }]] diff --git a/tests/snapshots/tests.contrib.anthropic.test_anthropic_async.test_anthropic_llm_error_async.json b/tests/snapshots/tests.contrib.anthropic.test_anthropic_async.test_anthropic_llm_error_async.json deleted file mode 100644 index 5439214068f..00000000000 --- a/tests/snapshots/tests.contrib.anthropic.test_anthropic_async.test_anthropic_llm_error_async.json +++ /dev/null @@ -1,32 +0,0 @@ -[[ - { - "name": "anthropic.request", - "service": "", - "resource": "AsyncMessages.create", - "trace_id": 0, - "span_id": 1, - "parent_id": 0, - "type": "", - "error": 1, - "meta": { - "_dd.p.dm": "-0", - "_dd.p.tid": "665e30d300000000", - "anthropic.request.api_key": "sk-...key>", - "anthropic.request.model": "claude-3-opus-20240229", - "anthropic.request.parameters": "{\"model\": \"claude-3-opus-20240229\", \"max_tokens\": 15}", - "error.message": "Error code: 400 - {'type': 'error', 'error': {'type': 'invalid_request_error', 'message': 'messages.0: Input does not match the expected shape.'}}", - "error.stack": "Traceback (most recent call last):\n File \"/Users/william.conti/Documents/dd-trace/dd-trace-py/ddtrace/contrib/anthropic/async_message.py\", line 72, in traced_async_chat_model_generate\n chat_completions = await func(*args, **kwargs)\n File \"/Users/william.conti/Documents/dd-trace/dd-trace-py/.riot/venv_py31013_mock_pytest_pytest-mock_coverage_pytest-cov_opentracing_hypothesis6451_vcrpy_pytest-asyncio_tiktoken_huggingface-hub_ai21_exceptiongroup_psutil_pytest-randomly_numexpr_anthropic/lib/python3.10/site-packages/anthropic/resources/messages.py\", line 1856, in create\n return await self._post(\n File \"/Users/william.conti/Documents/dd-trace/dd-trace-py/.riot/venv_py31013_mock_pytest_pytest-mock_coverage_pytest-cov_opentracing_hypothesis6451_vcrpy_pytest-asyncio_tiktoken_huggingface-hub_ai21_exceptiongroup_psutil_pytest-randomly_numexpr_anthropic/lib/python3.10/site-packages/anthropic/_base_client.py\", line 1789, in post\n return 
await self.request(cast_to, opts, stream=stream, stream_cls=stream_cls)\n File \"/Users/william.conti/Documents/dd-trace/dd-trace-py/.riot/venv_py31013_mock_pytest_pytest-mock_coverage_pytest-cov_opentracing_hypothesis6451_vcrpy_pytest-asyncio_tiktoken_huggingface-hub_ai21_exceptiongroup_psutil_pytest-randomly_numexpr_anthropic/lib/python3.10/site-packages/anthropic/_base_client.py\", line 1492, in request\n return await self._request(\n File \"/Users/william.conti/Documents/dd-trace/dd-trace-py/.riot/venv_py31013_mock_pytest_pytest-mock_coverage_pytest-cov_opentracing_hypothesis6451_vcrpy_pytest-asyncio_tiktoken_huggingface-hub_ai21_exceptiongroup_psutil_pytest-randomly_numexpr_anthropic/lib/python3.10/site-packages/anthropic/_base_client.py\", line 1583, in _request\n raise self._make_status_error_from_response(err.response) from None\nanthropic.BadRequestError: Error code: 400 - {'type': 'error', 'error': {'type': 'invalid_request_error', 'message': 'messages.0: Input does not match the expected shape.'}}\n", - "error.type": "anthropic.BadRequestError", - "language": "python", - "runtime-id": "11b1816282c84f6fa4d62f19c6833546" - }, - "metrics": { - "_dd.measured": 1, - "_dd.top_level": 1, - "_dd.tracer_kr": 1.0, - "_sampling_priority_v1": 1, - "process_id": 93267 - }, - "duration": 2707000, - "start": 1717448915658855000 - }]] From 785ada9f9069d01f78d9bf4e4a6233b132cf11ca Mon Sep 17 00:00:00 2001 From: William Conti Date: Tue, 4 Jun 2024 14:23:39 -0400 Subject: [PATCH 18/33] add tools tests --- .riot/requirements/1d5589b.txt | 48 +++++++ .riot/requirements/ceb0f20.txt | 48 +++++++ .../anthropic_completion_tools_part_1.yaml | 93 ++++++++++++ .../anthropic_completion_tools_part_2.yaml | 100 +++++++++++++ tests/contrib/anthropic/test_anthropic.py | 135 +++++++++--------- tests/contrib/anthropic/utils.py | 12 +- ...st_anthropic.test_anthropic_llm_tools.json | 70 +++++++-- 7 files changed, 419 insertions(+), 87 deletions(-) create mode 100644 
.riot/requirements/1d5589b.txt create mode 100644 .riot/requirements/ceb0f20.txt create mode 100644 tests/contrib/anthropic/cassettes/anthropic_completion_tools_part_1.yaml create mode 100644 tests/contrib/anthropic/cassettes/anthropic_completion_tools_part_2.yaml diff --git a/.riot/requirements/1d5589b.txt b/.riot/requirements/1d5589b.txt new file mode 100644 index 00000000000..150dc654846 --- /dev/null +++ b/.riot/requirements/1d5589b.txt @@ -0,0 +1,48 @@ +# +# This file is autogenerated by pip-compile with Python 3.10 +# by the following command: +# +# pip-compile --no-annotate .riot/requirements/1d5589b.in +# +annotated-types==0.7.0 +anthropic==0.28.0 +anyio==4.4.0 +attrs==23.2.0 +certifi==2024.6.2 +charset-normalizer==3.3.2 +coverage[toml]==7.5.3 +distro==1.9.0 +exceptiongroup==1.2.1 +filelock==3.14.0 +fsspec==2024.6.0 +h11==0.14.0 +httpcore==1.0.5 +httpx==0.27.0 +huggingface-hub==0.23.2 +hypothesis==6.45.0 +idna==3.7 +iniconfig==2.0.0 +jiter==0.4.1 +mock==5.1.0 +multidict==6.0.5 +opentracing==2.4.0 +packaging==24.0 +pluggy==1.5.0 +pydantic==2.7.3 +pydantic-core==2.18.4 +pytest==8.2.2 +pytest-asyncio==0.23.7 +pytest-cov==5.0.0 +pytest-mock==3.14.0 +pyyaml==6.0.1 +requests==2.32.3 +sniffio==1.3.1 +sortedcontainers==2.4.0 +tokenizers==0.19.1 +tomli==2.0.1 +tqdm==4.66.4 +typing-extensions==4.12.1 +urllib3==2.2.1 +vcrpy==6.0.1 +wrapt==1.16.0 +yarl==1.9.4 diff --git a/.riot/requirements/ceb0f20.txt b/.riot/requirements/ceb0f20.txt new file mode 100644 index 00000000000..a8be801ba17 --- /dev/null +++ b/.riot/requirements/ceb0f20.txt @@ -0,0 +1,48 @@ +# +# This file is autogenerated by pip-compile with Python 3.10 +# by the following command: +# +# pip-compile --no-annotate .riot/requirements/ceb0f20.in +# +annotated-types==0.7.0 +anthropic==0.28.0 +anyio==4.4.0 +attrs==23.2.0 +certifi==2024.6.2 +charset-normalizer==3.3.2 +coverage[toml]==7.5.3 +distro==1.9.0 +exceptiongroup==1.2.1 +filelock==3.14.0 +fsspec==2024.6.0 +h11==0.14.0 +httpcore==1.0.5 +httpx==0.27.0 
+huggingface-hub==0.23.2 +hypothesis==6.45.0 +idna==3.7 +iniconfig==2.0.0 +jiter==0.4.1 +mock==5.1.0 +multidict==6.0.5 +opentracing==2.4.0 +packaging==24.0 +pluggy==1.5.0 +pydantic==2.7.3 +pydantic-core==2.18.4 +pytest==8.2.2 +pytest-asyncio==0.23.7 +pytest-cov==5.0.0 +pytest-mock==3.14.0 +pyyaml==6.0.1 +requests==2.32.3 +sniffio==1.3.1 +sortedcontainers==2.4.0 +tokenizers==0.19.1 +tomli==2.0.1 +tqdm==4.66.4 +typing-extensions==4.12.1 +urllib3==2.2.1 +vcrpy==6.0.1 +wrapt==1.16.0 +yarl==1.9.4 diff --git a/tests/contrib/anthropic/cassettes/anthropic_completion_tools_part_1.yaml b/tests/contrib/anthropic/cassettes/anthropic_completion_tools_part_1.yaml new file mode 100644 index 00000000000..3cadcbfe185 --- /dev/null +++ b/tests/contrib/anthropic/cassettes/anthropic_completion_tools_part_1.yaml @@ -0,0 +1,93 @@ +interactions: +- request: + body: '{"max_tokens": 200, "messages": [{"role": "user", "content": "What is the + result of 1,984,135 * 9,343,116?"}], "model": "claude-3-opus-20240229", "tools": + [{"name": "calculator", "description": "A simple calculator that performs basic + arithmetic operations.", "input_schema": {"type": "object", "properties": {"expression": + {"type": "string", "description": "The mathematical expression to evaluate (e.g., + ''2 + 3 * 4'')."}}, "required": ["expression"]}}]}' + headers: + accept: + - application/json + accept-encoding: + - gzip, deflate + anthropic-version: + - '2023-06-01' + connection: + - keep-alive + content-length: + - '454' + content-type: + - application/json + host: + - api.anthropic.com + user-agent: + - Anthropic/Python 0.28.0 + x-stainless-arch: + - arm64 + x-stainless-async: + - 'false' + x-stainless-lang: + - python + x-stainless-os: + - MacOS + x-stainless-package-version: + - 0.28.0 + x-stainless-runtime: + - CPython + x-stainless-runtime-version: + - 3.10.13 + method: POST + uri: https://api.anthropic.com/v1/messages + response: + body: + string: !!binary | + 
H4sIAAAAAAAAA2xT227bMAz9FYKPg5PFSRY0xjCgxbZ2exjQIi/dMgSczMZaZckVqbRZkH8f5KBJ + d3kyxMvhOYf0Dm2NFbayXo3K2eIprbc/P9eXd7+aydftbRk+XFssULcd5yoWoTVjgTG4HCARK0pe + scA21OywQuMo1TyYDEKXZDAejaej8XiOBZrglb1i9W33DKj8lFv7T4VvtbH+3vr1u6VfBCAvjxxB + GyvwkFjUBl+ANgyGnEmONETQEBxY6cNtEIXIjjfkFcR6w2AVDHnoON6F2AJFq03Lag2EjiNlTAFn + 7xna5NR2zpo+OITFfwZFfkg2sgBl+LVj6ChSy8qxWvoB8FMXWcQGX/X9LWnDLak15F4kQQPwhlwi + 5aVf+lyahCM0JNDFsLE110DO9bI8m+x73IL1WUTPD6zPSRuP1mTQLnSZLh9MO3I7iPlz/rM0zu4t + sZyfTcvJG3gF88l0UpazJQ4ztS8BgjZ80C9AMfPhmusCSP5eRna6tpGNuu1pfXykOIRzdzSxPvE7 + 4NKGrKMfjguQAJ8Oe4vBMNfwaLWBlvJx9Ij9Ngw5N1z6t6+Pd4P74nRbIbhVknyt/Ynnd1qNypvF + xceLhbm8uT6fp3l4f3E7n11dYYGe2tx30pM7fZcUqx2ezMPqX7Nwv/9eoGjoVpFJ+qIX8/uE8ENi + bxgrn5wrMPW/UrU7zFhpuGcvWM2mowJD0pexcna23/8GAAD//wMA9J1CmqoDAAA= + headers: + CF-Cache-Status: + - DYNAMIC + CF-RAY: + - 88e9ec12ece64222-EWR + Connection: + - keep-alive + Content-Encoding: + - gzip + Content-Type: + - application/json + Date: + - Tue, 04 Jun 2024 18:18:02 GMT + Server: + - cloudflare + Transfer-Encoding: + - chunked + anthropic-ratelimit-requests-limit: + - '5' + anthropic-ratelimit-requests-remaining: + - '5' + anthropic-ratelimit-requests-reset: + - '2024-06-04T18:18:57Z' + anthropic-ratelimit-tokens-limit: + - '10000' + anthropic-ratelimit-tokens-remaining: + - '10000' + anthropic-ratelimit-tokens-reset: + - '2024-06-04T18:18:57Z' + request-id: + - req_01Doh23AovTtfiFkjBhh3ZWF + via: + - 1.1 google + x-cloud-trace-context: + - 628a594739971f53602fac0e1b9395b7 + status: + code: 200 + message: OK +version: 1 diff --git a/tests/contrib/anthropic/cassettes/anthropic_completion_tools_part_2.yaml b/tests/contrib/anthropic/cassettes/anthropic_completion_tools_part_2.yaml new file mode 100644 index 00000000000..40bfa00d33e --- /dev/null +++ b/tests/contrib/anthropic/cassettes/anthropic_completion_tools_part_2.yaml @@ -0,0 +1,100 @@ +interactions: +- request: + body: '{"max_tokens": 500, "messages": [{"role": "user", "content": "What is the + result of 
1,984,135 * 9,343,116?"}, {"role": "assistant", "content": [{"text": + "\nTo answer this question, the calculator tool is the most relevant + since it can perform arithmetic operations like multiplication. The calculator + tool requires a single parameter:\n- expression: The mathematical expression + to evaluate\n\nThe user has provided all the necessary information in their + question to populate this parameter. The expression to calculate is \"1984135 + * 9343116\".\n\nNo other tools are needed, as the calculator can directly answer + the question. All required parameters are available, so I can proceed with making + the tool call.\n", "type": "text"}, {"id": "toolu_01RTBFBTcGRQA9u9oDBY96HH", + "input": {"expression": "1984135 * 9343116"}, "name": "calculator", "type": + "tool_use"}]}, {"role": "user", "content": [{"type": "tool_result", "tool_use_id": + "toolu_01RTBFBTcGRQA9u9oDBY96HH", "content": "18538003464660"}]}], "model": + "claude-3-opus-20240229", "tools": [{"name": "calculator", "description": "A + simple calculator that performs basic arithmetic operations.", "input_schema": + {"type": "object", "properties": {"expression": {"type": "string", "description": + "The mathematical expression to evaluate (e.g., ''2 + 3 * 4'')."}}, "required": + ["expression"]}}]}' + headers: + accept: + - application/json + accept-encoding: + - gzip, deflate + anthropic-version: + - '2023-06-01' + connection: + - keep-alive + content-length: + - '1362' + content-type: + - application/json + host: + - api.anthropic.com + user-agent: + - Anthropic/Python 0.28.0 + x-stainless-arch: + - arm64 + x-stainless-async: + - 'false' + x-stainless-lang: + - python + x-stainless-os: + - MacOS + x-stainless-package-version: + - 0.28.0 + x-stainless-runtime: + - CPython + x-stainless-runtime-version: + - 3.10.13 + method: POST + uri: https://api.anthropic.com/v1/messages + response: + body: + string: !!binary | + H4sIAAAAAAAAA0yP3UrDQBCFX2U5lzKVJJuEZK+L3hVEEUQkxGSaRNPdNDMLSum7S4uCVwe+8wPn 
+ hKmHw0GGJkkfx+G49f3uLit2L8/30/T+8LEVEPR74UuKRdqBQVjDfAGtyCTaegXhEHqe4dDNbex5 + YzdhibLJkixPsqwGoQte2Svc6+lvUPnrUr2Kw9PIK+/DymR0ZLOyxFlN2JuU6iqn1BbmxtRkc0tp + WppJTFpRYStKEkt5mVNZJrc4vxFEw9Ks3ErwcGDfNxpXj19D+BjZdwzn4zwT4vWUO2HyS9RGwyd7 + gauynBCi/me2PJ9/AAAA//8DAPgqkqEzAQAA + headers: + CF-Cache-Status: + - DYNAMIC + CF-RAY: + - 88e9ec564ef64222-EWR + Connection: + - keep-alive + Content-Encoding: + - gzip + Content-Type: + - application/json + Date: + - Tue, 04 Jun 2024 18:18:05 GMT + Server: + - cloudflare + Transfer-Encoding: + - chunked + anthropic-ratelimit-requests-limit: + - '5' + anthropic-ratelimit-requests-remaining: + - '4' + anthropic-ratelimit-requests-reset: + - '2024-06-04T18:18:57Z' + anthropic-ratelimit-tokens-limit: + - '10000' + anthropic-ratelimit-tokens-remaining: + - '9000' + anthropic-ratelimit-tokens-reset: + - '2024-06-04T18:18:57Z' + request-id: + - req_01Kyz3hB1MjdDHfFRfMoReTc + via: + - 1.1 google + x-cloud-trace-context: + - 155c7bc323534f58baa0f8149ca58e5a + status: + code: 200 + message: OK +version: 1 diff --git a/tests/contrib/anthropic/test_anthropic.py b/tests/contrib/anthropic/test_anthropic.py index 859ae0dbdc6..ffaa9b294de 100644 --- a/tests/contrib/anthropic/test_anthropic.py +++ b/tests/contrib/anthropic/test_anthropic.py @@ -1,11 +1,16 @@ +import anthropic as anthropic_module import pytest +from ddtrace.internal.utils.version import parse_version from tests.utils import override_global_config from .utils import process_tool_call from .utils import tools +ANTHROPIC_VERSION = parse_version(anthropic_module.__version__) + + def test_global_tags(ddtrace_config_anthropic, anthropic, request_vcr, mock_tracer): """ When the global config UST tags are set @@ -32,10 +37,7 @@ def test_global_tags(ddtrace_config_anthropic, anthropic, request_vcr, mock_trac assert span.get_tag("anthropic.request.api_key") == "sk-...key>" -@pytest.mark.snapshot( - token="tests.contrib.anthropic.test_anthropic.test_anthropic_llm", - ignores=["resource"] 
-) +@pytest.mark.snapshot(token="tests.contrib.anthropic.test_anthropic.test_anthropic_llm", ignores=["resource"]) def test_anthropic_llm_sync(anthropic, request_vcr): llm = anthropic.Anthropic() with request_vcr.use_cassette("anthropic_completion.yaml"): @@ -57,8 +59,7 @@ def test_anthropic_llm_sync(anthropic, request_vcr): @pytest.mark.snapshot( - token="tests.contrib.anthropic.test_anthropic.test_anthropic_llm_multiple_prompts", - ignores=["resource"] + token="tests.contrib.anthropic.test_anthropic.test_anthropic_llm_multiple_prompts", ignores=["resource"] ) def test_anthropic_llm_sync_multiple_prompts(anthropic, request_vcr): llm = anthropic.Anthropic() @@ -80,7 +81,7 @@ def test_anthropic_llm_sync_multiple_prompts(anthropic, request_vcr): @pytest.mark.snapshot( token="tests.contrib.anthropic.test_anthropic.test_anthropic_llm_multiple_prompts_with_chat_history", - ignores=["resource"] + ignores=["resource"], ) def test_anthropic_llm_sync_multiple_prompts_with_chat_history(anthropic, request_vcr): llm = anthropic.Anthropic() @@ -112,8 +113,7 @@ def test_anthropic_llm_sync_multiple_prompts_with_chat_history(anthropic, reques @pytest.mark.snapshot( - ignores=["meta.error.stack", "resource"], - token="tests.contrib.anthropic.test_anthropic.test_anthropic_llm_error" + ignores=["meta.error.stack", "resource"], token="tests.contrib.anthropic.test_anthropic.test_anthropic_llm_error" ) def test_anthropic_llm_error(anthropic, request_vcr): llm = anthropic.Anthropic() @@ -123,10 +123,7 @@ def test_anthropic_llm_error(anthropic, request_vcr): llm.messages.create(model="claude-3-opus-20240229", max_tokens=15, messages=["Invalid content"]) -@pytest.mark.snapshot( - token="tests.contrib.anthropic.test_anthropic.test_anthropic_llm_stream", - ignores=["resource"] -) +@pytest.mark.snapshot(token="tests.contrib.anthropic.test_anthropic.test_anthropic_llm_stream", ignores=["resource"]) def test_anthropic_llm_sync_stream(anthropic, request_vcr): llm = anthropic.Anthropic() with 
request_vcr.use_cassette("anthropic_completion_stream.yaml"): @@ -150,13 +147,11 @@ def test_anthropic_llm_sync_stream(anthropic, request_vcr): pass -@pytest.mark.snapshot( - token="tests.contrib.anthropic.test_anthropic.test_anthropic_llm_tools", - ignores=["resource"] -) +@pytest.mark.snapshot(token="tests.contrib.anthropic.test_anthropic.test_anthropic_llm_tools", ignores=["resource"]) +@pytest.mark.skipif(ANTHROPIC_VERSION < (0, 27), reason="Anthropic Tools not available until 0.27.0, skipping.") def test_anthropic_llm_sync_tools(anthropic, request_vcr): llm = anthropic.Anthropic() - with request_vcr.use_cassette("anthropic_completion_tools.yaml"): + with request_vcr.use_cassette("anthropic_completion_tools_part_1.yaml"): message = llm.messages.create( model="claude-3-opus-20240229", max_tokens=200, @@ -164,6 +159,7 @@ def test_anthropic_llm_sync_tools(anthropic, request_vcr): tools=tools, ) + with request_vcr.use_cassette("anthropic_completion_tools_part_2.yaml"): if message.stop_reason == "tool_use": tool_use = next(block for block in message.content if block.type == "tool_use") tool_name = tool_use.name @@ -198,7 +194,6 @@ def test_anthropic_llm_sync_tools(anthropic, request_vcr): None, ) assert final_response is not None - assert getattr(final_response, "content") is not None # Async tests @@ -234,8 +229,7 @@ async def test_global_tags_async(ddtrace_config_anthropic, anthropic, request_vc @pytest.mark.asyncio async def test_anthropic_llm_async_basic(anthropic, request_vcr, snapshot_context): with snapshot_context( - token="tests.contrib.anthropic.test_anthropic.test_anthropic_llm_basic", - ignores=["resource"] + token="tests.contrib.anthropic.test_anthropic.test_anthropic_llm_basic", ignores=["resource"] ): llm = anthropic.AsyncAnthropic() with request_vcr.use_cassette("anthropic_completion.yaml"): @@ -260,7 +254,7 @@ async def test_anthropic_llm_async_basic(anthropic, request_vcr, snapshot_contex async def 
test_anthropic_llm_async_multiple_prompts_no_history(anthropic, request_vcr, snapshot_context): with snapshot_context( token="tests.contrib.anthropic.test_anthropic.test_anthropic_llm_multiple_prompts_no_history", - ignores=["resource"] + ignores=["resource"], ): llm = anthropic.AsyncAnthropic() with request_vcr.use_cassette("anthropic_completion_multi_prompt.yaml"): @@ -286,7 +280,7 @@ async def test_anthropic_llm_async_multiple_prompts_no_history(anthropic, reques async def test_anthropic_llm_async_multiple_prompts_with_chat_history(anthropic, request_vcr, snapshot_context): with snapshot_context( token="tests.contrib.anthropic.test_anthropic.test_anthropic_llm_multiple_prompts_with_chat_history", - ignores=["resource"] + ignores=["resource"], ): llm = anthropic.AsyncAnthropic() with request_vcr.use_cassette("anthropic_completion_multi_prompt_with_chat_history.yaml"): @@ -320,7 +314,7 @@ async def test_anthropic_llm_async_multiple_prompts_with_chat_history(anthropic, async def test_anthropic_llm_error_async(anthropic, request_vcr, snapshot_context): with snapshot_context( ignores=["meta.error.stack", "resource"], - token="tests.contrib.anthropic.test_anthropic.test_anthropic_llm_error" + token="tests.contrib.anthropic.test_anthropic.test_anthropic_llm_error", ): llm = anthropic.AsyncAnthropic() invalid_error = anthropic.BadRequestError @@ -332,8 +326,7 @@ async def test_anthropic_llm_error_async(anthropic, request_vcr, snapshot_contex @pytest.mark.asyncio async def test_anthropic_llm_async_stream(anthropic, request_vcr, snapshot_context): with snapshot_context( - token="tests.contrib.anthropic.test_anthropic.test_anthropic_llm_stream", - ignores=["resource"] + token="tests.contrib.anthropic.test_anthropic.test_anthropic_llm_stream", ignores=["resource"] ): llm = anthropic.AsyncAnthropic() with request_vcr.use_cassette("anthropic_completion_stream.yaml"): @@ -357,52 +350,52 @@ async def test_anthropic_llm_async_stream(anthropic, request_vcr, snapshot_conte pass 
-@pytest.mark.snapshot( - token="tests.contrib.anthropic.test_anthropic.test_anthropic_llm_tools", - ignores=["resource"] -) -async def test_anthropic_llm_async_tools(anthropic, request_vcr): - llm = anthropic.Anthropic() - with request_vcr.use_cassette("anthropic_completion_tools.yaml"): - message = await llm.messages.create( - model="claude-3-opus-20240229", - max_tokens=200, - messages=[{"role": "user", "content": "What is the result of 1,984,135 * 9,343,116?"}], - tools=tools, - ) - - if message.stop_reason == "tool_use": - tool_use = next(block for block in message.content if block.type == "tool_use") - tool_name = tool_use.name - tool_input = tool_use.input - - tool_result = process_tool_call(tool_name, tool_input) - - response = await llm.messages.create( +@pytest.mark.skipif(ANTHROPIC_VERSION < (0, 27), reason="Anthropic Tools not available until 0.27.0, skipping.") +async def test_anthropic_llm_async_tools(anthropic, request_vcr, snapshot_context): + with snapshot_context( + token="tests.contrib.anthropic.test_anthropic.test_anthropic_llm_tools", ignores=["resource"] + ): + llm = anthropic.AsyncAnthropic() + with request_vcr.use_cassette("anthropic_completion_tools_part_1.yaml"): + message = await llm.messages.create( model="claude-3-opus-20240229", - max_tokens=500, - messages=[ - {"role": "user", "content": "What is the result of 1,984,135 * 9,343,116?"}, - {"role": "assistant", "content": message.content}, - { - "role": "user", - "content": [ - { - "type": "tool_result", - "tool_use_id": tool_use.id, - "content": tool_result, - } - ], - }, - ], + max_tokens=200, + messages=[{"role": "user", "content": "What is the result of 1,984,135 * 9,343,116?"}], tools=tools, ) - else: - response = message - final_response = next( - (block.text for block in response.content if hasattr(block, "text")), - None, - ) - assert final_response is not None - assert getattr(final_response, "content") is not None + with 
request_vcr.use_cassette("anthropic_completion_tools_part_2.yaml"): + if message.stop_reason == "tool_use": + tool_use = next(block for block in message.content if block.type == "tool_use") + tool_name = tool_use.name + tool_input = tool_use.input + + tool_result = process_tool_call(tool_name, tool_input) + + response = await llm.messages.create( + model="claude-3-opus-20240229", + max_tokens=500, + messages=[ + {"role": "user", "content": "What is the result of 1,984,135 * 9,343,116?"}, + {"role": "assistant", "content": message.content}, + { + "role": "user", + "content": [ + { + "type": "tool_result", + "tool_use_id": tool_use.id, + "content": tool_result, + } + ], + }, + ], + tools=tools, + ) + else: + response = message + + final_response = next( + (block.text for block in response.content if hasattr(block, "text")), + None, + ) + assert final_response is not None diff --git a/tests/contrib/anthropic/utils.py b/tests/contrib/anthropic/utils.py index 21a41dcdba7..9420c88b7d3 100644 --- a/tests/contrib/anthropic/utils.py +++ b/tests/contrib/anthropic/utils.py @@ -32,15 +32,15 @@ def get_request_vcr(): def calculate(expression): # Remove any non-digit or non-operator characters from the expression - expression = re.sub(r'[^0-9+\-*/().]', '', expression) - + expression = re.sub(r"[^0-9+\-*/().]", "", expression) + try: # Evaluate the expression using the built-in eval() function result = eval(expression) return str(result) except (SyntaxError, ZeroDivisionError, NameError, TypeError, OverflowError): return "Error: Invalid expression" - + tools = [ { @@ -51,11 +51,11 @@ def calculate(expression): "properties": { "expression": { "type": "string", - "description": "The mathematical expression to evaluate (e.g., '2 + 3 * 4')." 
+ "description": "The mathematical expression to evaluate (e.g., '2 + 3 * 4').", } }, - "required": ["expression"] - } + "required": ["expression"], + }, } ] diff --git a/tests/snapshots/tests.contrib.anthropic.test_anthropic.test_anthropic_llm_tools.json b/tests/snapshots/tests.contrib.anthropic.test_anthropic.test_anthropic_llm_tools.json index ec7a852ed1c..045c16a823e 100644 --- a/tests/snapshots/tests.contrib.anthropic.test_anthropic.test_anthropic_llm_tools.json +++ b/tests/snapshots/tests.contrib.anthropic.test_anthropic.test_anthropic_llm_tools.json @@ -7,29 +7,79 @@ "span_id": 1, "parent_id": 0, "type": "", - "error": 1, + "error": 0, "meta": { "_dd.p.dm": "-0", - "_dd.p.tid": "665f53c900000000", - "anthropic.request.api_key": "sk-...vAAA", + "_dd.p.tid": "665f5aa900000000", + "anthropic.request.api_key": "sk-...key>", "anthropic.request.messages.0.content.0.text": "What is the result of 1,984,135 * 9,343,116?", "anthropic.request.messages.0.content.0.type": "text", "anthropic.request.messages.0.role": "user", "anthropic.request.model": "claude-3-opus-20240229", "anthropic.request.parameters": "{\"max_tokens\": 200}", - "error.message": "Messages.create() got an unexpected keyword argument 'tools'", - "error.stack": "Traceback (most recent call last):\n File \"/Users/william.conti/Documents/dd-trace/dd-trace-py/ddtrace/contrib/anthropic/patch.py\", line 96, in traced_chat_model_generate\n chat_completions = func(*args, **kwargs)\n File \"/Users/william.conti/Documents/dd-trace/dd-trace-py/.riot/venv_py31013_mock_pytest_pytest-mock_coverage_pytest-cov_opentracing_hypothesis6451_pytest-asyncio_vcrpy_anthropic/lib/python3.10/site-packages/anthropic/_utils/_utils.py\", line 277, in wrapper\n return func(*args, **kwargs)\nTypeError: Messages.create() got an unexpected keyword argument 'tools'\n", - "error.type": "builtins.TypeError", + "anthropic.response.completions.content.0.text": "\\nTo answer this question, the calculator tool is the most relevant since it 
can perform arithmetic operations like mu...", + "anthropic.response.completions.content.0.type": "text", + "anthropic.response.completions.content.1.type": "tool_use", + "anthropic.response.completions.finish_reason": "tool_use", + "anthropic.response.completions.role": "assistant", "language": "python", - "runtime-id": "d480688f154a40d5a088457aabfa9956" + "runtime-id": "505db4a9bdda41429de2cc066a67aa7c" }, "metrics": { "_dd.measured": 1, "_dd.top_level": 1, "_dd.tracer_kr": 1.0, "_sampling_priority_v1": 1, - "process_id": 7834 + "anthropic.response.usage.input_tokens": 640, + "anthropic.response.usage.output_tokens": 168, + "anthropic.response.usage.total_tokens": 808, + "process_id": 1444 }, - "duration": 1141864000, - "start": 1717523401024586000 + "duration": 24166000, + "start": 1717525161190237000 + }], +[ + { + "name": "anthropic.request", + "service": "", + "resource": "Messages.create", + "trace_id": 1, + "span_id": 1, + "parent_id": 0, + "type": "", + "error": 0, + "meta": { + "_dd.p.dm": "-0", + "_dd.p.tid": "665f5aa900000000", + "anthropic.request.api_key": "sk-...key>", + "anthropic.request.messages.0.content.0.text": "What is the result of 1,984,135 * 9,343,116?", + "anthropic.request.messages.0.content.0.type": "text", + "anthropic.request.messages.0.role": "user", + "anthropic.request.messages.1.content.0.text": "\\nTo answer this question, the calculator tool is the most relevant since it can perform arithmetic operations like mu...", + "anthropic.request.messages.1.content.0.type": "text", + "anthropic.request.messages.1.content.1.type": "tool_use", + "anthropic.request.messages.1.role": "assistant", + "anthropic.request.messages.2.content.0.type": "tool_result", + "anthropic.request.messages.2.role": "user", + "anthropic.request.model": "claude-3-opus-20240229", + "anthropic.request.parameters": "{\"max_tokens\": 500}", + "anthropic.response.completions.content.0.text": "Therefore, the result of 1,984,135 * 9,343,116 is 18,538,003,464,660.", + 
"anthropic.response.completions.content.0.type": "text", + "anthropic.response.completions.finish_reason": "end_turn", + "anthropic.response.completions.role": "assistant", + "language": "python", + "runtime-id": "505db4a9bdda41429de2cc066a67aa7c" + }, + "metrics": { + "_dd.measured": 1, + "_dd.top_level": 1, + "_dd.tracer_kr": 1.0, + "_sampling_priority_v1": 1, + "anthropic.response.usage.input_tokens": 824, + "anthropic.response.usage.output_tokens": 36, + "anthropic.response.usage.total_tokens": 860, + "process_id": 1444 + }, + "duration": 5855000, + "start": 1717525161218346000 }]] From cd8cc3974939259106011d744be24c0f0dd1d6d8 Mon Sep 17 00:00:00 2001 From: William Conti Date: Tue, 4 Jun 2024 14:26:40 -0400 Subject: [PATCH 19/33] update riotfile --- riotfile.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/riotfile.py b/riotfile.py index 842f0c1cefe..ee94d296f9d 100644 --- a/riotfile.py +++ b/riotfile.py @@ -2540,7 +2540,7 @@ def select_pys(min_version=MIN_PYTHON_VERSION, max_version=MAX_PYTHON_VERSION): pkgs={ "pytest-asyncio": latest, "vcrpy": latest, - "anthropic": latest, + "anthropic": [latest, "~=0.28", "~=0.26"], }, ), Venv( From 8c75f658919666d71b969194782b7d366dd43aac Mon Sep 17 00:00:00 2001 From: William Conti Date: Tue, 4 Jun 2024 14:41:55 -0400 Subject: [PATCH 20/33] fix snapshots / cassettes --- .../cassettes/anthropic_completion.yaml | 84 +++++++ .../cassettes/anthropic_completion_error.yaml | 12 +- .../anthropic_completion_multi_prompt.yaml | 32 ++- ...letion_multi_prompt_with_chat_history.yaml | 29 ++- .../anthropic_completion_stream.yaml | 55 ++--- .../anthropic_completion_tools_part_1.yaml | 38 +-- .../anthropic_completion_tools_part_2.yaml | 221 ++++++++++++++++-- ...pic.test_anthropic.test_anthropic_llm.json | 18 +- ...st_anthropic.test_anthropic_llm_basic.json | 18 +- ...st_anthropic.test_anthropic_llm_error.json | 14 +- ...c.test_anthropic_llm_multiple_prompts.json | 12 +- ...ropic_llm_multiple_prompts_no_history.json 
| 12 +- ...lm_multiple_prompts_with_chat_history.json | 14 +- ...t_anthropic.test_anthropic_llm_stream.json | 12 +- ...st_anthropic.test_anthropic_llm_tools.json | 58 ++--- 15 files changed, 446 insertions(+), 183 deletions(-) create mode 100644 tests/contrib/anthropic/cassettes/anthropic_completion.yaml diff --git a/tests/contrib/anthropic/cassettes/anthropic_completion.yaml b/tests/contrib/anthropic/cassettes/anthropic_completion.yaml new file mode 100644 index 00000000000..e7713de9b35 --- /dev/null +++ b/tests/contrib/anthropic/cassettes/anthropic_completion.yaml @@ -0,0 +1,84 @@ +interactions: +- request: + body: '{"max_tokens": 15, "messages": [{"role": "user", "content": "What does + Nietzsche mean by ''God is dead''?"}], "model": "claude-3-opus-20240229"}' + headers: + accept: + - application/json + accept-encoding: + - gzip, deflate + anthropic-version: + - '2023-06-01' + connection: + - keep-alive + content-length: + - '142' + content-type: + - application/json + host: + - api.anthropic.com + user-agent: + - Anthropic/Python 0.26.1 + x-stainless-arch: + - arm64 + x-stainless-async: + - 'false' + x-stainless-lang: + - python + x-stainless-os: + - MacOS + x-stainless-package-version: + - 0.26.1 + x-stainless-runtime: + - CPython + x-stainless-runtime-version: + - 3.10.13 + method: POST + uri: https://api.anthropic.com/v1/messages + response: + body: + string: !!binary | + H4sIAAAAAAAAA0xPyWrDMBT8FTFnGRw1wVS3npJDySXQhaYYYb3EIrLk+knFqfG/F4cGehqYjZkJ + zkKj43Ndrrbj01vF34fqEMzleUeb6n03vEAiXXtaXMRszgSJIfqFMMyOkwkJEl205KHReJMtFQ9F + 7DMXqlTrUqlHSDQxJAoJ+mO6FyYal+gNNF5bCmLvKP1w05I4mS5m9ldhqfFmICuO2EYrHAtLxh4h + XBCtY8yfEpxiXw9kOIZlqBnrFC8UGH8S01em0BB0yN5L5NsRPcGFPqe7WSslEXP6T6028/wLAAD/ + /wMAVY+ZxCYBAAA= + headers: + CF-Cache-Status: + - DYNAMIC + CF-RAY: + - 88ea0b65093d42c0-EWR + Connection: + - keep-alive + Content-Encoding: + - gzip + Content-Type: + - application/json + Date: + - Tue, 04 Jun 2024 18:39:16 GMT + Server: + - cloudflare + Transfer-Encoding: + - 
chunked + anthropic-ratelimit-requests-limit: + - '5' + anthropic-ratelimit-requests-remaining: + - '4' + anthropic-ratelimit-requests-reset: + - '2024-06-04T18:39:57Z' + anthropic-ratelimit-tokens-limit: + - '10000' + anthropic-ratelimit-tokens-remaining: + - '10000' + anthropic-ratelimit-tokens-reset: + - '2024-06-04T18:39:57Z' + request-id: + - req_01A5sdFuYR9e3QLNLdpFz2g5 + via: + - 1.1 google + x-cloud-trace-context: + - 35c62374de27fdf8dc2fe68a12494c9e + status: + code: 200 + message: OK +version: 1 diff --git a/tests/contrib/anthropic/cassettes/anthropic_completion_error.yaml b/tests/contrib/anthropic/cassettes/anthropic_completion_error.yaml index 9a62acea110..05f66f54982 100644 --- a/tests/contrib/anthropic/cassettes/anthropic_completion_error.yaml +++ b/tests/contrib/anthropic/cassettes/anthropic_completion_error.yaml @@ -17,7 +17,7 @@ interactions: host: - api.anthropic.com user-agent: - - Anthropic/Python 0.28.0 + - Anthropic/Python 0.26.1 x-stainless-arch: - arm64 x-stainless-async: @@ -27,7 +27,7 @@ interactions: x-stainless-os: - MacOS x-stainless-package-version: - - 0.28.0 + - 0.26.1 x-stainless-runtime: - CPython x-stainless-runtime-version: @@ -42,7 +42,7 @@ interactions: CF-Cache-Status: - DYNAMIC CF-RAY: - - 88e24ced7c9f4265-EWR + - 88ea0b8f5b99c324-EWR Connection: - keep-alive Content-Length: @@ -50,15 +50,15 @@ interactions: Content-Type: - application/json Date: - - Mon, 03 Jun 2024 20:05:52 GMT + - Tue, 04 Jun 2024 18:39:21 GMT Server: - cloudflare request-id: - - req_01LsGyzwBtnCxAUjeyT4tmSs + - req_01BjwuzUpsYgKjQJguJyvKb3 via: - 1.1 google x-cloud-trace-context: - - bbb9c1f0aa9c1d6f521121102e32ca2d + - 72a7a5ddf1a822fbbef5e3429c86b4f9 x-should-retry: - 'false' status: diff --git a/tests/contrib/anthropic/cassettes/anthropic_completion_multi_prompt.yaml b/tests/contrib/anthropic/cassettes/anthropic_completion_multi_prompt.yaml index fbd3e79ade3..589aa0e6126 100644 --- 
a/tests/contrib/anthropic/cassettes/anthropic_completion_multi_prompt.yaml +++ b/tests/contrib/anthropic/cassettes/anthropic_completion_multi_prompt.yaml @@ -20,7 +20,7 @@ interactions: host: - api.anthropic.com user-agent: - - Anthropic/Python 0.28.0 + - Anthropic/Python 0.26.1 x-stainless-arch: - arm64 x-stainless-async: @@ -30,7 +30,7 @@ interactions: x-stainless-os: - MacOS x-stainless-package-version: - - 0.28.0 + - 0.26.1 x-stainless-runtime: - CPython x-stainless-runtime-version: @@ -40,16 +40,16 @@ interactions: response: body: string: !!binary | - H4sIAAAAAAAAA0xPTUvDQBD9K8ucPGwgSW2pe+vNil7Ek1bCmEyTpZvddWdWWkL+u6RY8PTgffHe - BLYDAyP3TVmt33e715e2/am2j5v+CzfHlOITaJBLpMVFzNgTaEjBLQQyWxb0AhrG0JEDA63D3FGx - KkLMXNRlfV/W9QNoaIMX8gLmY7oVCp2X6BUMvA2k4pCQSR1gr2Sw/qSVDJToGBKpvcLxAOouJNtb - j85dlPXqGcV6hQzzpwaWEJtEyMEve/HcSDiRZ/iTmL4z+ZbA+Oychnz9YyawPma5mc1qqyFk+U9V - 63n+BQAA//8DAG0IyPstAQAA + H4sIAAAAAAAAA0xPTUvDQBD9K8ucPGwgTawfe1ahYD2IIGIlLMmYrNnMpjuzklry3yXFgqcH74v3 + juAaMDBwW+Wrcv+9nh6u315/rviuf74PX9unbQsa5DDi4kJm2yJoiMEvhGV2LJYENAyhQQ8Gam9T + g1mZhTFxVuTFZV4Ut6ChDiRIAub9eC4UnJboCQy8dKjGLlpGtYONks5Rr5V0GPEzRFQbZYcdqIsQ + XevIen9QjtSjFUfKMswfGljCWEW0HGjZa6dKQo/E8Ccx7hNSjWAoea8hnf6YIzgak5zNprzREJL8 + p1bref4FAAD//wMAZQP45C0BAAA= headers: CF-Cache-Status: - DYNAMIC CF-RAY: - - 88e24cf95a3941d9-EWR + - 88ea0b71e8507c82-EWR Connection: - keep-alive Content-Encoding: @@ -57,7 +57,7 @@ interactions: Content-Type: - application/json Date: - - Mon, 03 Jun 2024 20:05:56 GMT + - Tue, 04 Jun 2024 18:39:19 GMT Server: - cloudflare Transfer-Encoding: @@ -65,23 +65,21 @@ interactions: anthropic-ratelimit-requests-limit: - '5' anthropic-ratelimit-requests-remaining: - - '0' + - '3' anthropic-ratelimit-requests-reset: - - '2024-06-03T20:05:57Z' + - '2024-06-04T18:39:57Z' anthropic-ratelimit-tokens-limit: - '10000' anthropic-ratelimit-tokens-remaining: - - '9000' + - '10000' anthropic-ratelimit-tokens-reset: - - '2024-06-03T20:05:57Z' + - '2024-06-04T18:39:57Z' request-id: - - 
req_01X3Zqqeshn8FCupaMdEgqRS - retry-after: - - '1' + - req_01JNYCpsfn56US1343mqs9v9 via: - 1.1 google x-cloud-trace-context: - - 0794e94ca00c706013360048aa1bc46a + - 5395efa3d90bb0ad95ecd99eb0f0d363 status: code: 200 message: OK diff --git a/tests/contrib/anthropic/cassettes/anthropic_completion_multi_prompt_with_chat_history.yaml b/tests/contrib/anthropic/cassettes/anthropic_completion_multi_prompt_with_chat_history.yaml index 20c469c774e..5c1b3a1f547 100644 --- a/tests/contrib/anthropic/cassettes/anthropic_completion_multi_prompt_with_chat_history.yaml +++ b/tests/contrib/anthropic/cassettes/anthropic_completion_multi_prompt_with_chat_history.yaml @@ -23,7 +23,7 @@ interactions: host: - api.anthropic.com user-agent: - - Anthropic/Python 0.28.0 + - Anthropic/Python 0.26.1 x-stainless-arch: - arm64 x-stainless-async: @@ -33,7 +33,7 @@ interactions: x-stainless-os: - MacOS x-stainless-package-version: - - 0.28.0 + - 0.26.1 x-stainless-runtime: - CPython x-stainless-runtime-version: @@ -43,17 +43,16 @@ interactions: response: body: string: !!binary | - H4sIAAAAAAAAA0yQUUvDUAyF/0rIk0ILXTunvW9jPvkk+iAiMkKbtZe1uV2Ty1bG/rt0OvAp4eQ7 - B07O6Gt02GuzzRbP5erjZVq/va6nSh93x+P+c7M6YII2DTxTrEoNY4Jj6GaBVL0aiWGCfai5Q4dV - R7HmtEjDEDXNs3yZ5XmJCVZBjMXQfZ1vgcan2XodDjdXJ9zlWV6kWZFmJSxWbvFw7+DdRi8NWMth - nMAr0O/O5ivqYDdSz8cw7sELDO2kvlKwlgzIjPvBFCxAFL+b4BBJLPbQc9WSzCBJDQ0Lj9Th5TtB - tTBsRyYNMpem09bCnkXx76R8iCwVo5PYdQnG61PcGb0M0W6we1omGKL9l4rscvkBAAD//wMAf6mf - SHIBAAA= + H4sIAAAAAAAAA0yQQWsCQQyF/0rIecXtuBad60IPvUppoRYZxrgO7mTGTYa6iP+9rK3QUx7vfS+Q + XDHs0WKUblc/bV4Obz5+tK/p3Y+txGezXrYdVqhjpokiEdcRVjikfjKcSBB1rFhhTHvq0aLvXdnT + bDFLucjM1KapjVljhT6xEivaz+tjodJlqt6HxfbetNDMTT03tVnAyjbLHGHLW97oELgDPVIaRggC + 7leTBu96OAwu0ncaThAY8nGU4AX06BScKsWsApqgcDiMcC6OtUS8fVUomvJuICeJpwPdZafpRCz4 + FwmdC7EntFz6vsJyf4C9YuBc9AHbVVNhKvrfWtS32w8AAAD//wMAnC/nG14BAAA= headers: CF-Cache-Status: - DYNAMIC CF-RAY: - - 88e24c723f1e8c71-EWR + - 88ea0b809ed00fa5-EWR Connection: - keep-alive Content-Encoding: @@ 
-61,7 +60,7 @@ interactions: Content-Type: - application/json Date: - - Mon, 03 Jun 2024 20:05:35 GMT + - Tue, 04 Jun 2024 18:39:21 GMT Server: - cloudflare Transfer-Encoding: @@ -69,21 +68,21 @@ interactions: anthropic-ratelimit-requests-limit: - '5' anthropic-ratelimit-requests-remaining: - - '4' + - '2' anthropic-ratelimit-requests-reset: - - '2024-06-03T20:05:57Z' + - '2024-06-04T18:39:57Z' anthropic-ratelimit-tokens-limit: - '10000' anthropic-ratelimit-tokens-remaining: - '10000' anthropic-ratelimit-tokens-reset: - - '2024-06-03T20:05:57Z' + - '2024-06-04T18:39:57Z' request-id: - - req_01DgqqUcVyhvARruFHNFA9pG + - req_01JCE3RjQzBhxViGz6sXDxBk via: - 1.1 google x-cloud-trace-context: - - d47e1ebd73e92fe1f28d8b0b5b336751 + - d055b6dd5dc489a3b5c6f453c572c24f status: code: 200 message: OK diff --git a/tests/contrib/anthropic/cassettes/anthropic_completion_stream.yaml b/tests/contrib/anthropic/cassettes/anthropic_completion_stream.yaml index b8aa6c3c194..06baa0cb61c 100644 --- a/tests/contrib/anthropic/cassettes/anthropic_completion_stream.yaml +++ b/tests/contrib/anthropic/cassettes/anthropic_completion_stream.yaml @@ -19,7 +19,7 @@ interactions: host: - api.anthropic.com user-agent: - - Anthropic/Python 0.28.0 + - Anthropic/Python 0.26.1 x-stainless-arch: - arm64 x-stainless-async: @@ -29,7 +29,7 @@ interactions: x-stainless-os: - MacOS x-stainless-package-version: - - 0.28.0 + - 0.26.1 x-stainless-runtime: - CPython x-stainless-runtime-version: @@ -40,12 +40,12 @@ interactions: body: string: 'event: message_start - data: {"type":"message_start","message":{"id":"msg_01Ea8X6hVwT5cbZ6VCiv38Au","type":"message","role":"assistant","model":"claude-3-opus-20240229","content":[],"stop_reason":null,"stop_sequence":null,"usage":{"input_tokens":27,"output_tokens":1}} } + data: 
{"type":"message_start","message":{"id":"msg_01PgicqXb8hKdXEPHm3LLTGF","type":"message","role":"assistant","model":"claude-3-opus-20240229","content":[],"stop_reason":null,"stop_sequence":null,"usage":{"input_tokens":27,"output_tokens":1}} } event: content_block_start - data: {"type":"content_block_start","index":0,"content_block":{"type":"text","text":""} } + data: {"type":"content_block_start","index":0,"content_block":{"type":"text","text":""} } event: ping @@ -55,75 +55,76 @@ interactions: event: content_block_delta - data: {"type":"content_block_delta","index":0,"delta":{"type":"text_delta","text":"The"} } + data: {"type":"content_block_delta","index":0,"delta":{"type":"text_delta","text":"The"} + } event: content_block_delta data: {"type":"content_block_delta","index":0,"delta":{"type":"text_delta","text":" - phrase"} } + phrase"} } event: content_block_delta data: {"type":"content_block_delta","index":0,"delta":{"type":"text_delta","text":" - \""} } + \""} } event: content_block_delta - data: {"type":"content_block_delta","index":0,"delta":{"type":"text_delta","text":"I"} } + data: {"type":"content_block_delta","index":0,"delta":{"type":"text_delta","text":"I"} } event: content_block_delta data: {"type":"content_block_delta","index":0,"delta":{"type":"text_delta","text":" - think"} } + think"} } event: content_block_delta - data: {"type":"content_block_delta","index":0,"delta":{"type":"text_delta","text":","} } + data: {"type":"content_block_delta","index":0,"delta":{"type":"text_delta","text":","} } event: content_block_delta data: {"type":"content_block_delta","index":0,"delta":{"type":"text_delta","text":" - therefore"} } + therefore"} } event: content_block_delta data: {"type":"content_block_delta","index":0,"delta":{"type":"text_delta","text":" - I"} } + I"} } event: content_block_delta data: {"type":"content_block_delta","index":0,"delta":{"type":"text_delta","text":" - am"} } + am"} } event: content_block_delta - data: 
{"type":"content_block_delta","index":0,"delta":{"type":"text_delta","text":"\""} } + data: {"type":"content_block_delta","index":0,"delta":{"type":"text_delta","text":"\""} } event: content_block_delta data: {"type":"content_block_delta","index":0,"delta":{"type":"text_delta","text":" - ("} } + ("} } event: content_block_delta - data: {"type":"content_block_delta","index":0,"delta":{"type":"text_delta","text":"originally"}} + data: {"type":"content_block_delta","index":0,"delta":{"type":"text_delta","text":"originally"} } event: content_block_delta data: {"type":"content_block_delta","index":0,"delta":{"type":"text_delta","text":" - in"} } + in"} } event: content_block_delta @@ -135,22 +136,22 @@ interactions: event: content_block_delta data: {"type":"content_block_delta","index":0,"delta":{"type":"text_delta","text":" - as"} } + as"} } event: content_block_stop - data: {"type":"content_block_stop","index":0} + data: {"type":"content_block_stop","index":0 } event: message_delta - data: {"type":"message_delta","delta":{"stop_reason":"max_tokens","stop_sequence":null},"usage":{"output_tokens":15} } + data: {"type":"message_delta","delta":{"stop_reason":"max_tokens","stop_sequence":null},"usage":{"output_tokens":15} } event: message_stop - data: {"type":"message_stop" } + data: {"type":"message_stop" } ' @@ -158,7 +159,7 @@ interactions: CF-Cache-Status: - DYNAMIC CF-RAY: - - 88e24ce0ff9e8c51-EWR + - 88ea0b905a064382-EWR Cache-Control: - no-cache Connection: @@ -166,7 +167,7 @@ interactions: Content-Type: - text/event-stream; charset=utf-8 Date: - - Mon, 03 Jun 2024 20:05:52 GMT + - Tue, 04 Jun 2024 18:39:23 GMT Server: - cloudflare Transfer-Encoding: @@ -174,17 +175,17 @@ interactions: anthropic-ratelimit-requests-limit: - '5' anthropic-ratelimit-requests-remaining: - - '2' + - '1' anthropic-ratelimit-requests-reset: - - '2024-06-03T20:05:57Z' + - '2024-06-04T18:39:57Z' anthropic-ratelimit-tokens-limit: - '10000' anthropic-ratelimit-tokens-remaining: - '10000' 
anthropic-ratelimit-tokens-reset: - - '2024-06-03T20:05:57Z' + - '2024-06-04T18:39:57Z' request-id: - - req_01DBURoYEwEGrcb7WMjs6xMx + - req_01JM4P2W8ahNQBkqK6giWoMM via: - 1.1 google status: diff --git a/tests/contrib/anthropic/cassettes/anthropic_completion_tools_part_1.yaml b/tests/contrib/anthropic/cassettes/anthropic_completion_tools_part_1.yaml index 3cadcbfe185..55dd4a3a5fc 100644 --- a/tests/contrib/anthropic/cassettes/anthropic_completion_tools_part_1.yaml +++ b/tests/contrib/anthropic/cassettes/anthropic_completion_tools_part_1.yaml @@ -42,21 +42,21 @@ interactions: response: body: string: !!binary | - H4sIAAAAAAAAA2xT227bMAz9FYKPg5PFSRY0xjCgxbZ2exjQIi/dMgSczMZaZckVqbRZkH8f5KBJ - d3kyxMvhOYf0Dm2NFbayXo3K2eIprbc/P9eXd7+aydftbRk+XFssULcd5yoWoTVjgTG4HCARK0pe - scA21OywQuMo1TyYDEKXZDAejaej8XiOBZrglb1i9W33DKj8lFv7T4VvtbH+3vr1u6VfBCAvjxxB - GyvwkFjUBl+ANgyGnEmONETQEBxY6cNtEIXIjjfkFcR6w2AVDHnoON6F2AJFq03Lag2EjiNlTAFn - 7xna5NR2zpo+OITFfwZFfkg2sgBl+LVj6ChSy8qxWvoB8FMXWcQGX/X9LWnDLak15F4kQQPwhlwi - 5aVf+lyahCM0JNDFsLE110DO9bI8m+x73IL1WUTPD6zPSRuP1mTQLnSZLh9MO3I7iPlz/rM0zu4t - sZyfTcvJG3gF88l0UpazJQ4ztS8BgjZ80C9AMfPhmusCSP5eRna6tpGNuu1pfXykOIRzdzSxPvE7 - 4NKGrKMfjguQAJ8Oe4vBMNfwaLWBlvJx9Ij9Ngw5N1z6t6+Pd4P74nRbIbhVknyt/Ynnd1qNypvF - xceLhbm8uT6fp3l4f3E7n11dYYGe2tx30pM7fZcUqx2ezMPqX7Nwv/9eoGjoVpFJ+qIX8/uE8ENi - bxgrn5wrMPW/UrU7zFhpuGcvWM2mowJD0pexcna23/8GAAD//wMA9J1CmqoDAAA= + H4sIAAAAAAAAA2SSUU8bMQzHv4rlxyllvbaD9oQm9WFCY7wMIZC2m6pw59555JIjdgqs6nef0sJg + 4imK43/8+9veIjdYYi/talws2uuB4sWSzzYX1ycPyxTOL79fokF9GihnkYhtCQ3G4HLAirCo9YoG + +9CQwxJrZ1NDo+koDElGk/FkNp5MFmiwDl7JK5Y/ty8fKj1m6f4o8VQ79nfs28+Vv+oIauvq5KyG + CBqCAxaI5GhjvcI6RLBeHiiyb0E7FrhPJMrBgxVghYHiOsRe4NYK12Aja9eTcg1hoGhzphxV/pLu + E0dqYLDR9qQUpaz8COhxiCTCwZeQYZJQhIYj1eqeYIhhww01oB1Bb7Wj3irX1r3RQYWFWcxnpph+ + gg+wMNPZ1BTFcYVHcNOxo0yZm2LZC9Sh760Y+Aq19RDJSvD21j0B+zVF0C4IgY0Ev5Nopmmyz4Zb + VhDK8BqigPUHpjcYLJA9bqwjr6Ahcy3ms2eq6Wx6YKr80rm9Nr5vyb7yi2kDEp45hxhqogYeWLs8 + 
L3eYxrvZHVX+9OO/8eLOvK5ACG6VJC/VfhPzPa3GxRf9cVb8seffHotl2/F8vVgqX6FBb/usey2Q + lX5IiuUWX21j+d4l7na/DIqGYXVo8P/19w9C94l8TVj65JzBtN/4cnuosdJwR16wPJ6NDYakb2PF + 8clu9xcAAP//AwDqxmMHUQMAAA== headers: CF-Cache-Status: - DYNAMIC CF-RAY: - - 88e9ec12ece64222-EWR + - 88ea0c10fe0043dd-EWR Connection: - keep-alive Content-Encoding: @@ -64,7 +64,7 @@ interactions: Content-Type: - application/json Date: - - Tue, 04 Jun 2024 18:18:02 GMT + - Tue, 04 Jun 2024 18:39:50 GMT Server: - cloudflare Transfer-Encoding: @@ -72,21 +72,23 @@ interactions: anthropic-ratelimit-requests-limit: - '5' anthropic-ratelimit-requests-remaining: - - '5' + - '0' anthropic-ratelimit-requests-reset: - - '2024-06-04T18:18:57Z' + - '2024-06-04T18:39:57Z' anthropic-ratelimit-tokens-limit: - '10000' anthropic-ratelimit-tokens-remaining: - - '10000' + - '9000' anthropic-ratelimit-tokens-reset: - - '2024-06-04T18:18:57Z' + - '2024-06-04T18:39:57Z' request-id: - - req_01Doh23AovTtfiFkjBhh3ZWF + - req_01LsEERKwRF6i8rW9hbFVT8a + retry-after: + - '7' via: - 1.1 google x-cloud-trace-context: - - 628a594739971f53602fac0e1b9395b7 + - d588cf3614f34cc29f44bf38cfa3371f status: code: 200 message: OK diff --git a/tests/contrib/anthropic/cassettes/anthropic_completion_tools_part_2.yaml b/tests/contrib/anthropic/cassettes/anthropic_completion_tools_part_2.yaml index 40bfa00d33e..13d4756a5a4 100644 --- a/tests/contrib/anthropic/cassettes/anthropic_completion_tools_part_2.yaml +++ b/tests/contrib/anthropic/cassettes/anthropic_completion_tools_part_2.yaml @@ -2,17 +2,16 @@ interactions: - request: body: '{"max_tokens": 500, "messages": [{"role": "user", "content": "What is the result of 1,984,135 * 9,343,116?"}, {"role": "assistant", "content": [{"text": - "\nTo answer this question, the calculator tool is the most relevant - since it can perform arithmetic operations like multiplication. 
The calculator - tool requires a single parameter:\n- expression: The mathematical expression - to evaluate\n\nThe user has provided all the necessary information in their - question to populate this parameter. The expression to calculate is \"1984135 - * 9343116\".\n\nNo other tools are needed, as the calculator can directly answer - the question. All required parameters are available, so I can proceed with making - the tool call.\n", "type": "text"}, {"id": "toolu_01RTBFBTcGRQA9u9oDBY96HH", + "\nThe calculator tool is relevant for answering this question as + it performs basic arithmetic operations.\nRequired parameters:\n- expression: + The user directly provided the mathematical expression \"1,984,135 * 9,343,116\". + While it contains commas, I can reasonably infer those are just used as digit + separators and the expression is equivalent to \"1984135 * 9343116\".\nAll the + required parameters are provided, so I can proceed with calling the calculator + tool.\n", "type": "text"}, {"id": "toolu_01EtZG1zaJKx1Aghi8f9AtiT", "input": {"expression": "1984135 * 9343116"}, "name": "calculator", "type": "tool_use"}]}, {"role": "user", "content": [{"type": "tool_result", "tool_use_id": - "toolu_01RTBFBTcGRQA9u9oDBY96HH", "content": "18538003464660"}]}], "model": + "toolu_01EtZG1zaJKx1Aghi8f9AtiT", "content": "18538003464660"}]}], "model": "claude-3-opus-20240229", "tools": [{"name": "calculator", "description": "A simple calculator that performs basic arithmetic operations.", "input_schema": {"type": "object", "properties": {"expression": {"type": "string", "description": @@ -28,7 +27,187 @@ interactions: connection: - keep-alive content-length: - - '1362' + - '1273' + content-type: + - application/json + host: + - api.anthropic.com + user-agent: + - Anthropic/Python 0.28.0 + x-stainless-arch: + - arm64 + x-stainless-async: + - 'false' + x-stainless-lang: + - python + x-stainless-os: + - MacOS + x-stainless-package-version: + - 0.28.0 + x-stainless-runtime: + - CPython 
+ x-stainless-runtime-version: + - 3.10.13 + method: POST + uri: https://api.anthropic.com/v1/messages + response: + body: + string: '{"type":"error","error":{"type":"rate_limit_error","message":"Number + of requests has exceeded your per-minute rate limit (https://docs.anthropic.com/en/api/rate-limits); + see the response headers for current usage. Please try again later or contact + sales at https://www.anthropic.com/contact-sales to discuss your options for + a rate limit increase."}}' + headers: + CF-Cache-Status: + - DYNAMIC + CF-RAY: + - 88ea0c44d95b43dd-EWR + Connection: + - keep-alive + Content-Length: + - '350' + Content-Type: + - application/json + Date: + - Tue, 04 Jun 2024 18:39:50 GMT + Server: + - cloudflare + anthropic-ratelimit-requests-limit: + - '5' + anthropic-ratelimit-requests-remaining: + - '0' + anthropic-ratelimit-requests-reset: + - '2024-06-04T18:39:57Z' + anthropic-ratelimit-tokens-limit: + - '10000' + anthropic-ratelimit-tokens-remaining: + - '8000' + anthropic-ratelimit-tokens-reset: + - '2024-06-04T18:39:57Z' + request-id: + - req_01VRXnTtDN7bkGVjXbQNgzcL + retry-after: + - '7' + via: + - 1.1 google + x-cloud-trace-context: + - fc211fe021a43e2704a032ddb1b704cb + x-should-retry: + - 'true' + status: + code: 429 + message: Too Many Requests +- request: + body: '{"max_tokens": 500, "messages": [{"role": "user", "content": "What is the + result of 1,984,135 * 9,343,116?"}, {"role": "assistant", "content": [{"text": + "\nThe calculator tool is relevant for answering this question as + it performs basic arithmetic operations.\nRequired parameters:\n- expression: + The user directly provided the mathematical expression \"1,984,135 * 9,343,116\". 
+ While it contains commas, I can reasonably infer those are just used as digit + separators and the expression is equivalent to \"1984135 * 9343116\".\nAll the + required parameters are provided, so I can proceed with calling the calculator + tool.\n", "type": "text"}, {"id": "toolu_01EtZG1zaJKx1Aghi8f9AtiT", + "input": {"expression": "1984135 * 9343116"}, "name": "calculator", "type": + "tool_use"}]}, {"role": "user", "content": [{"type": "tool_result", "tool_use_id": + "toolu_01EtZG1zaJKx1Aghi8f9AtiT", "content": "18538003464660"}]}], "model": + "claude-3-opus-20240229", "tools": [{"name": "calculator", "description": "A + simple calculator that performs basic arithmetic operations.", "input_schema": + {"type": "object", "properties": {"expression": {"type": "string", "description": + "The mathematical expression to evaluate (e.g., ''2 + 3 * 4'')."}}, "required": + ["expression"]}}]}' + headers: + accept: + - application/json + accept-encoding: + - gzip, deflate + anthropic-version: + - '2023-06-01' + connection: + - keep-alive + content-length: + - '1273' + content-type: + - application/json + host: + - api.anthropic.com + user-agent: + - Anthropic/Python 0.28.0 + x-stainless-arch: + - arm64 + x-stainless-async: + - 'false' + x-stainless-lang: + - python + x-stainless-os: + - MacOS + x-stainless-package-version: + - 0.28.0 + x-stainless-runtime: + - CPython + x-stainless-runtime-version: + - 3.10.13 + method: POST + uri: https://api.anthropic.com/v1/messages + response: + body: + string: '{"type":"error","error":{"type":"overloaded_error","message":"Overloaded"}}' + headers: + CF-Cache-Status: + - DYNAMIC + CF-RAY: + - 88ea0c717f55434f-EWR + Cache-Control: + - no-store, no-cache + Connection: + - keep-alive + Content-Length: + - '75' + Content-Type: + - application/json + Date: + - Tue, 04 Jun 2024 18:39:59 GMT + Server: + - cloudflare + request-id: + - req_01PTvVW3st1XAwA3EymUCdq9 + via: + - 1.1 google + x-cloud-trace-context: + - 
351112042c856da4d17d2d6062cfd0a0 + x-should-retry: + - 'true' + status: + code: 529 + message: '' +- request: + body: '{"max_tokens": 500, "messages": [{"role": "user", "content": "What is the + result of 1,984,135 * 9,343,116?"}, {"role": "assistant", "content": [{"text": + "\nThe calculator tool is relevant for answering this question as + it performs basic arithmetic operations.\nRequired parameters:\n- expression: + The user directly provided the mathematical expression \"1,984,135 * 9,343,116\". + While it contains commas, I can reasonably infer those are just used as digit + separators and the expression is equivalent to \"1984135 * 9343116\".\nAll the + required parameters are provided, so I can proceed with calling the calculator + tool.\n", "type": "text"}, {"id": "toolu_01EtZG1zaJKx1Aghi8f9AtiT", + "input": {"expression": "1984135 * 9343116"}, "name": "calculator", "type": + "tool_use"}]}, {"role": "user", "content": [{"type": "tool_result", "tool_use_id": + "toolu_01EtZG1zaJKx1Aghi8f9AtiT", "content": "18538003464660"}]}], "model": + "claude-3-opus-20240229", "tools": [{"name": "calculator", "description": "A + simple calculator that performs basic arithmetic operations.", "input_schema": + {"type": "object", "properties": {"expression": {"type": "string", "description": + "The mathematical expression to evaluate (e.g., ''2 + 3 * 4'')."}}, "required": + ["expression"]}}]}' + headers: + accept: + - application/json + accept-encoding: + - gzip, deflate + anthropic-version: + - '2023-06-01' + connection: + - keep-alive + content-length: + - '1273' content-type: - application/json host: @@ -54,16 +233,16 @@ interactions: response: body: string: !!binary | - H4sIAAAAAAAAA0yP3UrDQBCFX2U5lzKVJJuEZK+L3hVEEUQkxGSaRNPdNDMLSum7S4uCVwe+8wPn - hKmHw0GGJkkfx+G49f3uLit2L8/30/T+8LEVEPR74UuKRdqBQVjDfAGtyCTaegXhEHqe4dDNbex5 - YzdhibLJkixPsqwGoQte2Svc6+lvUPnrUr2Kw9PIK+/DymR0ZLOyxFlN2JuU6iqn1BbmxtRkc0tp - 
WppJTFpRYStKEkt5mVNZJrc4vxFEw9Ks3ErwcGDfNxpXj19D+BjZdwzn4zwT4vWUO2HyS9RGwyd7 - gauynBCi/me2PJ9/AAAA//8DAPgqkqEzAQAA + H4sIAAAAAAAAA0yPzUrDQBSFX2U4S5lIkklDMsuK6MqNgqBISJObNpjOpHPvgDX03SVFwdWB7/zA + WTD2sDjyvkmz+9MDP70+7nbP27chnOfxuw7DHTTkPNOaIuZ2T9AIflpByzyytE6gcfQ9TbDopjb2 + lJjEz5GTPM2LNM9raHTeCTmBfV/+BoW+1upVLF4OFGjwgbSSA6lAHCdRflCZrqtCZ2ajblStTWF0 + lpVqZJVVG1OlqSnKoizTW1w+NFj83ARq2TtYkOsbicHh12A6RXIdwbo4TRrxesguGN0cpRH/SY5h + q9xo+Cj/mckvlx8AAAD//wMAjjOTzS8BAAA= headers: CF-Cache-Status: - DYNAMIC CF-RAY: - - 88e9ec564ef64222-EWR + - 88ea0c83ec85434f-EWR Connection: - keep-alive Content-Encoding: @@ -71,7 +250,7 @@ interactions: Content-Type: - application/json Date: - - Tue, 04 Jun 2024 18:18:05 GMT + - Tue, 04 Jun 2024 18:40:03 GMT Server: - cloudflare Transfer-Encoding: @@ -81,19 +260,19 @@ interactions: anthropic-ratelimit-requests-remaining: - '4' anthropic-ratelimit-requests-reset: - - '2024-06-04T18:18:57Z' + - '2024-06-04T18:40:57Z' anthropic-ratelimit-tokens-limit: - '10000' anthropic-ratelimit-tokens-remaining: - '9000' anthropic-ratelimit-tokens-reset: - - '2024-06-04T18:18:57Z' + - '2024-06-04T18:40:57Z' request-id: - - req_01Kyz3hB1MjdDHfFRfMoReTc + - req_01MEo5gJ4zAJt1kjVqm39kG6 via: - 1.1 google x-cloud-trace-context: - - 155c7bc323534f58baa0f8149ca58e5a + - e8e88ccc3bd9fd43fefaef20d023d491 status: code: 200 message: OK diff --git a/tests/snapshots/tests.contrib.anthropic.test_anthropic.test_anthropic_llm.json b/tests/snapshots/tests.contrib.anthropic.test_anthropic.test_anthropic_llm.json index d63f47cbb04..ecb97a7f031 100644 --- a/tests/snapshots/tests.contrib.anthropic.test_anthropic.test_anthropic_llm.json +++ b/tests/snapshots/tests.contrib.anthropic.test_anthropic.test_anthropic_llm.json @@ -10,30 +10,30 @@ "error": 0, "meta": { "_dd.p.dm": "-0", - "_dd.p.tid": "665f496b00000000", + "_dd.p.tid": "665f5f5200000000", "anthropic.request.api_key": "sk-...key>", "anthropic.request.messages.0.content.0.text": "Can you explain what Descartes meant 
by 'I think, therefore I am'?", "anthropic.request.messages.0.content.0.type": "text", "anthropic.request.messages.0.role": "user", "anthropic.request.model": "claude-3-opus-20240229", - "anthropic.request.parameters": "{\"model\": \"claude-3-opus-20240229\", \"max_tokens\": 15}", - "anthropic.response.completions.content.0.text": "The phrase \"I think, therefore I am\" (originally in Latin as", + "anthropic.request.parameters": "{\"max_tokens\": 15}", + "anthropic.response.completions.content.0.text": "When Nietzsche famously declared \"God is dead\" in his", "anthropic.response.completions.content.0.type": "text", "anthropic.response.completions.finish_reason": "max_tokens", "anthropic.response.completions.role": "assistant", "language": "python", - "runtime-id": "75b37cae2dc24d8190d27bcb14d4d263" + "runtime-id": "23da57548a3443fa96c5bf9137d02aa9" }, "metrics": { "_dd.measured": 1, "_dd.top_level": 1, "_dd.tracer_kr": 1.0, "_sampling_priority_v1": 1, - "anthropic.response.usage.input_tokens": 27, + "anthropic.response.usage.input_tokens": 22, "anthropic.response.usage.output_tokens": 15, - "anthropic.response.usage.total_tokens": 42, - "process_id": 62674 + "anthropic.response.usage.total_tokens": 37, + "process_id": 66314 }, - "duration": 2476000, - "start": 1717520747849359000 + "duration": 2838000, + "start": 1717526354025943000 }]] diff --git a/tests/snapshots/tests.contrib.anthropic.test_anthropic.test_anthropic_llm_basic.json b/tests/snapshots/tests.contrib.anthropic.test_anthropic.test_anthropic_llm_basic.json index 336ebf85a03..df62233867d 100644 --- a/tests/snapshots/tests.contrib.anthropic.test_anthropic.test_anthropic_llm_basic.json +++ b/tests/snapshots/tests.contrib.anthropic.test_anthropic.test_anthropic_llm_basic.json @@ -10,30 +10,30 @@ "error": 0, "meta": { "_dd.p.dm": "-0", - "_dd.p.tid": "665f496c00000000", + "_dd.p.tid": "665f5f5900000000", "anthropic.request.api_key": "sk-...key>", "anthropic.request.messages.0.content.0.text": "Can you 
explain what Descartes meant by 'I think, therefore I am'?", "anthropic.request.messages.0.content.0.type": "text", "anthropic.request.messages.0.role": "user", "anthropic.request.model": "claude-3-opus-20240229", - "anthropic.request.parameters": "{\"model\": \"claude-3-opus-20240229\", \"max_tokens\": 15}", - "anthropic.response.completions.content.0.text": "The phrase \"I think, therefore I am\" (originally in Latin as", + "anthropic.request.parameters": "{\"max_tokens\": 15}", + "anthropic.response.completions.content.0.text": "When Nietzsche famously declared \"God is dead\" in his", "anthropic.response.completions.content.0.type": "text", "anthropic.response.completions.finish_reason": "max_tokens", "anthropic.response.completions.role": "assistant", "language": "python", - "runtime-id": "75b37cae2dc24d8190d27bcb14d4d263" + "runtime-id": "23da57548a3443fa96c5bf9137d02aa9" }, "metrics": { "_dd.measured": 1, "_dd.top_level": 1, "_dd.tracer_kr": 1.0, "_sampling_priority_v1": 1, - "anthropic.response.usage.input_tokens": 27, + "anthropic.response.usage.input_tokens": 22, "anthropic.response.usage.output_tokens": 15, - "anthropic.response.usage.total_tokens": 42, - "process_id": 62674 + "anthropic.response.usage.total_tokens": 37, + "process_id": 66314 }, - "duration": 2247000, - "start": 1717520748016945000 + "duration": 2572000, + "start": 1717526361825031000 }]] diff --git a/tests/snapshots/tests.contrib.anthropic.test_anthropic.test_anthropic_llm_error.json b/tests/snapshots/tests.contrib.anthropic.test_anthropic.test_anthropic_llm_error.json index f6f2993d8d6..89b9759808c 100644 --- a/tests/snapshots/tests.contrib.anthropic.test_anthropic.test_anthropic_llm_error.json +++ b/tests/snapshots/tests.contrib.anthropic.test_anthropic.test_anthropic_llm_error.json @@ -10,23 +10,23 @@ "error": 1, "meta": { "_dd.p.dm": "-0", - "_dd.p.tid": "665e221e00000000", + "_dd.p.tid": "665f5f5600000000", "anthropic.request.api_key": "sk-...key>", "anthropic.request.model": 
"claude-3-opus-20240229", - "anthropic.request.parameters": "{\"model\": \"claude-3-opus-20240229\", \"max_tokens\": 15}", + "anthropic.request.parameters": "{\"max_tokens\": 15}", "error.message": "Error code: 400 - {'type': 'error', 'error': {'type': 'invalid_request_error', 'message': 'messages.0: Input does not match the expected shape.'}}", - "error.stack": "Traceback (most recent call last):\n File \"/Users/william.conti/Documents/dd-trace/dd-trace-py/ddtrace/contrib/anthropic/patch.py\", line 105, in traced_chat_model_generate\n chat_completions = func(*args, **kwargs)\n File \"/Users/william.conti/Documents/dd-trace/dd-trace-py/.riot/venv_py31013_mock_pytest_pytest-mock_coverage_pytest-cov_opentracing_hypothesis6451_vcrpy_pytest-asyncio_tiktoken_huggingface-hub_ai21_exceptiongroup_psutil_pytest-randomly_numexpr_anthropic/lib/python3.10/site-packages/anthropic/_utils/_utils.py\", line 277, in wrapper\n return func(*args, **kwargs)\n File \"/Users/william.conti/Documents/dd-trace/dd-trace-py/.riot/venv_py31013_mock_pytest_pytest-mock_coverage_pytest-cov_opentracing_hypothesis6451_vcrpy_pytest-asyncio_tiktoken_huggingface-hub_ai21_exceptiongroup_psutil_pytest-randomly_numexpr_anthropic/lib/python3.10/site-packages/anthropic/resources/messages.py\", line 899, in create\n return self._post(\n File \"/Users/william.conti/Documents/dd-trace/dd-trace-py/.riot/venv_py31013_mock_pytest_pytest-mock_coverage_pytest-cov_opentracing_hypothesis6451_vcrpy_pytest-asyncio_tiktoken_huggingface-hub_ai21_exceptiongroup_psutil_pytest-randomly_numexpr_anthropic/lib/python3.10/site-packages/anthropic/_base_client.py\", line 1239, in post\n return cast(ResponseT, self.request(cast_to, opts, stream=stream, stream_cls=stream_cls))\n File 
\"/Users/william.conti/Documents/dd-trace/dd-trace-py/.riot/venv_py31013_mock_pytest_pytest-mock_coverage_pytest-cov_opentracing_hypothesis6451_vcrpy_pytest-asyncio_tiktoken_huggingface-hub_ai21_exceptiongroup_psutil_pytest-randomly_numexpr_anthropic/lib/python3.10/site-packages/anthropic/_base_client.py\", line 921, in request\n return self._request(\n File \"/Users/william.conti/Documents/dd-trace/dd-trace-py/.riot/venv_py31013_mock_pytest_pytest-mock_coverage_pytest-cov_opentracing_hypothesis6451_vcrpy_pytest-asyncio_tiktoken_huggingface-hub_ai21_exceptiongroup_psutil_pytest-randomly_numexpr_anthropic/lib/python3.10/site-packages/anthropic/_base_client.py\", line 1019, in _request\n raise self._make_status_error_from_response(err.response) from None\nanthropic.BadRequestError: Error code: 400 - {'type': 'error', 'error': {'type': 'invalid_request_error', 'message': 'messages.0: Input does not match the expected shape.'}}\n", + "error.stack": "Traceback (most recent call last):\n File \"/Users/william.conti/Documents/dd-trace/dd-trace-py/ddtrace/contrib/anthropic/patch.py\", line 95, in traced_chat_model_generate\n chat_completions = func(*args, **kwargs)\n File \"/Users/william.conti/Documents/dd-trace/dd-trace-py/.riot/venv_py31013_mock_pytest_pytest-mock_coverage_pytest-cov_opentracing_hypothesis6451_pytest-asyncio_vcrpy_anthropic/lib/python3.10/site-packages/anthropic/_utils/_utils.py\", line 277, in wrapper\n return func(*args, **kwargs)\n File \"/Users/william.conti/Documents/dd-trace/dd-trace-py/.riot/venv_py31013_mock_pytest_pytest-mock_coverage_pytest-cov_opentracing_hypothesis6451_pytest-asyncio_vcrpy_anthropic/lib/python3.10/site-packages/anthropic/resources/messages.py\", line 681, in create\n return self._post(\n File \"/Users/william.conti/Documents/dd-trace/dd-trace-py/.riot/venv_py31013_mock_pytest_pytest-mock_coverage_pytest-cov_opentracing_hypothesis6451_pytest-asyncio_vcrpy_anthropic/lib/python3.10/site-packages/anthropic/_base_client.py\", 
line 1239, in post\n return cast(ResponseT, self.request(cast_to, opts, stream=stream, stream_cls=stream_cls))\n File \"/Users/william.conti/Documents/dd-trace/dd-trace-py/.riot/venv_py31013_mock_pytest_pytest-mock_coverage_pytest-cov_opentracing_hypothesis6451_pytest-asyncio_vcrpy_anthropic/lib/python3.10/site-packages/anthropic/_base_client.py\", line 921, in request\n return self._request(\n File \"/Users/william.conti/Documents/dd-trace/dd-trace-py/.riot/venv_py31013_mock_pytest_pytest-mock_coverage_pytest-cov_opentracing_hypothesis6451_pytest-asyncio_vcrpy_anthropic/lib/python3.10/site-packages/anthropic/_base_client.py\", line 1019, in _request\n raise self._make_status_error_from_response(err.response) from None\nanthropic.BadRequestError: Error code: 400 - {'type': 'error', 'error': {'type': 'invalid_request_error', 'message': 'messages.0: Input does not match the expected shape.'}}\n", "error.type": "anthropic.BadRequestError", "language": "python", - "runtime-id": "b52cab756a314569a6d74fe80724c91a" + "runtime-id": "23da57548a3443fa96c5bf9137d02aa9" }, "metrics": { "_dd.measured": 1, "_dd.top_level": 1, "_dd.tracer_kr": 1.0, "_sampling_priority_v1": 1, - "process_id": 95434 + "process_id": 66314 }, - "duration": 166228000, - "start": 1717445150258843000 + "duration": 109469000, + "start": 1717526358769596000 }]] diff --git a/tests/snapshots/tests.contrib.anthropic.test_anthropic.test_anthropic_llm_multiple_prompts.json b/tests/snapshots/tests.contrib.anthropic.test_anthropic.test_anthropic_llm_multiple_prompts.json index eb1807f0fcc..bc1c1c586fd 100644 --- a/tests/snapshots/tests.contrib.anthropic.test_anthropic.test_anthropic_llm_multiple_prompts.json +++ b/tests/snapshots/tests.contrib.anthropic.test_anthropic.test_anthropic_llm_multiple_prompts.json @@ -10,7 +10,7 @@ "error": 0, "meta": { "_dd.p.dm": "-0", - "_dd.p.tid": "665f496b00000000", + "_dd.p.tid": "665f5f5200000000", "anthropic.request.api_key": "sk-...key>", 
"anthropic.request.messages.0.content.0.text": "Hello, I am looking for information about some books!", "anthropic.request.messages.0.content.0.type": "text", @@ -18,13 +18,13 @@ "anthropic.request.messages.0.content.1.type": "text", "anthropic.request.messages.0.role": "user", "anthropic.request.model": "claude-3-opus-20240229", - "anthropic.request.parameters": "{\"model\": \"claude-3-opus-20240229\", \"max_tokens\": 15}", + "anthropic.request.parameters": "{\"max_tokens\": 15}", "anthropic.response.completions.content.0.text": "The phrase \"I think, therefore I am\" (originally in Latin as", "anthropic.response.completions.content.0.type": "text", "anthropic.response.completions.finish_reason": "max_tokens", "anthropic.response.completions.role": "assistant", "language": "python", - "runtime-id": "75b37cae2dc24d8190d27bcb14d4d263" + "runtime-id": "23da57548a3443fa96c5bf9137d02aa9" }, "metrics": { "_dd.measured": 1, @@ -34,8 +34,8 @@ "anthropic.response.usage.input_tokens": 38, "anthropic.response.usage.output_tokens": 15, "anthropic.response.usage.total_tokens": 53, - "process_id": 62674 + "process_id": 66314 }, - "duration": 2793000, - "start": 1717520747889584000 + "duration": 2317042000, + "start": 1717526354062207000 }]] diff --git a/tests/snapshots/tests.contrib.anthropic.test_anthropic.test_anthropic_llm_multiple_prompts_no_history.json b/tests/snapshots/tests.contrib.anthropic.test_anthropic.test_anthropic_llm_multiple_prompts_no_history.json index 5ffb3fa9431..bf53fdbbb48 100644 --- a/tests/snapshots/tests.contrib.anthropic.test_anthropic.test_anthropic_llm_multiple_prompts_no_history.json +++ b/tests/snapshots/tests.contrib.anthropic.test_anthropic.test_anthropic_llm_multiple_prompts_no_history.json @@ -10,7 +10,7 @@ "error": 0, "meta": { "_dd.p.dm": "-0", - "_dd.p.tid": "665f496c00000000", + "_dd.p.tid": "665f5f5900000000", "anthropic.request.api_key": "sk-...key>", "anthropic.request.messages.0.content.0.text": "Hello, I am looking for information 
about some books!", "anthropic.request.messages.0.content.0.type": "text", @@ -18,13 +18,13 @@ "anthropic.request.messages.0.content.1.type": "text", "anthropic.request.messages.0.role": "user", "anthropic.request.model": "claude-3-opus-20240229", - "anthropic.request.parameters": "{\"model\": \"claude-3-opus-20240229\", \"max_tokens\": 15}", + "anthropic.request.parameters": "{\"max_tokens\": 15}", "anthropic.response.completions.content.0.text": "The phrase \"I think, therefore I am\" (originally in Latin as", "anthropic.response.completions.content.0.type": "text", "anthropic.response.completions.finish_reason": "max_tokens", "anthropic.response.completions.role": "assistant", "language": "python", - "runtime-id": "75b37cae2dc24d8190d27bcb14d4d263" + "runtime-id": "23da57548a3443fa96c5bf9137d02aa9" }, "metrics": { "_dd.measured": 1, @@ -34,8 +34,8 @@ "anthropic.response.usage.input_tokens": 38, "anthropic.response.usage.output_tokens": 15, "anthropic.response.usage.total_tokens": 53, - "process_id": 62674 + "process_id": 66314 }, - "duration": 2652000, - "start": 1717520748050099000 + "duration": 2782000, + "start": 1717526361853306000 }]] diff --git a/tests/snapshots/tests.contrib.anthropic.test_anthropic.test_anthropic_llm_multiple_prompts_with_chat_history.json b/tests/snapshots/tests.contrib.anthropic.test_anthropic.test_anthropic_llm_multiple_prompts_with_chat_history.json index f7fc39fcf24..b2cebc0475a 100644 --- a/tests/snapshots/tests.contrib.anthropic.test_anthropic.test_anthropic_llm_multiple_prompts_with_chat_history.json +++ b/tests/snapshots/tests.contrib.anthropic.test_anthropic.test_anthropic_llm_multiple_prompts_with_chat_history.json @@ -10,7 +10,7 @@ "error": 0, "meta": { "_dd.p.dm": "-0", - "_dd.p.tid": "665f496b00000000", + "_dd.p.tid": "665f5f5400000000", "anthropic.request.api_key": "sk-...key>", "anthropic.request.messages.0.content.0.text": "Hello, Start all responses with your name Claude.", "anthropic.request.messages.0.content.0.type": 
"text", @@ -26,13 +26,13 @@ "anthropic.request.messages.2.content.1.type": "text", "anthropic.request.messages.2.role": "user", "anthropic.request.model": "claude-3-opus-20240229", - "anthropic.request.parameters": "{\"model\": \"claude-3-opus-20240229\", \"max_tokens\": 30}", - "anthropic.response.completions.content.0.text": "Claude (2023-03-09 16:15): String theory is a theoretical framework in physics that attempts to unify quantum mechanics and gene...", + "anthropic.request.parameters": "{\"max_tokens\": 30}", + "anthropic.response.completions.content.0.text": "Claude: 4/20/2023 8:45pm \\n\\nString theory is a theoretical framework in physics that attempts to unify quantum", "anthropic.response.completions.content.0.type": "text", "anthropic.response.completions.finish_reason": "max_tokens", "anthropic.response.completions.role": "assistant", "language": "python", - "runtime-id": "75b37cae2dc24d8190d27bcb14d4d263" + "runtime-id": "23da57548a3443fa96c5bf9137d02aa9" }, "metrics": { "_dd.measured": 1, @@ -42,8 +42,8 @@ "anthropic.response.usage.input_tokens": 84, "anthropic.response.usage.output_tokens": 30, "anthropic.response.usage.total_tokens": 114, - "process_id": 62674 + "process_id": 66314 }, - "duration": 3568000, - "start": 1717520747916500000 + "duration": 2317093000, + "start": 1717526356415223000 }]] diff --git a/tests/snapshots/tests.contrib.anthropic.test_anthropic.test_anthropic_llm_stream.json b/tests/snapshots/tests.contrib.anthropic.test_anthropic.test_anthropic_llm_stream.json index 46c95fc19ba..96d209a477e 100644 --- a/tests/snapshots/tests.contrib.anthropic.test_anthropic.test_anthropic_llm_stream.json +++ b/tests/snapshots/tests.contrib.anthropic.test_anthropic.test_anthropic_llm_stream.json @@ -10,23 +10,23 @@ "error": 0, "meta": { "_dd.p.dm": "-0", - "_dd.p.tid": "665f496b00000000", + "_dd.p.tid": "665f5f5600000000", "anthropic.request.api_key": "sk-...key>", "anthropic.request.messages.0.content.0.text": "Can you explain what Descartes 
meant by 'I think, therefore I am'?", "anthropic.request.messages.0.content.0.type": "text", "anthropic.request.messages.0.role": "user", "anthropic.request.model": "claude-3-opus-20240229", - "anthropic.request.parameters": "{\"model\": \"claude-3-opus-20240229\", \"max_tokens\": 15, \"stream\": true}", + "anthropic.request.parameters": "{\"max_tokens\": 15, \"stream\": true}", "language": "python", - "runtime-id": "75b37cae2dc24d8190d27bcb14d4d263" + "runtime-id": "23da57548a3443fa96c5bf9137d02aa9" }, "metrics": { "_dd.measured": 1, "_dd.top_level": 1, "_dd.tracer_kr": 1.0, "_sampling_priority_v1": 1, - "process_id": 62674 + "process_id": 66314 }, - "duration": 2040000, - "start": 1717520747965547000 + "duration": 2826079000, + "start": 1717526358926172000 }]] diff --git a/tests/snapshots/tests.contrib.anthropic.test_anthropic.test_anthropic_llm_tools.json b/tests/snapshots/tests.contrib.anthropic.test_anthropic.test_anthropic_llm_tools.json index 045c16a823e..187aa32f73a 100644 --- a/tests/snapshots/tests.contrib.anthropic.test_anthropic.test_anthropic_llm_tools.json +++ b/tests/snapshots/tests.contrib.anthropic.test_anthropic.test_anthropic_llm_tools.json @@ -10,33 +10,38 @@ "error": 0, "meta": { "_dd.p.dm": "-0", - "_dd.p.tid": "665f5aa900000000", + "_dd.p.tid": "665f5f7300000000", "anthropic.request.api_key": "sk-...key>", "anthropic.request.messages.0.content.0.text": "What is the result of 1,984,135 * 9,343,116?", "anthropic.request.messages.0.content.0.type": "text", "anthropic.request.messages.0.role": "user", + "anthropic.request.messages.1.content.0.text": "\\nThe calculator tool is relevant for answering this question as it performs basic arithmetic operations.\\nRequired pa...", + "anthropic.request.messages.1.content.0.type": "text", + "anthropic.request.messages.1.content.1.type": "tool_use", + "anthropic.request.messages.1.role": "assistant", + "anthropic.request.messages.2.content.0.type": "tool_result", + "anthropic.request.messages.2.role": 
"user", "anthropic.request.model": "claude-3-opus-20240229", - "anthropic.request.parameters": "{\"max_tokens\": 200}", - "anthropic.response.completions.content.0.text": "\\nTo answer this question, the calculator tool is the most relevant since it can perform arithmetic operations like mu...", + "anthropic.request.parameters": "{\"max_tokens\": 500}", + "anthropic.response.completions.content.0.text": "Therefore, the result of 1,984,135 * 9,343,116 is 18538003464660.", "anthropic.response.completions.content.0.type": "text", - "anthropic.response.completions.content.1.type": "tool_use", - "anthropic.response.completions.finish_reason": "tool_use", + "anthropic.response.completions.finish_reason": "end_turn", "anthropic.response.completions.role": "assistant", "language": "python", - "runtime-id": "505db4a9bdda41429de2cc066a67aa7c" + "runtime-id": "8afc035ea77a402b890d0a0bcdd0a51d" }, "metrics": { "_dd.measured": 1, "_dd.top_level": 1, "_dd.tracer_kr": 1.0, "_sampling_priority_v1": 1, - "anthropic.response.usage.input_tokens": 640, - "anthropic.response.usage.output_tokens": 168, - "anthropic.response.usage.total_tokens": 808, - "process_id": 1444 + "anthropic.response.usage.input_tokens": 823, + "anthropic.response.usage.output_tokens": 32, + "anthropic.response.usage.total_tokens": 855, + "process_id": 67716 }, - "duration": 24166000, - "start": 1717525161190237000 + "duration": 13042486000, + "start": 1717526387830901000 }], [ { @@ -50,36 +55,31 @@ "error": 0, "meta": { "_dd.p.dm": "-0", - "_dd.p.tid": "665f5aa900000000", + "_dd.p.tid": "665f5f6b00000000", "anthropic.request.api_key": "sk-...key>", "anthropic.request.messages.0.content.0.text": "What is the result of 1,984,135 * 9,343,116?", "anthropic.request.messages.0.content.0.type": "text", "anthropic.request.messages.0.role": "user", - "anthropic.request.messages.1.content.0.text": "\\nTo answer this question, the calculator tool is the most relevant since it can perform arithmetic operations like mu...", 
- "anthropic.request.messages.1.content.0.type": "text", - "anthropic.request.messages.1.content.1.type": "tool_use", - "anthropic.request.messages.1.role": "assistant", - "anthropic.request.messages.2.content.0.type": "tool_result", - "anthropic.request.messages.2.role": "user", "anthropic.request.model": "claude-3-opus-20240229", - "anthropic.request.parameters": "{\"max_tokens\": 500}", - "anthropic.response.completions.content.0.text": "Therefore, the result of 1,984,135 * 9,343,116 is 18,538,003,464,660.", + "anthropic.request.parameters": "{\"max_tokens\": 200}", + "anthropic.response.completions.content.0.text": "\\nThe calculator tool is relevant for answering this question as it performs basic arithmetic operations.\\nRequired pa...", "anthropic.response.completions.content.0.type": "text", - "anthropic.response.completions.finish_reason": "end_turn", + "anthropic.response.completions.content.1.type": "tool_use", + "anthropic.response.completions.finish_reason": "tool_use", "anthropic.response.completions.role": "assistant", "language": "python", - "runtime-id": "505db4a9bdda41429de2cc066a67aa7c" + "runtime-id": "8afc035ea77a402b890d0a0bcdd0a51d" }, "metrics": { "_dd.measured": 1, "_dd.top_level": 1, "_dd.tracer_kr": 1.0, "_sampling_priority_v1": 1, - "anthropic.response.usage.input_tokens": 824, - "anthropic.response.usage.output_tokens": 36, - "anthropic.response.usage.total_tokens": 860, - "process_id": 1444 + "anthropic.response.usage.input_tokens": 640, + "anthropic.response.usage.output_tokens": 167, + "anthropic.response.usage.total_tokens": 807, + "process_id": 67716 }, - "duration": 5855000, - "start": 1717525161218346000 + "duration": 8320787000, + "start": 1717526379505675000 }]] From c2684c10a13d97799773d574e70ce0d078cf145f Mon Sep 17 00:00:00 2001 From: William Conti Date: Wed, 5 Jun 2024 09:51:30 -0400 Subject: [PATCH 21/33] add system prompt --- ddtrace/llmobs/_integrations/anthropic.py | 9 ++++--- .../cassettes/anthropic_hello_world.yaml | 
26 +++++++++---------- .../anthropic/test_anthropic_llmobs.py | 10 ++++--- 3 files changed, 25 insertions(+), 20 deletions(-) diff --git a/ddtrace/llmobs/_integrations/anthropic.py b/ddtrace/llmobs/_integrations/anthropic.py index 05aa12dad3b..6f399254f2d 100644 --- a/ddtrace/llmobs/_integrations/anthropic.py +++ b/ddtrace/llmobs/_integrations/anthropic.py @@ -62,8 +62,9 @@ def llmobs_set_tags( "temperature": float(kwargs.get("temperature", 1.0)), "max_tokens": float(kwargs.get("max_tokens", 0)), } - messages = get_argument_value(args, kwargs, 0, "messages") - input_messages = self._extract_input_message(messages) + messages = get_argument_value([], kwargs, 0, "messages") + system_prompt = get_argument_value([], kwargs, 0, "system") + input_messages = self._extract_input_message(messages, system_prompt) span.set_tag_str(SPAN_KIND, "llm") span.set_tag_str(MODEL_NAME, span.get_tag("anthropic.request.model") or "") @@ -78,7 +79,7 @@ def llmobs_set_tags( span.set_tag_str(METRICS, json.dumps(_get_llmobs_metrics_tags(span))) - def _extract_input_message(self, messages): + def _extract_input_message(self, messages, system_prompt=None): """Extract input messages from the stored prompt. Anthropic allows for messages and multiple texts in a message, which requires some special casing. 
""" @@ -86,6 +87,8 @@ def _extract_input_message(self, messages): log.warning("Anthropic input must be a list of messages.") input_messages = [] + if system_prompt is not None: + input_messages.append({"content": system_prompt, "role": "system"}) for message in messages: if not isinstance(message, dict): log.warning("Anthropic message input must be a list of message param dicts.") diff --git a/tests/contrib/anthropic/cassettes/anthropic_hello_world.yaml b/tests/contrib/anthropic/cassettes/anthropic_hello_world.yaml index ecc04fc5621..35b7345677f 100644 --- a/tests/contrib/anthropic/cassettes/anthropic_hello_world.yaml +++ b/tests/contrib/anthropic/cassettes/anthropic_hello_world.yaml @@ -4,7 +4,7 @@ interactions: "text": "Reply: ''Hello World!'' when I say: ''Hello''"}, {"type": "text", "text": "Hello"}]}, {"role": "assistant", "content": "Hello World!"}, {"role": "user", "content": [{"type": "text", "text": "Hello"}]}], "model": "claude-3-opus-20240229", - "temperature": 0.8}' + "system": "Respond in all caps everytime.", "temperature": 0.8}' headers: accept: - application/json @@ -15,7 +15,7 @@ interactions: connection: - keep-alive content-length: - - '340' + - '384' content-type: - application/json host: @@ -35,21 +35,21 @@ interactions: x-stainless-runtime: - CPython x-stainless-runtime-version: - - 3.10.13 + - 3.10.9 method: POST uri: https://api.anthropic.com/v1/messages response: body: string: !!binary | - H4sIAAAAAAAAA0yOzWrDMBCEX6WdswyuHArRrRBI6TGXHkIxxtoEE3nX1a5CgvG7F4cWehr45oeZ - MUQEjHpu65f97uO+yftyulxPh21zGMvV797gYPeJ1hSpdmeCQ5a0gk51UOvY4DBKpISAPnUlUtVU - MhWtfO03tfdbOPTCRmwIx/lv0Oi2Vh8S8E4pydOn5BSfsXw5qMnUZupUGAHEsbWSGb+G0nch7gmB - S0oO5fEtzBh4KtaaXIgVoWkcpNh/9LosPwAAAP//AwDPtjn1+AAAAA== + H4sIAAAAAAAAA0yOzUrEQBCEX0XrPIHsuLBmjouCh8CyXkREwphpYnTSk6R7QAl5d8mi4Kngqx9q + QR/gMEjXlDtbvR868h/n0B2m8/GtGp7b4wQD/R5pS5GI7wgGc4ob8CK9qGeFwZACRTi00edAxU2R + xiyFLe2+tLaCQZtYiRXuZfkbVPraqhdxeLiv69PV0+mxvrvG+mogmsZmJi+J4UAcGs0z49cQmjJx + 
S3CcYzTIl29uQc9j1kbTJ7HA7XcGKet/dLuuPwAAAP//AwA1yXoc+AAAAA== headers: CF-Cache-Status: - DYNAMIC CF-RAY: - - 88e1b7e91ae042d0-EWR + - 88f09dc5dffe421d-EWR Connection: - keep-alive Content-Encoding: @@ -57,7 +57,7 @@ interactions: Content-Type: - application/json Date: - - Mon, 03 Jun 2024 18:24:10 GMT + - Wed, 05 Jun 2024 13:47:46 GMT Server: - cloudflare Transfer-Encoding: @@ -67,19 +67,19 @@ interactions: anthropic-ratelimit-requests-remaining: - '4' anthropic-ratelimit-requests-reset: - - '2024-06-03T18:24:57Z' + - '2024-06-05T13:47:57Z' anthropic-ratelimit-tokens-limit: - '10000' anthropic-ratelimit-tokens-remaining: - '10000' anthropic-ratelimit-tokens-reset: - - '2024-06-03T18:24:57Z' + - '2024-06-05T13:47:57Z' request-id: - - req_01Ey5yndaLUmUmn1A6YDSJrr + - req_01BJ7GJG1YzsYSY3xUVoaNaX via: - 1.1 google x-cloud-trace-context: - - fd17395b60d4b6d19c95418c5797b164 + - 2810149c979072b48cbc451ef622b3a8 status: code: 200 message: OK diff --git a/tests/contrib/anthropic/test_anthropic_llmobs.py b/tests/contrib/anthropic/test_anthropic_llmobs.py index a529e8bd7c3..ed8ad59dd5a 100644 --- a/tests/contrib/anthropic/test_anthropic_llmobs.py +++ b/tests/contrib/anthropic/test_anthropic_llmobs.py @@ -16,6 +16,7 @@ def test_completion(self, anthropic, ddtrace_global_config, mock_llmobs_writer, with request_vcr.use_cassette("anthropic_hello_world.yaml"): llm.messages.create( model="claude-3-opus-20240229", + system="Respond in all caps everytime.", max_tokens=15, messages=[ { @@ -31,7 +32,7 @@ def test_completion(self, anthropic, ddtrace_global_config, mock_llmobs_writer, }, ], }, - {"role": "assistant", "content": "Hello World!"}, + {"role": "assistant", "content": "HELLO WORLD!"}, { "role": "user", "content": [ @@ -52,14 +53,15 @@ def test_completion(self, anthropic, ddtrace_global_config, mock_llmobs_writer, model_name="claude-3-opus-20240229", model_provider="anthropic", input_messages=[ + {"content": "Respond in all caps everytime.", "role": "system"}, {"content": 
"Reply: 'Hello World!' when I say: 'Hello'", "role": "user"}, {"content": "Hello", "role": "user"}, - {"content": "Hello World!", "role": "assistant"}, + {"content": "HELLO WORLD!", "role": "assistant"}, {"content": "Hello", "role": "user"}, ], - output_messages=[{"content": "Hello World!", "role": "assistant"}], + output_messages=[{"content": "HELLO WORLD!", "role": "assistant"}], metadata={"temperature": 0.8, "max_tokens": 15}, - token_metrics={"input_tokens": 33, "output_tokens": 6, "total_tokens": 39}, + token_metrics={"input_tokens": 41, "output_tokens": 8, "total_tokens": 49}, tags={"ml_app": ""}, ) ) From 1d3044e982683273bc5e40c86458a1a1ca1799ed Mon Sep 17 00:00:00 2001 From: William Conti Date: Wed, 5 Jun 2024 11:26:33 -0400 Subject: [PATCH 22/33] small fixes --- ddtrace/llmobs/_integrations/anthropic.py | 12 +++++++++--- tests/contrib/anthropic/test_anthropic.py | 1 - 2 files changed, 9 insertions(+), 4 deletions(-) diff --git a/ddtrace/llmobs/_integrations/anthropic.py b/ddtrace/llmobs/_integrations/anthropic.py index 261e0e60356..f19bd96bdd6 100644 --- a/ddtrace/llmobs/_integrations/anthropic.py +++ b/ddtrace/llmobs/_integrations/anthropic.py @@ -6,7 +6,6 @@ from typing import Optional from ddtrace._trace.span import Span -from ddtrace.contrib.anthropic.utils import _get_attr from ddtrace.internal.logger import get_logger from ddtrace.internal.utils import get_argument_value from ddtrace.llmobs._constants import INPUT_MESSAGES @@ -54,7 +53,6 @@ def llmobs_set_tags( kwargs: Dict[str, Any], err: Optional[Any] = None, ) -> None: - """Extract prompt/response tags from a completion and set them as temporary "_ml_obs.*" tags.""" if not self.llmobs_enabled: return @@ -63,7 +61,7 @@ def llmobs_set_tags( "max_tokens": float(kwargs.get("max_tokens", 0)), } messages = get_argument_value([], kwargs, 0, "messages") - system_prompt = get_argument_value([], kwargs, 0, "system") + system_prompt = get_argument_value([], kwargs, 0, "system", optional=True) input_messages 
= self._extract_input_message(messages, system_prompt) span.set_tag_str(SPAN_KIND, "llm") @@ -150,3 +148,11 @@ def _get_llmobs_metrics_tags(cls, span): "output_tokens": span.get_metric("anthropic.response.usage.output_tokens"), "total_tokens": span.get_metric("anthropic.response.usage.total_tokens"), } + + +def _get_attr(o: Any, attr: str, default: Any): + # Since our response may be a dict or object, convenience method + if isinstance(o, dict): + return o.get(attr, default) + else: + return getattr(o, attr, default) diff --git a/tests/contrib/anthropic/test_anthropic.py b/tests/contrib/anthropic/test_anthropic.py index 1020319549d..e42fcb699be 100644 --- a/tests/contrib/anthropic/test_anthropic.py +++ b/tests/contrib/anthropic/test_anthropic.py @@ -151,7 +151,6 @@ def test_anthropic_llm_sync_stream(anthropic, request_vcr): token="tests.contrib.anthropic.test_anthropic.test_anthropic_llm_stream_helper", ignores=["resource"] ) def test_anthropic_llm_sync_stream_helper(anthropic, request_vcr): - llm = anthropic.Anthropic() with request_vcr.use_cassette("anthropic_completion_stream_helper.yaml"): with llm.messages.stream( From 0fc729789453d7fa5adb90d35f255bf3783191c1 Mon Sep 17 00:00:00 2001 From: William Conti Date: Wed, 5 Jun 2024 11:28:20 -0400 Subject: [PATCH 23/33] small fix --- ddtrace/llmobs/_integrations/anthropic.py | 11 +++++++++-- 1 file changed, 9 insertions(+), 2 deletions(-) diff --git a/ddtrace/llmobs/_integrations/anthropic.py b/ddtrace/llmobs/_integrations/anthropic.py index 6f399254f2d..de58139945a 100644 --- a/ddtrace/llmobs/_integrations/anthropic.py +++ b/ddtrace/llmobs/_integrations/anthropic.py @@ -6,7 +6,6 @@ from typing import Optional from ddtrace._trace.span import Span -from ddtrace.contrib.anthropic.utils import _get_attr from ddtrace.internal.logger import get_logger from ddtrace.internal.utils import get_argument_value from ddtrace.llmobs._constants import INPUT_MESSAGES @@ -63,7 +62,7 @@ def llmobs_set_tags( "max_tokens": 
float(kwargs.get("max_tokens", 0)), } messages = get_argument_value([], kwargs, 0, "messages") - system_prompt = get_argument_value([], kwargs, 0, "system") + system_prompt = get_argument_value([], kwargs, 0, "system", optional=True) input_messages = self._extract_input_message(messages, system_prompt) span.set_tag_str(SPAN_KIND, "llm") @@ -150,3 +149,11 @@ def _get_llmobs_metrics_tags(span): "output_tokens": span.get_metric("anthropic.response.usage.output_tokens"), "total_tokens": span.get_metric("anthropic.response.usage.total_tokens"), } + + +def _get_attr(o: Any, attr: str, default: Any): + # Since our response may be a dict or object, convenience method + if isinstance(o, dict): + return o.get(attr, default) + else: + return getattr(o, attr, default) From f657c8ffd74d083182083325f0bff69098c0b823 Mon Sep 17 00:00:00 2001 From: William Conti Date: Wed, 5 Jun 2024 13:03:05 -0400 Subject: [PATCH 24/33] changes --- ddtrace/contrib/anthropic/patch.py | 4 - ddtrace/llmobs/_integrations/anthropic.py | 45 +-- .../anthropic_completion_invalid_api_key.yaml | 70 +++++ .../anthropic_completion_multi_prompt.yaml | 34 +-- .../anthropic_completion_stream.yaml | 2 +- .../anthropic_completion_tools_part_1.yaml | 95 ------ .../anthropic_completion_tools_part_2.yaml | 279 ------------------ tests/contrib/anthropic/test_anthropic.py | 6 +- .../anthropic/test_anthropic_llmobs.py | 88 ++++-- ...c.test_anthropic_llm_multiple_prompts.json | 10 +- ...ropic_llm_multiple_prompts_no_history.json | 41 --- 11 files changed, 182 insertions(+), 492 deletions(-) create mode 100644 tests/contrib/anthropic/cassettes/anthropic_completion_invalid_api_key.yaml delete mode 100644 tests/contrib/anthropic/cassettes/anthropic_completion_tools_part_1.yaml delete mode 100644 tests/contrib/anthropic/cassettes/anthropic_completion_tools_part_2.yaml delete mode 100644 tests/snapshots/tests.contrib.anthropic.test_anthropic.test_anthropic_llm_multiple_prompts_no_history.json diff --git 
a/ddtrace/contrib/anthropic/patch.py b/ddtrace/contrib/anthropic/patch.py index e2f93ce1942..632d2aa5235 100644 --- a/ddtrace/contrib/anthropic/patch.py +++ b/ddtrace/contrib/anthropic/patch.py @@ -101,8 +101,6 @@ def traced_chat_model_generate(anthropic, pin, func, instance, args, kwargs): handle_non_streamed_response(integration, chat_completions, args, kwargs, span) except Exception: span.set_exc_info(*sys.exc_info()) - if integration.is_pc_sampled_llmobs(span): - integration.llmobs_set_tags(span=span, resp=chat_completions, args=args, kwargs=kwargs) raise finally: if integration.is_pc_sampled_llmobs(span): @@ -178,8 +176,6 @@ async def traced_async_chat_model_generate(anthropic, pin, func, instance, args, handle_non_streamed_response(integration, chat_completions, args, kwargs, span) except Exception: span.set_exc_info(*sys.exc_info()) - if integration.is_pc_sampled_llmobs(span): - integration.llmobs_set_tags(span=span, resp=chat_completions, args=args, kwargs=kwargs) raise finally: if integration.is_pc_sampled_llmobs(span): diff --git a/ddtrace/llmobs/_integrations/anthropic.py b/ddtrace/llmobs/_integrations/anthropic.py index de58139945a..651ede9c2ce 100644 --- a/ddtrace/llmobs/_integrations/anthropic.py +++ b/ddtrace/llmobs/_integrations/anthropic.py @@ -7,7 +7,6 @@ from ddtrace._trace.span import Span from ddtrace.internal.logger import get_logger -from ddtrace.internal.utils import get_argument_value from ddtrace.llmobs._constants import INPUT_MESSAGES from ddtrace.llmobs._constants import METADATA from ddtrace.llmobs._constants import METRICS @@ -61,8 +60,8 @@ def llmobs_set_tags( "temperature": float(kwargs.get("temperature", 1.0)), "max_tokens": float(kwargs.get("max_tokens", 0)), } - messages = get_argument_value([], kwargs, 0, "messages") - system_prompt = get_argument_value([], kwargs, 0, "system", optional=True) + messages = kwargs.get("messages") + system_prompt = kwargs.get("system") input_messages = self._extract_input_message(messages, 
system_prompt) span.set_tag_str(SPAN_KIND, "llm") @@ -76,7 +75,9 @@ def llmobs_set_tags( output_messages = self._extract_output_message(resp) span.set_tag_str(OUTPUT_MESSAGES, json.dumps(output_messages)) - span.set_tag_str(METRICS, json.dumps(_get_llmobs_metrics_tags(span))) + usage = AnthropicIntegration._get_llmobs_metrics_tags(span) + if usage != {}: + span.set_tag_str(METRICS, json.dumps(usage)) def _extract_input_message(self, messages, system_prompt=None): """Extract input messages from the stored prompt. @@ -133,22 +134,30 @@ def _extract_output_message(self, response): def record_usage(self, span: Span, usage: Dict[str, Any]) -> None: if not usage: return - input_tokens = _get_attr(usage, "input_tokens", None) - output_tokens = _get_attr(usage, "output_tokens", None) - - span.set_metric("anthropic.response.usage.input_tokens", input_tokens) - span.set_metric("anthropic.response.usage.output_tokens", output_tokens) - - if input_tokens is not None and output_tokens is not None: + input_tokens = _get_attr(usage, "input_tokens", 0) + output_tokens = _get_attr(usage, "output_tokens", 0) + + if input_tokens != 0: + span.set_metric("anthropic.response.usage.input_tokens", input_tokens) + if output_tokens != 0: + span.set_metric("anthropic.response.usage.output_tokens", output_tokens) + if input_tokens != 0 and output_tokens != 0: span.set_metric("anthropic.response.usage.total_tokens", input_tokens + output_tokens) - -def _get_llmobs_metrics_tags(span): - return { - "input_tokens": span.get_metric("anthropic.response.usage.input_tokens"), - "output_tokens": span.get_metric("anthropic.response.usage.output_tokens"), - "total_tokens": span.get_metric("anthropic.response.usage.total_tokens"), - } + @classmethod + def _get_llmobs_metrics_tags(cls, span): + usage = {} + prompt_tokens = span.get_metric("anthropic.response.usage.input_tokens") + completion_tokens = span.get_metric("anthropic.response.usage.output_tokens") + total_tokens = 
span.get_metric("anthropic.response.usage.total_tokens") + + if prompt_tokens is not None: + usage["prompt_tokens"] = prompt_tokens + if completion_tokens is not None: + usage["completion_tokens"] = completion_tokens + if total_tokens is not None: + usage["total_tokens"] = total_tokens + return usage def _get_attr(o: Any, attr: str, default: Any): diff --git a/tests/contrib/anthropic/cassettes/anthropic_completion_invalid_api_key.yaml b/tests/contrib/anthropic/cassettes/anthropic_completion_invalid_api_key.yaml new file mode 100644 index 00000000000..1723c1368a4 --- /dev/null +++ b/tests/contrib/anthropic/cassettes/anthropic_completion_invalid_api_key.yaml @@ -0,0 +1,70 @@ +interactions: +- request: + body: '{"max_tokens": 15, "messages": [{"role": "user", "content": [{"type": "text", + "text": "Hello, I am looking for information about some books!"}, {"type": "text", + "text": "What is the best selling book?"}]}], "model": "claude-3-opus-20240229", + "system": "Respond only in all caps.", "temperature": 0.8}' + headers: + accept: + - application/json + accept-encoding: + - gzip, deflate + anthropic-version: + - '2023-06-01' + connection: + - keep-alive + content-length: + - '300' + content-type: + - application/json + host: + - api.anthropic.com + user-agent: + - Anthropic/Python 0.28.0 + x-stainless-arch: + - arm64 + x-stainless-async: + - 'false' + x-stainless-lang: + - python + x-stainless-os: + - MacOS + x-stainless-package-version: + - 0.28.0 + x-stainless-runtime: + - CPython + x-stainless-runtime-version: + - 3.10.9 + method: POST + uri: https://api.anthropic.com/v1/messages + response: + body: + string: '{"type":"error","error":{"type":"authentication_error","message":"invalid + x-api-key"}}' + headers: + CF-Cache-Status: + - DYNAMIC + CF-RAY: + - 88f189dac80ac32b-EWR + Connection: + - keep-alive + Content-Length: + - '86' + Content-Type: + - application/json + Date: + - Wed, 05 Jun 2024 16:28:54 GMT + Server: + - cloudflare + request-id: + - 
req_013JyjhVcnhy8mfJkvBTqMNB + via: + - 1.1 google + x-cloud-trace-context: + - 93ac98996f397cc0399d31159d38f4bb + x-should-retry: + - 'false' + status: + code: 401 + message: Unauthorized +version: 1 diff --git a/tests/contrib/anthropic/cassettes/anthropic_completion_multi_prompt.yaml b/tests/contrib/anthropic/cassettes/anthropic_completion_multi_prompt.yaml index c79af1d1917..fa9b49e5396 100644 --- a/tests/contrib/anthropic/cassettes/anthropic_completion_multi_prompt.yaml +++ b/tests/contrib/anthropic/cassettes/anthropic_completion_multi_prompt.yaml @@ -2,8 +2,8 @@ interactions: - request: body: '{"max_tokens": 15, "messages": [{"role": "user", "content": [{"type": "text", "text": "Hello, I am looking for information about some books!"}, {"type": "text", - "text": "Can you explain what Descartes meant by ''I think, therefore I am''?"}]}], - "model": "claude-3-opus-20240229", "system": "Respond only in all caps."}' + "text": "What is the best selling book?"}]}], "model": "claude-3-opus-20240229", + "system": "Respond only in all caps.", "temperature": 0.8}' headers: accept: - application/json @@ -14,13 +14,13 @@ interactions: connection: - keep-alive content-length: - - '316' + - '300' content-type: - application/json host: - api.anthropic.com user-agent: - - Anthropic/Python 0.26.1 + - Anthropic/Python 0.28.0 x-stainless-arch: - arm64 x-stainless-async: @@ -30,26 +30,26 @@ interactions: x-stainless-os: - MacOS x-stainless-package-version: - - 0.26.1 + - 0.28.0 x-stainless-runtime: - CPython x-stainless-runtime-version: - - 3.10.13 + - 3.10.9 method: POST uri: https://api.anthropic.com/v1/messages response: body: string: !!binary | - H4sIAAAAAAAAA0yOQUvDQBSE/0qYi5cNpLFR3FvUSEuphqb1UJWwJM8a3OzG7ltpCfnvktKCp4GZ - b4bp0dSQaN2ujCbJ/etxc7NVy23++ftVL27znNsEAnzsaKTIObUjCOytHg3lXONYGYZAa2vSkKi0 - 8jWF16HtvAvjKJ5GcXwHgcoaJsOQb/1lkOkwVk8i8ZgVD+lqnRVXwVO6fNkUQT5bpUUWvGMerGfz - 54XA8CHg2HblnpSzZjylDiXbbzIO58jRjydTEaTxWgv402nZozGd5wssp4mA9fzfmiTD8AcAAP// - AwCozOzqEgEAAA== + 
H4sIAAAAAAAAA0yOYUuEQBiE/8oyn1fw9ipovyXZZWdKKBFUiOnbIemud+8ueMj99/DooE8DM88M + M6NroTHwrgpXZfTinqJvNWz3X69vPBXHbJPdQMIdR1ooYq53BImD7RejZu7Y1cZBYrAt9dBo+tq3 + FKwDO3oOVKiuQqVuIdFY48g46Pf5MuhoWqpn0SgfYxHFRRkUcZom2UZEeb4V+YO4S1NRJs+xSArx + gfs8w+lTgp0dqwPVbM3yrZ4qZ3/IMP4ipr0n0xC08X0v4c/f9YzOjN5dYL1WEta7/9bq+nT6BQAA + //8DAMSYrqgZAQAA headers: CF-Cache-Status: - DYNAMIC CF-RAY: - - 88ea9acec90172b7-EWR + - 88f16344983e1861-EWR Connection: - keep-alive Content-Encoding: @@ -57,7 +57,7 @@ interactions: Content-Type: - application/json Date: - - Tue, 04 Jun 2024 20:17:11 GMT + - Wed, 05 Jun 2024 16:02:36 GMT Server: - cloudflare Transfer-Encoding: @@ -67,19 +67,19 @@ interactions: anthropic-ratelimit-requests-remaining: - '4' anthropic-ratelimit-requests-reset: - - '2024-06-04T20:17:57Z' + - '2024-06-05T16:02:57Z' anthropic-ratelimit-tokens-limit: - '10000' anthropic-ratelimit-tokens-remaining: - '10000' anthropic-ratelimit-tokens-reset: - - '2024-06-04T20:17:57Z' + - '2024-06-05T16:02:57Z' request-id: - - req_01PDCp5gcfpzQ4P5NAdtXfrU + - req_01Bd7D9NJM29LYXW5VTm99rv via: - 1.1 google x-cloud-trace-context: - - 609af05f60c212e11bbb86f767f6f1b0 + - 07d6532b3235336a58f4f8d0baffd032 status: code: 200 message: OK diff --git a/tests/contrib/anthropic/cassettes/anthropic_completion_stream.yaml b/tests/contrib/anthropic/cassettes/anthropic_completion_stream.yaml index 06baa0cb61c..b08fe741d72 100644 --- a/tests/contrib/anthropic/cassettes/anthropic_completion_stream.yaml +++ b/tests/contrib/anthropic/cassettes/anthropic_completion_stream.yaml @@ -191,4 +191,4 @@ interactions: status: code: 200 message: OK -version: 1 +version: 1 \ No newline at end of file diff --git a/tests/contrib/anthropic/cassettes/anthropic_completion_tools_part_1.yaml b/tests/contrib/anthropic/cassettes/anthropic_completion_tools_part_1.yaml deleted file mode 100644 index 55dd4a3a5fc..00000000000 --- a/tests/contrib/anthropic/cassettes/anthropic_completion_tools_part_1.yaml +++ /dev/null @@ -1,95 +0,0 @@ 
-interactions: -- request: - body: '{"max_tokens": 200, "messages": [{"role": "user", "content": "What is the - result of 1,984,135 * 9,343,116?"}], "model": "claude-3-opus-20240229", "tools": - [{"name": "calculator", "description": "A simple calculator that performs basic - arithmetic operations.", "input_schema": {"type": "object", "properties": {"expression": - {"type": "string", "description": "The mathematical expression to evaluate (e.g., - ''2 + 3 * 4'')."}}, "required": ["expression"]}}]}' - headers: - accept: - - application/json - accept-encoding: - - gzip, deflate - anthropic-version: - - '2023-06-01' - connection: - - keep-alive - content-length: - - '454' - content-type: - - application/json - host: - - api.anthropic.com - user-agent: - - Anthropic/Python 0.28.0 - x-stainless-arch: - - arm64 - x-stainless-async: - - 'false' - x-stainless-lang: - - python - x-stainless-os: - - MacOS - x-stainless-package-version: - - 0.28.0 - x-stainless-runtime: - - CPython - x-stainless-runtime-version: - - 3.10.13 - method: POST - uri: https://api.anthropic.com/v1/messages - response: - body: - string: !!binary | - H4sIAAAAAAAAA2SSUU8bMQzHv4rlxyllvbaD9oQm9WFCY7wMIZC2m6pw59555JIjdgqs6nef0sJg - 4imK43/8+9veIjdYYi/talws2uuB4sWSzzYX1ycPyxTOL79fokF9GihnkYhtCQ3G4HLAirCo9YoG - +9CQwxJrZ1NDo+koDElGk/FkNp5MFmiwDl7JK5Y/ty8fKj1m6f4o8VQ79nfs28+Vv+oIauvq5KyG - CBqCAxaI5GhjvcI6RLBeHiiyb0E7FrhPJMrBgxVghYHiOsRe4NYK12Aja9eTcg1hoGhzphxV/pLu - E0dqYLDR9qQUpaz8COhxiCTCwZeQYZJQhIYj1eqeYIhhww01oB1Bb7Wj3irX1r3RQYWFWcxnpph+ - gg+wMNPZ1BTFcYVHcNOxo0yZm2LZC9Sh760Y+Aq19RDJSvD21j0B+zVF0C4IgY0Ev5Nopmmyz4Zb - VhDK8BqigPUHpjcYLJA9bqwjr6Ahcy3ms2eq6Wx6YKr80rm9Nr5vyb7yi2kDEp45hxhqogYeWLs8 - L3eYxrvZHVX+9OO/8eLOvK5ACG6VJC/VfhPzPa3GxRf9cVb8seffHotl2/F8vVgqX6FBb/usey2Q - lX5IiuUWX21j+d4l7na/DIqGYXVo8P/19w9C94l8TVj65JzBtN/4cnuosdJwR16wPJ6NDYakb2PF - 8clu9xcAAP//AwDqxmMHUQMAAA== - headers: - CF-Cache-Status: - - DYNAMIC - CF-RAY: - - 88ea0c10fe0043dd-EWR - Connection: - - keep-alive - Content-Encoding: - 
- gzip - Content-Type: - - application/json - Date: - - Tue, 04 Jun 2024 18:39:50 GMT - Server: - - cloudflare - Transfer-Encoding: - - chunked - anthropic-ratelimit-requests-limit: - - '5' - anthropic-ratelimit-requests-remaining: - - '0' - anthropic-ratelimit-requests-reset: - - '2024-06-04T18:39:57Z' - anthropic-ratelimit-tokens-limit: - - '10000' - anthropic-ratelimit-tokens-remaining: - - '9000' - anthropic-ratelimit-tokens-reset: - - '2024-06-04T18:39:57Z' - request-id: - - req_01LsEERKwRF6i8rW9hbFVT8a - retry-after: - - '7' - via: - - 1.1 google - x-cloud-trace-context: - - d588cf3614f34cc29f44bf38cfa3371f - status: - code: 200 - message: OK -version: 1 diff --git a/tests/contrib/anthropic/cassettes/anthropic_completion_tools_part_2.yaml b/tests/contrib/anthropic/cassettes/anthropic_completion_tools_part_2.yaml deleted file mode 100644 index 13d4756a5a4..00000000000 --- a/tests/contrib/anthropic/cassettes/anthropic_completion_tools_part_2.yaml +++ /dev/null @@ -1,279 +0,0 @@ -interactions: -- request: - body: '{"max_tokens": 500, "messages": [{"role": "user", "content": "What is the - result of 1,984,135 * 9,343,116?"}, {"role": "assistant", "content": [{"text": - "\nThe calculator tool is relevant for answering this question as - it performs basic arithmetic operations.\nRequired parameters:\n- expression: - The user directly provided the mathematical expression \"1,984,135 * 9,343,116\". 
- While it contains commas, I can reasonably infer those are just used as digit - separators and the expression is equivalent to \"1984135 * 9343116\".\nAll the - required parameters are provided, so I can proceed with calling the calculator - tool.\n", "type": "text"}, {"id": "toolu_01EtZG1zaJKx1Aghi8f9AtiT", - "input": {"expression": "1984135 * 9343116"}, "name": "calculator", "type": - "tool_use"}]}, {"role": "user", "content": [{"type": "tool_result", "tool_use_id": - "toolu_01EtZG1zaJKx1Aghi8f9AtiT", "content": "18538003464660"}]}], "model": - "claude-3-opus-20240229", "tools": [{"name": "calculator", "description": "A - simple calculator that performs basic arithmetic operations.", "input_schema": - {"type": "object", "properties": {"expression": {"type": "string", "description": - "The mathematical expression to evaluate (e.g., ''2 + 3 * 4'')."}}, "required": - ["expression"]}}]}' - headers: - accept: - - application/json - accept-encoding: - - gzip, deflate - anthropic-version: - - '2023-06-01' - connection: - - keep-alive - content-length: - - '1273' - content-type: - - application/json - host: - - api.anthropic.com - user-agent: - - Anthropic/Python 0.28.0 - x-stainless-arch: - - arm64 - x-stainless-async: - - 'false' - x-stainless-lang: - - python - x-stainless-os: - - MacOS - x-stainless-package-version: - - 0.28.0 - x-stainless-runtime: - - CPython - x-stainless-runtime-version: - - 3.10.13 - method: POST - uri: https://api.anthropic.com/v1/messages - response: - body: - string: '{"type":"error","error":{"type":"rate_limit_error","message":"Number - of requests has exceeded your per-minute rate limit (https://docs.anthropic.com/en/api/rate-limits); - see the response headers for current usage. 
Please try again later or contact - sales at https://www.anthropic.com/contact-sales to discuss your options for - a rate limit increase."}}' - headers: - CF-Cache-Status: - - DYNAMIC - CF-RAY: - - 88ea0c44d95b43dd-EWR - Connection: - - keep-alive - Content-Length: - - '350' - Content-Type: - - application/json - Date: - - Tue, 04 Jun 2024 18:39:50 GMT - Server: - - cloudflare - anthropic-ratelimit-requests-limit: - - '5' - anthropic-ratelimit-requests-remaining: - - '0' - anthropic-ratelimit-requests-reset: - - '2024-06-04T18:39:57Z' - anthropic-ratelimit-tokens-limit: - - '10000' - anthropic-ratelimit-tokens-remaining: - - '8000' - anthropic-ratelimit-tokens-reset: - - '2024-06-04T18:39:57Z' - request-id: - - req_01VRXnTtDN7bkGVjXbQNgzcL - retry-after: - - '7' - via: - - 1.1 google - x-cloud-trace-context: - - fc211fe021a43e2704a032ddb1b704cb - x-should-retry: - - 'true' - status: - code: 429 - message: Too Many Requests -- request: - body: '{"max_tokens": 500, "messages": [{"role": "user", "content": "What is the - result of 1,984,135 * 9,343,116?"}, {"role": "assistant", "content": [{"text": - "\nThe calculator tool is relevant for answering this question as - it performs basic arithmetic operations.\nRequired parameters:\n- expression: - The user directly provided the mathematical expression \"1,984,135 * 9,343,116\". 
- While it contains commas, I can reasonably infer those are just used as digit - separators and the expression is equivalent to \"1984135 * 9343116\".\nAll the - required parameters are provided, so I can proceed with calling the calculator - tool.\n", "type": "text"}, {"id": "toolu_01EtZG1zaJKx1Aghi8f9AtiT", - "input": {"expression": "1984135 * 9343116"}, "name": "calculator", "type": - "tool_use"}]}, {"role": "user", "content": [{"type": "tool_result", "tool_use_id": - "toolu_01EtZG1zaJKx1Aghi8f9AtiT", "content": "18538003464660"}]}], "model": - "claude-3-opus-20240229", "tools": [{"name": "calculator", "description": "A - simple calculator that performs basic arithmetic operations.", "input_schema": - {"type": "object", "properties": {"expression": {"type": "string", "description": - "The mathematical expression to evaluate (e.g., ''2 + 3 * 4'')."}}, "required": - ["expression"]}}]}' - headers: - accept: - - application/json - accept-encoding: - - gzip, deflate - anthropic-version: - - '2023-06-01' - connection: - - keep-alive - content-length: - - '1273' - content-type: - - application/json - host: - - api.anthropic.com - user-agent: - - Anthropic/Python 0.28.0 - x-stainless-arch: - - arm64 - x-stainless-async: - - 'false' - x-stainless-lang: - - python - x-stainless-os: - - MacOS - x-stainless-package-version: - - 0.28.0 - x-stainless-runtime: - - CPython - x-stainless-runtime-version: - - 3.10.13 - method: POST - uri: https://api.anthropic.com/v1/messages - response: - body: - string: '{"type":"error","error":{"type":"overloaded_error","message":"Overloaded"}}' - headers: - CF-Cache-Status: - - DYNAMIC - CF-RAY: - - 88ea0c717f55434f-EWR - Cache-Control: - - no-store, no-cache - Connection: - - keep-alive - Content-Length: - - '75' - Content-Type: - - application/json - Date: - - Tue, 04 Jun 2024 18:39:59 GMT - Server: - - cloudflare - request-id: - - req_01PTvVW3st1XAwA3EymUCdq9 - via: - - 1.1 google - x-cloud-trace-context: - - 
351112042c856da4d17d2d6062cfd0a0 - x-should-retry: - - 'true' - status: - code: 529 - message: '' -- request: - body: '{"max_tokens": 500, "messages": [{"role": "user", "content": "What is the - result of 1,984,135 * 9,343,116?"}, {"role": "assistant", "content": [{"text": - "\nThe calculator tool is relevant for answering this question as - it performs basic arithmetic operations.\nRequired parameters:\n- expression: - The user directly provided the mathematical expression \"1,984,135 * 9,343,116\". - While it contains commas, I can reasonably infer those are just used as digit - separators and the expression is equivalent to \"1984135 * 9343116\".\nAll the - required parameters are provided, so I can proceed with calling the calculator - tool.\n", "type": "text"}, {"id": "toolu_01EtZG1zaJKx1Aghi8f9AtiT", - "input": {"expression": "1984135 * 9343116"}, "name": "calculator", "type": - "tool_use"}]}, {"role": "user", "content": [{"type": "tool_result", "tool_use_id": - "toolu_01EtZG1zaJKx1Aghi8f9AtiT", "content": "18538003464660"}]}], "model": - "claude-3-opus-20240229", "tools": [{"name": "calculator", "description": "A - simple calculator that performs basic arithmetic operations.", "input_schema": - {"type": "object", "properties": {"expression": {"type": "string", "description": - "The mathematical expression to evaluate (e.g., ''2 + 3 * 4'')."}}, "required": - ["expression"]}}]}' - headers: - accept: - - application/json - accept-encoding: - - gzip, deflate - anthropic-version: - - '2023-06-01' - connection: - - keep-alive - content-length: - - '1273' - content-type: - - application/json - host: - - api.anthropic.com - user-agent: - - Anthropic/Python 0.28.0 - x-stainless-arch: - - arm64 - x-stainless-async: - - 'false' - x-stainless-lang: - - python - x-stainless-os: - - MacOS - x-stainless-package-version: - - 0.28.0 - x-stainless-runtime: - - CPython - x-stainless-runtime-version: - - 3.10.13 - method: POST - uri: https://api.anthropic.com/v1/messages - 
response: - body: - string: !!binary | - H4sIAAAAAAAAA0yPzUrDQBSFX2U4S5lIkklDMsuK6MqNgqBISJObNpjOpHPvgDX03SVFwdWB7/zA - WTD2sDjyvkmz+9MDP70+7nbP27chnOfxuw7DHTTkPNOaIuZ2T9AIflpByzyytE6gcfQ9TbDopjb2 - lJjEz5GTPM2LNM9raHTeCTmBfV/+BoW+1upVLF4OFGjwgbSSA6lAHCdRflCZrqtCZ2ajblStTWF0 - lpVqZJVVG1OlqSnKoizTW1w+NFj83ARq2TtYkOsbicHh12A6RXIdwbo4TRrxesguGN0cpRH/SY5h - q9xo+Cj/mckvlx8AAAD//wMAjjOTzS8BAAA= - headers: - CF-Cache-Status: - - DYNAMIC - CF-RAY: - - 88ea0c83ec85434f-EWR - Connection: - - keep-alive - Content-Encoding: - - gzip - Content-Type: - - application/json - Date: - - Tue, 04 Jun 2024 18:40:03 GMT - Server: - - cloudflare - Transfer-Encoding: - - chunked - anthropic-ratelimit-requests-limit: - - '5' - anthropic-ratelimit-requests-remaining: - - '4' - anthropic-ratelimit-requests-reset: - - '2024-06-04T18:40:57Z' - anthropic-ratelimit-tokens-limit: - - '10000' - anthropic-ratelimit-tokens-remaining: - - '9000' - anthropic-ratelimit-tokens-reset: - - '2024-06-04T18:40:57Z' - request-id: - - req_01MEo5gJ4zAJt1kjVqm39kG6 - via: - - 1.1 google - x-cloud-trace-context: - - e8e88ccc3bd9fd43fefaef20d023d491 - status: - code: 200 - message: OK -version: 1 diff --git a/tests/contrib/anthropic/test_anthropic.py b/tests/contrib/anthropic/test_anthropic.py index 6de89c87e3a..a619c6a3cf0 100644 --- a/tests/contrib/anthropic/test_anthropic.py +++ b/tests/contrib/anthropic/test_anthropic.py @@ -67,12 +67,13 @@ def test_anthropic_llm_sync_multiple_prompts(anthropic, request_vcr): model="claude-3-opus-20240229", max_tokens=15, system="Respond only in all caps.", + temperature=0.8, messages=[ { "role": "user", "content": [ {"type": "text", "text": "Hello, I am looking for information about some books!"}, - {"type": "text", "text": "Can you explain what Descartes meant by 'I think, therefore I am'?"}, + {"type": "text", "text": "What is the best selling book?"}, ], } ], @@ -227,6 +228,7 @@ async def test_anthropic_llm_async_multiple_prompts(anthropic, request_vcr, snap 
model="claude-3-opus-20240229", max_tokens=15, system="Respond only in all caps.", + temperature=0.8, messages=[ { "role": "user", @@ -234,7 +236,7 @@ async def test_anthropic_llm_async_multiple_prompts(anthropic, request_vcr, snap {"type": "text", "text": "Hello, I am looking for information about some books!"}, { "type": "text", - "text": "Can you explain what Descartes meant by 'I think, therefore I am'?", + "text": "What is the best selling book?", }, ], } diff --git a/tests/contrib/anthropic/test_anthropic_llmobs.py b/tests/contrib/anthropic/test_anthropic_llmobs.py index ed8ad59dd5a..945796657fb 100644 --- a/tests/contrib/anthropic/test_anthropic_llmobs.py +++ b/tests/contrib/anthropic/test_anthropic_llmobs.py @@ -13,37 +13,21 @@ def test_completion(self, anthropic, ddtrace_global_config, mock_llmobs_writer, Also ensure the llmobs records have the correct tagging including trace/span ID for trace correlation. """ llm = anthropic.Anthropic() - with request_vcr.use_cassette("anthropic_hello_world.yaml"): + with request_vcr.use_cassette("anthropic_completion_multi_prompt.yaml"): llm.messages.create( model="claude-3-opus-20240229", - system="Respond in all caps everytime.", max_tokens=15, + system="Respond only in all caps.", + temperature=0.8, messages=[ { "role": "user", "content": [ - { - "type": "text", - "text": "Reply: 'Hello World!' 
when I say: 'Hello'", - }, - { - "type": "text", - "text": "Hello", - }, - ], - }, - {"role": "assistant", "content": "HELLO WORLD!"}, - { - "role": "user", - "content": [ - { - "type": "text", - "text": "Hello", - } + {"type": "text", "text": "Hello, I am looking for information about some books!"}, + {"type": "text", "text": "What is the best selling book?"}, ], - }, + } ], - temperature=0.8, ) span = mock_tracer.pop_traces()[0][0] assert mock_llmobs_writer.enqueue.call_count == 1 @@ -53,15 +37,59 @@ def test_completion(self, anthropic, ddtrace_global_config, mock_llmobs_writer, model_name="claude-3-opus-20240229", model_provider="anthropic", input_messages=[ - {"content": "Respond in all caps everytime.", "role": "system"}, - {"content": "Reply: 'Hello World!' when I say: 'Hello'", "role": "user"}, - {"content": "Hello", "role": "user"}, - {"content": "HELLO WORLD!", "role": "assistant"}, - {"content": "Hello", "role": "user"}, + {"content": "Respond only in all caps.", "role": "system"}, + {"content": "Hello, I am looking for information about some books!", "role": "user"}, + {"content": "What is the best selling book?", "role": "user"}, + ], + output_messages=[{"content": 'THE BEST-SELLING BOOK OF ALL TIME IS "DON', "role": "assistant"}], + metadata={"temperature": 0.8, "max_tokens": 15.0}, + token_metrics={"prompt_tokens": 32, "completion_tokens": 15, "total_tokens": 47}, + tags={"ml_app": ""}, + ) + ) + + def test_error(self, anthropic, ddtrace_global_config, mock_llmobs_writer, mock_tracer, request_vcr): + """Ensure llmobs records are emitted for completion endpoints when configured and there is an error. + + Also ensure the llmobs records have the correct tagging including trace/span ID for trace correlation. 
+ """ + llm = anthropic.Anthropic(api_key="invalid_api_key") + with request_vcr.use_cassette("anthropic_completion_invalid_api_key.yaml"): + try: + llm.messages.create( + model="claude-3-opus-20240229", + max_tokens=15, + system="Respond only in all caps.", + temperature=0.8, + messages=[ + { + "role": "user", + "content": [ + {"type": "text", "text": "Hello, I am looking for information about some books!"}, + {"type": "text", "text": "What is the best selling book?"}, + ], + } + ], + ) + except Exception: + pass + span = mock_tracer.pop_traces()[0][0] + assert mock_llmobs_writer.enqueue.call_count == 1 + mock_llmobs_writer.enqueue.assert_called_with( + _expected_llmobs_llm_span_event( + span, + model_name="claude-3-opus-20240229", + model_provider="anthropic", + input_messages=[ + {"content": "Respond only in all caps.", "role": "system"}, + {"content": "Hello, I am looking for information about some books!", "role": "user"}, + {"content": "What is the best selling book?", "role": "user"}, ], - output_messages=[{"content": "HELLO WORLD!", "role": "assistant"}], - metadata={"temperature": 0.8, "max_tokens": 15}, - token_metrics={"input_tokens": 41, "output_tokens": 8, "total_tokens": 49}, + output_messages=[{"content": ""}], + error="anthropic.AuthenticationError", + error_message=span.get_tag("error.message"), + error_stack=span.get_tag("error.stack"), + metadata={"temperature": 0.8, "max_tokens": 15.0}, tags={"ml_app": ""}, ) ) diff --git a/tests/snapshots/tests.contrib.anthropic.test_anthropic.test_anthropic_llm_multiple_prompts.json b/tests/snapshots/tests.contrib.anthropic.test_anthropic.test_anthropic_llm_multiple_prompts.json index c270e7a2473..aebc2405be8 100644 --- a/tests/snapshots/tests.contrib.anthropic.test_anthropic.test_anthropic_llm_multiple_prompts.json +++ b/tests/snapshots/tests.contrib.anthropic.test_anthropic.test_anthropic_llm_multiple_prompts.json @@ -14,13 +14,13 @@ "anthropic.request.api_key": "sk-...key>", 
"anthropic.request.messages.0.content.0.text": "Hello, I am looking for information about some books!", "anthropic.request.messages.0.content.0.type": "text", - "anthropic.request.messages.0.content.1.text": "Can you explain what Descartes meant by 'I think, therefore I am'?", + "anthropic.request.messages.0.content.1.text": "What is the best selling book?", "anthropic.request.messages.0.content.1.type": "text", "anthropic.request.messages.0.role": "user", "anthropic.request.model": "claude-3-opus-20240229", - "anthropic.request.parameters": "{\"max_tokens\": 15}", + "anthropic.request.parameters": "{\"max_tokens\": 15, \"temperature\": 0.8}", "anthropic.request.system": "Respond only in all caps.", - "anthropic.response.completions.content.0.text": "DESCARTES' FAMOUS PHRASE \"I THINK,", + "anthropic.response.completions.content.0.text": "THE BEST-SELLING BOOK OF ALL TIME IS \"DON", "anthropic.response.completions.content.0.type": "text", "anthropic.response.completions.finish_reason": "max_tokens", "anthropic.response.completions.role": "assistant", @@ -32,9 +32,9 @@ "_dd.top_level": 1, "_dd.tracer_kr": 1.0, "_sampling_priority_v1": 1, - "anthropic.response.usage.input_tokens": 45, + "anthropic.response.usage.input_tokens": 32, "anthropic.response.usage.output_tokens": 15, - "anthropic.response.usage.total_tokens": 60, + "anthropic.response.usage.total_tokens": 47, "process_id": 98153 }, "duration": 24102000, diff --git a/tests/snapshots/tests.contrib.anthropic.test_anthropic.test_anthropic_llm_multiple_prompts_no_history.json b/tests/snapshots/tests.contrib.anthropic.test_anthropic.test_anthropic_llm_multiple_prompts_no_history.json deleted file mode 100644 index bf53fdbbb48..00000000000 --- a/tests/snapshots/tests.contrib.anthropic.test_anthropic.test_anthropic_llm_multiple_prompts_no_history.json +++ /dev/null @@ -1,41 +0,0 @@ -[[ - { - "name": "anthropic.request", - "service": "", - "resource": "AsyncMessages.create", - "trace_id": 0, - "span_id": 1, - 
"parent_id": 0, - "type": "", - "error": 0, - "meta": { - "_dd.p.dm": "-0", - "_dd.p.tid": "665f5f5900000000", - "anthropic.request.api_key": "sk-...key>", - "anthropic.request.messages.0.content.0.text": "Hello, I am looking for information about some books!", - "anthropic.request.messages.0.content.0.type": "text", - "anthropic.request.messages.0.content.1.text": "Can you explain what Descartes meant by 'I think, therefore I am'?", - "anthropic.request.messages.0.content.1.type": "text", - "anthropic.request.messages.0.role": "user", - "anthropic.request.model": "claude-3-opus-20240229", - "anthropic.request.parameters": "{\"max_tokens\": 15}", - "anthropic.response.completions.content.0.text": "The phrase \"I think, therefore I am\" (originally in Latin as", - "anthropic.response.completions.content.0.type": "text", - "anthropic.response.completions.finish_reason": "max_tokens", - "anthropic.response.completions.role": "assistant", - "language": "python", - "runtime-id": "23da57548a3443fa96c5bf9137d02aa9" - }, - "metrics": { - "_dd.measured": 1, - "_dd.top_level": 1, - "_dd.tracer_kr": 1.0, - "_sampling_priority_v1": 1, - "anthropic.response.usage.input_tokens": 38, - "anthropic.response.usage.output_tokens": 15, - "anthropic.response.usage.total_tokens": 53, - "process_id": 66314 - }, - "duration": 2782000, - "start": 1717526361853306000 - }]] From f63c2e4c24e2d0b91bd0f06b834551a8a89b425d Mon Sep 17 00:00:00 2001 From: William Conti Date: Wed, 5 Jun 2024 13:19:09 -0400 Subject: [PATCH 25/33] add release note --- .../add-anthropic-llm-observability-27e914a3a23b5001.yaml | 4 ++++ 1 file changed, 4 insertions(+) create mode 100644 releasenotes/notes/add-anthropic-llm-observability-27e914a3a23b5001.yaml diff --git a/releasenotes/notes/add-anthropic-llm-observability-27e914a3a23b5001.yaml b/releasenotes/notes/add-anthropic-llm-observability-27e914a3a23b5001.yaml new file mode 100644 index 00000000000..6c2628a2b3a --- /dev/null +++ 
b/releasenotes/notes/add-anthropic-llm-observability-27e914a3a23b5001.yaml @@ -0,0 +1,4 @@ +--- +features: + - | + Anthropic: Add LLM Observability support for Anthropic messaging. From bf0b52137c1b179746d5d11476826c1f3dc71624 Mon Sep 17 00:00:00 2001 From: William Conti Date: Thu, 6 Jun 2024 10:44:32 -0400 Subject: [PATCH 26/33] remove unnecessary lock files --- .riot/requirements/1f1413a.txt | 62 ---------------------------------- .riot/requirements/ceb0f20.txt | 48 -------------------------- 2 files changed, 110 deletions(-) delete mode 100644 .riot/requirements/1f1413a.txt delete mode 100644 .riot/requirements/ceb0f20.txt diff --git a/.riot/requirements/1f1413a.txt b/.riot/requirements/1f1413a.txt deleted file mode 100644 index f8258a6316f..00000000000 --- a/.riot/requirements/1f1413a.txt +++ /dev/null @@ -1,62 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.10 -# by the following command: -# -# pip-compile --no-annotate .riot/requirements/1f1413a.in -# -ai21==2.4.1 -ai21-tokenizer==0.9.1 -annotated-types==0.7.0 -anthropic==0.28.0 -anyio==4.4.0 -attrs==23.2.0 -certifi==2024.6.2 -charset-normalizer==3.3.2 -coverage[toml]==7.5.3 -dataclasses-json==0.6.6 -distro==1.9.0 -exceptiongroup==1.2.1 -filelock==3.14.0 -fsspec==2024.5.0 -h11==0.14.0 -httpcore==1.0.5 -httpx==0.27.0 -huggingface-hub==0.23.2 -hypothesis==6.45.0 -idna==3.7 -iniconfig==2.0.0 -jiter==0.4.1 -marshmallow==3.21.2 -mock==5.1.0 -multidict==6.0.5 -mypy-extensions==1.0.0 -numexpr==2.10.0 -numpy==1.26.4 -opentracing==2.4.0 -packaging==24.0 -pluggy==1.5.0 -psutil==5.9.8 -pydantic==2.7.2 -pydantic-core==2.18.3 -pytest==8.2.1 -pytest-asyncio==0.23.7 -pytest-cov==5.0.0 -pytest-mock==3.14.0 -pytest-randomly==3.15.0 -pyyaml==6.0.1 -regex==2024.5.15 -requests==2.32.3 -sentencepiece==0.2.0 -sniffio==1.3.1 -sortedcontainers==2.4.0 -tenacity==8.3.0 -tiktoken==0.7.0 -tokenizers==0.19.1 -tomli==2.0.1 -tqdm==4.66.4 -typing-extensions==4.12.1 -typing-inspect==0.9.0 -urllib3==2.2.1 -vcrpy==6.0.1 
-wrapt==1.16.0 -yarl==1.9.4 diff --git a/.riot/requirements/ceb0f20.txt b/.riot/requirements/ceb0f20.txt deleted file mode 100644 index a8be801ba17..00000000000 --- a/.riot/requirements/ceb0f20.txt +++ /dev/null @@ -1,48 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.10 -# by the following command: -# -# pip-compile --no-annotate .riot/requirements/ceb0f20.in -# -annotated-types==0.7.0 -anthropic==0.28.0 -anyio==4.4.0 -attrs==23.2.0 -certifi==2024.6.2 -charset-normalizer==3.3.2 -coverage[toml]==7.5.3 -distro==1.9.0 -exceptiongroup==1.2.1 -filelock==3.14.0 -fsspec==2024.6.0 -h11==0.14.0 -httpcore==1.0.5 -httpx==0.27.0 -huggingface-hub==0.23.2 -hypothesis==6.45.0 -idna==3.7 -iniconfig==2.0.0 -jiter==0.4.1 -mock==5.1.0 -multidict==6.0.5 -opentracing==2.4.0 -packaging==24.0 -pluggy==1.5.0 -pydantic==2.7.3 -pydantic-core==2.18.4 -pytest==8.2.2 -pytest-asyncio==0.23.7 -pytest-cov==5.0.0 -pytest-mock==3.14.0 -pyyaml==6.0.1 -requests==2.32.3 -sniffio==1.3.1 -sortedcontainers==2.4.0 -tokenizers==0.19.1 -tomli==2.0.1 -tqdm==4.66.4 -typing-extensions==4.12.1 -urllib3==2.2.1 -vcrpy==6.0.1 -wrapt==1.16.0 -yarl==1.9.4 From e630709384a34bc4423807e7458ec9bfcd2bfa1b Mon Sep 17 00:00:00 2001 From: William Conti Date: Thu, 6 Jun 2024 10:47:40 -0400 Subject: [PATCH 27/33] add release note --- .../add-anthropic-streaming-support-01937d2e524f1bd0.yaml | 4 ++++ 1 file changed, 4 insertions(+) create mode 100644 releasenotes/notes/add-anthropic-streaming-support-01937d2e524f1bd0.yaml diff --git a/releasenotes/notes/add-anthropic-streaming-support-01937d2e524f1bd0.yaml b/releasenotes/notes/add-anthropic-streaming-support-01937d2e524f1bd0.yaml new file mode 100644 index 00000000000..eb396075891 --- /dev/null +++ b/releasenotes/notes/add-anthropic-streaming-support-01937d2e524f1bd0.yaml @@ -0,0 +1,4 @@ +--- +features: + - | + Anthropic: Add message streaming and async messaging streaming support. 
From 8f5e76d52ddcfeff55199cb5a4888e3aa54aa8fe Mon Sep 17 00:00:00 2001 From: William Conti <58711692+wconti27@users.noreply.github.com> Date: Fri, 7 Jun 2024 09:28:05 -0400 Subject: [PATCH 28/33] Update releasenotes/notes/add-anthropic-streaming-support-01937d2e524f1bd0.yaml Co-authored-by: Yun Kim <35776586+Yun-Kim@users.noreply.github.com> --- .../add-anthropic-streaming-support-01937d2e524f1bd0.yaml | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/releasenotes/notes/add-anthropic-streaming-support-01937d2e524f1bd0.yaml b/releasenotes/notes/add-anthropic-streaming-support-01937d2e524f1bd0.yaml index eb396075891..2efc98b66d0 100644 --- a/releasenotes/notes/add-anthropic-streaming-support-01937d2e524f1bd0.yaml +++ b/releasenotes/notes/add-anthropic-streaming-support-01937d2e524f1bd0.yaml @@ -1,4 +1,5 @@ --- features: - | - Anthropic: Add message streaming and async messaging streaming support. + Anthropic: Adds support for tracing synchronous and asynchronous message streaming. + LLM Observability: Adds support for tracing synchronous and asynchronous message streaming. 
From 1c1df2f4a78490614ee8a13dfe1f5e9004b7b72a Mon Sep 17 00:00:00 2001 From: William Conti Date: Fri, 7 Jun 2024 10:30:16 -0400 Subject: [PATCH 29/33] more changes --- ddtrace/contrib/anthropic/_streaming.py | 57 ++--- ddtrace/contrib/anthropic/patch.py | 23 +-- ddtrace/llmobs/_integrations/anthropic.py | 4 +- ...hropic_completion_async_stream_helper.yaml | 195 ------------------ ...> anthropic_completion_stream_helper.yaml} | 0 .../cassettes/anthropic_hello_world.yaml | 86 -------- tests/contrib/anthropic/conftest.py | 20 +- tests/contrib/anthropic/test_anthropic.py | 8 +- ...st_anthropic.test_anthropic_llm_basic.json | 39 ---- ...t_anthropic.test_anthropic_llm_stream.json | 6 +- ...pic.test_anthropic_llm_stream_helper.json} | 6 +- ...est_anthropic_llm_async_stream_helper.json | 39 ---- 12 files changed, 64 insertions(+), 419 deletions(-) delete mode 100644 tests/contrib/anthropic/cassettes/anthropic_completion_async_stream_helper.yaml rename tests/contrib/anthropic/cassettes/{anthropic_completion_sync_stream_helper.yaml => anthropic_completion_stream_helper.yaml} (100%) delete mode 100644 tests/contrib/anthropic/cassettes/anthropic_hello_world.yaml delete mode 100644 tests/snapshots/tests.contrib.anthropic.test_anthropic.test_anthropic_llm_basic.json rename tests/snapshots/{tests.contrib.anthropic.test_anthropic.test_anthropic_llm_sync_stream_helper.json => tests.contrib.anthropic.test_anthropic.test_anthropic_llm_stream_helper.json} (86%) delete mode 100644 tests/snapshots/tests.contrib.anthropic.test_anthropic_async.test_anthropic_llm_async_stream_helper.json diff --git a/ddtrace/contrib/anthropic/_streaming.py b/ddtrace/contrib/anthropic/_streaming.py index 5bd7ccdb1e6..e7222ee947f 100644 --- a/ddtrace/contrib/anthropic/_streaming.py +++ b/ddtrace/contrib/anthropic/_streaming.py @@ -1,12 +1,12 @@ import sys +from typing import Any from typing import Dict from typing import Tuple from ddtrace.internal.logger import get_logger +from 
ddtrace.llmobs._integrations.anthropic import _get_attr from ddtrace.vendor import wrapt -from .utils import _get_attr - log = get_logger(__name__) @@ -25,9 +25,8 @@ def handle_streamed_response(integration, resp, args, kwargs, span): class BaseTracedAnthropicStream(wrapt.ObjectProxy): def __init__(self, wrapped, integration, span, args, kwargs): super().__init__(wrapped) - n = kwargs.get("n", 1) or 1 self._dd_span = span - self._streamed_chunks = [[] for _ in range(n)] + self._streamed_chunks = [] self._dd_integration = integration self._kwargs = kwargs self._args = args @@ -60,7 +59,8 @@ def __next__(self): self._dd_span.finish() raise - def _text_stream(self): + def __stream_text__(self): + # this is overridden because it is a helper function that collects all stream content chunks for chunk in self: if chunk.type == "content_block_delta" and chunk.delta.type == "text_delta": yield chunk.delta.text @@ -97,7 +97,8 @@ async def __anext__(self): self._dd_span.finish() raise - async def _text_stream(self): + async def __stream_text__(self): + # this is overridden because it is a helper function that collects all stream content chunks async for chunk in self: if chunk.type == "content_block_delta" and chunk.delta.type == "text_delta": yield chunk.delta.text @@ -113,7 +114,8 @@ def __enter__(self): self._args, self._kwargs, ) - traced_stream.text_stream = traced_stream._text_stream() + # we need to set a text_stream attribute so we can trace the yielded chunks + traced_stream.text_stream = traced_stream.__stream_text__() return traced_stream def __exit__(self, exc_type, exc_val, exc_tb): @@ -130,7 +132,8 @@ async def __aenter__(self): self._args, self._kwargs, ) - traced_stream.text_stream = traced_stream._text_stream() + # we need to set a text_stream attribute so we can trace the yielded chunks + traced_stream.text_stream = traced_stream.__stream_text__() return traced_stream async def __aexit__(self, exc_type, exc_val, exc_tb): @@ -178,6 +181,7 @@ def 
_extract_from_chunk(chunk, message={}) -> Tuple[Dict[str, str], bool]: "content_block_start": _on_content_block_start_chunk, "content_block_delta": _on_content_block_delta_chunk, "message_delta": _on_message_delta_chunk, + "error": _on_error_chunk, } chunk_type = getattr(chunk, "type", "") transformation = TRANSFORMATIONS_BY_BLOCK_TYPE.get(chunk_type) @@ -195,6 +199,7 @@ def _on_message_start_chunk(chunk, message): chunk_message = getattr(chunk, "message", "") if chunk_message: content_text = "" + content_type = "" contents = getattr(chunk.message, "content", []) for content in contents: if content.type == "text": @@ -254,21 +259,23 @@ def _on_message_delta_chunk(chunk, message): chunk_usage = getattr(chunk, "usage", {}) if chunk_usage: message_usage = message.get("usage", {"output_tokens": 0, "input_tokens": 0}) - message_usage["output_tokens"] += getattr(chunk_usage, "output_tokens", 0) - message_usage["input_tokens"] += getattr(chunk_usage, "input_tokens", 0) + message_usage["output_tokens"] = getattr(chunk_usage, "output_tokens", 0) message["usage"] = message_usage return message -# To-Do: Handle error blocks appropriately -# def _on_error_chunk(chunk, message): -# # this is the start to a message.content block (possibly 1 of several content blocks) -# if getattr(chunk, "type", "") != "error": -# return message +def _on_error_chunk(chunk, message): + if getattr(chunk, "type", "") != "error": + return message -# message["content"].append({"type": "text", "text": ""}) -# return message + if getattr(chunk, "error"): + message["error"] = {} + if getattr(chunk.error, "type"): + message["error"]["type"] = chunk.error.type + if getattr(chunk.error, "message"): + message["error"]["message"] = chunk.error.message + return message def _tag_streamed_chat_completion_response(integration, span, message): @@ -276,8 +283,8 @@ def _tag_streamed_chat_completion_response(integration, span, message): if message is None: return for idx, block in enumerate(message["content"]): - 
span.set_tag_str("anthropic.response.completions.content.%d.type" % idx, str(integration.trunc(block["type"]))) - span.set_tag_str("anthropic.response.completions.content.%d.text" % idx, str(integration.trunc(block["text"]))) + span.set_tag_str(f"anthropic.response.completions.content.{idx}.type", str(block["type"])) + span.set_tag_str(f"anthropic.response.completions.content.{idx}.text", str(block["text"])) span.set_tag_str("anthropic.response.completions.role", str(message["role"])) if message.get("finish_reason") is not None: span.set_tag_str("anthropic.response.completions.finish_reason", str(message["finish_reason"])) @@ -286,7 +293,7 @@ def _tag_streamed_chat_completion_response(integration, span, message): integration.record_usage(span, usage) -def _is_stream(resp): +def _is_stream(resp: Any) -> bool: # type: (...) -> bool import anthropic @@ -295,7 +302,7 @@ def _is_stream(resp): return False -def _is_async_stream(resp): +def _is_async_stream(resp: Any) -> bool: # type: (...) -> bool import anthropic @@ -304,7 +311,7 @@ def _is_async_stream(resp): return False -def _is_stream_manager(resp): +def _is_stream_manager(resp: Any) -> bool: # type: (...) -> bool import anthropic @@ -313,10 +320,14 @@ def _is_stream_manager(resp): return False -def _is_async_stream_manager(resp): +def _is_async_stream_manager(resp: Any) -> bool: # type: (...) 
-> bool import anthropic if hasattr(anthropic, "AsyncMessageStreamManager") and isinstance(resp, anthropic.AsyncMessageStreamManager): return True return False + + +def is_streaming_operation(resp: Any) -> bool: + return _is_stream(resp) or _is_async_stream(resp) or _is_stream_manager(resp) or _is_async_stream_manager(resp) diff --git a/ddtrace/contrib/anthropic/patch.py b/ddtrace/contrib/anthropic/patch.py index 1df8d2a0aa6..030e3189d5e 100644 --- a/ddtrace/contrib/anthropic/patch.py +++ b/ddtrace/contrib/anthropic/patch.py @@ -10,11 +10,12 @@ from ddtrace.internal.logger import get_logger from ddtrace.internal.utils import get_argument_value from ddtrace.llmobs._integrations import AnthropicIntegration +from ddtrace.llmobs._integrations.anthropic import _get_attr from ddtrace.pin import Pin from ._streaming import handle_streamed_response +from ._streaming import is_streaming_operation from .utils import _extract_api_key -from .utils import _get_attr from .utils import handle_non_streamed_response from .utils import tag_params_on_span @@ -42,11 +43,9 @@ def traced_chat_model_generate(anthropic, pin, func, instance, args, kwargs): integration = anthropic._datadog_integration stream = False - operation_name = "stream" if "stream" in kwargs else func.__name__ - span = integration.trace( pin, - "%s.%s" % (instance.__class__.__name__, operation_name), + "%s.%s" % (instance.__class__.__name__, func.__name__), submit_to_llmobs=True, interface_type="chat_model", provider="anthropic", @@ -95,11 +94,7 @@ def traced_chat_model_generate(anthropic, pin, func, instance, args, kwargs): chat_completions = func(*args, **kwargs) - if ( - isinstance(chat_completions, anthropic.Stream) - or isinstance(chat_completions, anthropic.lib.streaming._messages.MessageStreamManager) - or isinstance(chat_completions, anthropic.lib.streaming._messages.AsyncMessageStreamManager) - ): + if is_streaming_operation(chat_completions): stream = True return handle_streamed_response(integration, 
chat_completions, args, kwargs, span) else: @@ -109,7 +104,7 @@ def traced_chat_model_generate(anthropic, pin, func, instance, args, kwargs): raise finally: # we don't want to finish the span if it is a stream as it will get finished once the iterator is exhausted - if not stream: + if span.error or not stream: if integration.is_pc_sampled_llmobs(span): integration.llmobs_set_tags(span=span, resp=chat_completions, args=args, kwargs=kwargs) span.finish() @@ -122,11 +117,9 @@ async def traced_async_chat_model_generate(anthropic, pin, func, instance, args, integration = anthropic._datadog_integration stream = False - operation_name = "stream" if "stream" in kwargs else func.__name__ - span = integration.trace( pin, - "%s.%s" % (instance.__class__.__name__, operation_name), + "%s.%s" % (instance.__class__.__name__, func.__name__), submit_to_llmobs=True, interface_type="chat_model", provider="anthropic", @@ -175,7 +168,7 @@ async def traced_async_chat_model_generate(anthropic, pin, func, instance, args, chat_completions = await func(*args, **kwargs) - if isinstance(chat_completions, anthropic.AsyncStream): + if is_streaming_operation(chat_completions): stream = True return handle_streamed_response(integration, chat_completions, args, kwargs, span) else: @@ -185,7 +178,7 @@ async def traced_async_chat_model_generate(anthropic, pin, func, instance, args, raise finally: # we don't want to finish the span if it is a stream as it will get finished once the iterator is exhausted - if not stream: + if span.error or not stream: if integration.is_pc_sampled_llmobs(span): integration.llmobs_set_tags(span=span, resp=chat_completions, args=args, kwargs=kwargs) span.finish() diff --git a/ddtrace/llmobs/_integrations/anthropic.py b/ddtrace/llmobs/_integrations/anthropic.py index c00f02ea995..4e368e6de5c 100644 --- a/ddtrace/llmobs/_integrations/anthropic.py +++ b/ddtrace/llmobs/_integrations/anthropic.py @@ -134,8 +134,8 @@ def _extract_output_message(self, response): def 
record_usage(self, span: Span, usage: Dict[str, Any]) -> None: if not usage: return - input_tokens = _get_attr(usage, "input_tokens", 0) - output_tokens = _get_attr(usage, "output_tokens", 0) + input_tokens = _get_attr(usage, "input_tokens", None) + output_tokens = _get_attr(usage, "output_tokens", None) if input_tokens is not None: span.set_metric("anthropic.response.usage.input_tokens", input_tokens) diff --git a/tests/contrib/anthropic/cassettes/anthropic_completion_async_stream_helper.yaml b/tests/contrib/anthropic/cassettes/anthropic_completion_async_stream_helper.yaml deleted file mode 100644 index 531a058d414..00000000000 --- a/tests/contrib/anthropic/cassettes/anthropic_completion_async_stream_helper.yaml +++ /dev/null @@ -1,195 +0,0 @@ -interactions: -- request: - body: '{"max_tokens": 15, "messages": [{"role": "user", "content": "Can you explain - what Descartes meant by ''I think, therefore I am''?"}], "model": "claude-3-opus-20240229", - "stream": true}' - headers: - accept: - - application/json - accept-encoding: - - gzip, deflate - anthropic-version: - - '2023-06-01' - connection: - - keep-alive - content-length: - - '182' - content-type: - - application/json - host: - - api.anthropic.com - user-agent: - - AsyncAnthropic/Python 0.28.0 - x-stainless-arch: - - arm64 - x-stainless-async: - - async:asyncio - x-stainless-lang: - - python - x-stainless-os: - - MacOS - x-stainless-package-version: - - 0.28.0 - x-stainless-runtime: - - CPython - x-stainless-runtime-version: - - 3.10.13 - x-stainless-stream-helper: - - messages - method: POST - uri: https://api.anthropic.com/v1/messages - response: - body: - string: 'event: message_start - - data: {"type":"message_start","message":{"id":"msg_01NuXdck4ZpJDQsVrGiSfXKj","type":"message","role":"assistant","model":"claude-3-opus-20240229","content":[],"stop_reason":null,"stop_sequence":null,"usage":{"input_tokens":27,"output_tokens":1}} } - - - event: content_block_start - - data: 
{"type":"content_block_start","index":0,"content_block":{"type":"text","text":""} } - - - event: ping - - data: {"type": "ping"} - - - event: content_block_delta - - data: {"type":"content_block_delta","index":0,"delta":{"type":"text_delta","text":"The"} } - - - event: content_block_delta - - data: {"type":"content_block_delta","index":0,"delta":{"type":"text_delta","text":" - phrase"} } - - - event: content_block_delta - - data: {"type":"content_block_delta","index":0,"delta":{"type":"text_delta","text":" - \""} } - - - event: content_block_delta - - data: {"type":"content_block_delta","index":0,"delta":{"type":"text_delta","text":"I"} } - - - event: content_block_delta - - data: {"type":"content_block_delta","index":0,"delta":{"type":"text_delta","text":" - think"} } - - - event: content_block_delta - - data: {"type":"content_block_delta","index":0,"delta":{"type":"text_delta","text":","} } - - - event: content_block_delta - - data: {"type":"content_block_delta","index":0,"delta":{"type":"text_delta","text":" - therefore"} } - - - event: content_block_delta - - data: {"type":"content_block_delta","index":0,"delta":{"type":"text_delta","text":" - I"} } - - - event: content_block_delta - - data: {"type":"content_block_delta","index":0,"delta":{"type":"text_delta","text":" - am"} } - - - event: content_block_delta - - data: {"type":"content_block_delta","index":0,"delta":{"type":"text_delta","text":"\""} } - - - event: content_block_delta - - data: {"type":"content_block_delta","index":0,"delta":{"type":"text_delta","text":" - ("} } - - - event: content_block_delta - - data: {"type":"content_block_delta","index":0,"delta":{"type":"text_delta","text":"originally"} } - - - event: content_block_delta - - data: {"type":"content_block_delta","index":0,"delta":{"type":"text_delta","text":" - in"} } - - - event: content_block_delta - - data: {"type":"content_block_delta","index":0,"delta":{"type":"text_delta","text":" - Latin"} } - - - event: content_block_delta - - data: 
{"type":"content_block_delta","index":0,"delta":{"type":"text_delta","text":" - as"} } - - - event: content_block_stop - - data: {"type":"content_block_stop","index":0 } - - - event: message_delta - - data: {"type":"message_delta","delta":{"stop_reason":"max_tokens","stop_sequence":null},"usage":{"output_tokens":15} } - - - event: message_stop - - data: {"type":"message_stop" } - - - ' - headers: - CF-Cache-Status: - - DYNAMIC - CF-RAY: - - 88e380a02a84726b-EWR - Cache-Control: - - no-cache - Connection: - - keep-alive - Content-Type: - - text/event-stream; charset=utf-8 - Date: - - Mon, 03 Jun 2024 23:35:57 GMT - Server: - - cloudflare - Transfer-Encoding: - - chunked - anthropic-ratelimit-requests-limit: - - '5' - anthropic-ratelimit-requests-remaining: - - '4' - anthropic-ratelimit-requests-reset: - - '2024-06-03T23:35:57Z' - anthropic-ratelimit-tokens-limit: - - '10000' - anthropic-ratelimit-tokens-remaining: - - '10000' - anthropic-ratelimit-tokens-reset: - - '2024-06-03T23:35:57Z' - request-id: - - req_018CVoMUAAn8vhLNvTRkmB98 - via: - - 1.1 google - status: - code: 200 - message: OK -version: 1 diff --git a/tests/contrib/anthropic/cassettes/anthropic_completion_sync_stream_helper.yaml b/tests/contrib/anthropic/cassettes/anthropic_completion_stream_helper.yaml similarity index 100% rename from tests/contrib/anthropic/cassettes/anthropic_completion_sync_stream_helper.yaml rename to tests/contrib/anthropic/cassettes/anthropic_completion_stream_helper.yaml diff --git a/tests/contrib/anthropic/cassettes/anthropic_hello_world.yaml b/tests/contrib/anthropic/cassettes/anthropic_hello_world.yaml deleted file mode 100644 index 35b7345677f..00000000000 --- a/tests/contrib/anthropic/cassettes/anthropic_hello_world.yaml +++ /dev/null @@ -1,86 +0,0 @@ -interactions: -- request: - body: '{"max_tokens": 15, "messages": [{"role": "user", "content": [{"type": "text", - "text": "Reply: ''Hello World!'' when I say: ''Hello''"}, {"type": "text", "text": - "Hello"}]}, {"role": 
"assistant", "content": "Hello World!"}, {"role": "user", - "content": [{"type": "text", "text": "Hello"}]}], "model": "claude-3-opus-20240229", - "system": "Respond in all caps everytime.", "temperature": 0.8}' - headers: - accept: - - application/json - accept-encoding: - - gzip, deflate - anthropic-version: - - '2023-06-01' - connection: - - keep-alive - content-length: - - '384' - content-type: - - application/json - host: - - api.anthropic.com - user-agent: - - Anthropic/Python 0.28.0 - x-stainless-arch: - - arm64 - x-stainless-async: - - 'false' - x-stainless-lang: - - python - x-stainless-os: - - MacOS - x-stainless-package-version: - - 0.28.0 - x-stainless-runtime: - - CPython - x-stainless-runtime-version: - - 3.10.9 - method: POST - uri: https://api.anthropic.com/v1/messages - response: - body: - string: !!binary | - H4sIAAAAAAAAA0yOzUrEQBCEX0XrPIHsuLBmjouCh8CyXkREwphpYnTSk6R7QAl5d8mi4Kngqx9q - QR/gMEjXlDtbvR868h/n0B2m8/GtGp7b4wQD/R5pS5GI7wgGc4ob8CK9qGeFwZACRTi00edAxU2R - xiyFLe2+tLaCQZtYiRXuZfkbVPraqhdxeLiv69PV0+mxvrvG+mogmsZmJi+J4UAcGs0z49cQmjJx - S3CcYzTIl29uQc9j1kbTJ7HA7XcGKet/dLuuPwAAAP//AwA1yXoc+AAAAA== - headers: - CF-Cache-Status: - - DYNAMIC - CF-RAY: - - 88f09dc5dffe421d-EWR - Connection: - - keep-alive - Content-Encoding: - - gzip - Content-Type: - - application/json - Date: - - Wed, 05 Jun 2024 13:47:46 GMT - Server: - - cloudflare - Transfer-Encoding: - - chunked - anthropic-ratelimit-requests-limit: - - '5' - anthropic-ratelimit-requests-remaining: - - '4' - anthropic-ratelimit-requests-reset: - - '2024-06-05T13:47:57Z' - anthropic-ratelimit-tokens-limit: - - '10000' - anthropic-ratelimit-tokens-remaining: - - '10000' - anthropic-ratelimit-tokens-reset: - - '2024-06-05T13:47:57Z' - request-id: - - req_01BJ7GJG1YzsYSY3xUVoaNaX - via: - - 1.1 google - x-cloud-trace-context: - - 2810149c979072b48cbc451ef622b3a8 - status: - code: 200 - message: OK -version: 1 diff --git a/tests/contrib/anthropic/conftest.py b/tests/contrib/anthropic/conftest.py 
index ad3b49dfdad..8ae466dd0bc 100644 --- a/tests/contrib/anthropic/conftest.py +++ b/tests/contrib/anthropic/conftest.py @@ -33,16 +33,18 @@ def snapshot_tracer(anthropic): @pytest.fixture def mock_tracer(ddtrace_global_config, anthropic): - pin = Pin.get_from(anthropic) - mock_tracer = DummyTracer(writer=DummyWriter(trace_flush_enabled=False)) - pin.override(anthropic, tracer=mock_tracer) - pin.tracer.configure() - if ddtrace_global_config.get("_llmobs_enabled", False): - # Have to disable and re-enable LLMObs to use to mock tracer. + try: + pin = Pin.get_from(anthropic) + mock_tracer = DummyTracer(writer=DummyWriter(trace_flush_enabled=False)) + pin.override(anthropic, tracer=mock_tracer) + pin.tracer.configure() + if ddtrace_global_config.get("_llmobs_enabled", False): + # Have to disable and re-enable LLMObs to use to mock tracer. + LLMObs.disable() + LLMObs.enable(_tracer=mock_tracer, integrations_enabled=False) + yield mock_tracer + finally: LLMObs.disable() - LLMObs.enable(_tracer=mock_tracer, integrations_enabled=False) - yield mock_tracer - LLMObs.disable() @pytest.fixture diff --git a/tests/contrib/anthropic/test_anthropic.py b/tests/contrib/anthropic/test_anthropic.py index d05183f7d8d..65e0ca18c10 100644 --- a/tests/contrib/anthropic/test_anthropic.py +++ b/tests/contrib/anthropic/test_anthropic.py @@ -37,7 +37,7 @@ def test_global_tags(ddtrace_config_anthropic, anthropic, request_vcr, mock_trac @pytest.mark.snapshot(token="tests.contrib.anthropic.test_anthropic.test_anthropic_llm", ignores=["resource"]) -def test_anthropic_llm_sync(anthropic, request_vcr): +def test_anthropic_llm_sync_create(anthropic, request_vcr): llm = anthropic.Anthropic() with request_vcr.use_cassette("anthropic_completion.yaml"): llm.messages.create( @@ -219,10 +219,8 @@ async def test_global_tags_async(ddtrace_config_anthropic, anthropic, request_vc @pytest.mark.asyncio -async def test_anthropic_llm_async_basic(anthropic, request_vcr, snapshot_context): - with snapshot_context( 
- token="tests.contrib.anthropic.test_anthropic.test_anthropic_llm_basic", ignores=["resource"] - ): +async def test_anthropic_llm_async_create(anthropic, request_vcr, snapshot_context): + with snapshot_context(token="tests.contrib.anthropic.test_anthropic.test_anthropic_llm", ignores=["resource"]): llm = anthropic.AsyncAnthropic() with request_vcr.use_cassette("anthropic_completion.yaml"): await llm.messages.create( diff --git a/tests/snapshots/tests.contrib.anthropic.test_anthropic.test_anthropic_llm_basic.json b/tests/snapshots/tests.contrib.anthropic.test_anthropic.test_anthropic_llm_basic.json deleted file mode 100644 index 3bc6b1ea037..00000000000 --- a/tests/snapshots/tests.contrib.anthropic.test_anthropic.test_anthropic_llm_basic.json +++ /dev/null @@ -1,39 +0,0 @@ -[[ - { - "name": "anthropic.request", - "service": "", - "resource": "AsyncMessages.stream", - "trace_id": 0, - "span_id": 1, - "parent_id": 0, - "type": "", - "error": 0, - "meta": { - "_dd.p.dm": "-0", - "_dd.p.tid": "665f5f5900000000", - "anthropic.request.api_key": "sk-...key>", - "anthropic.request.messages.0.content.0.text": "Can you explain what Descartes meant by 'I think, therefore I am'?", - "anthropic.request.messages.0.content.0.type": "text", - "anthropic.request.messages.0.role": "user", - "anthropic.request.model": "claude-3-opus-20240229", - "anthropic.request.parameters": "{\"max_tokens\": 15}, \"stream\": true}", - "anthropic.response.completions.content.0.text": "When Nietzsche famously declared \"God is dead\" in his", - "anthropic.response.completions.content.0.type": "text", - "anthropic.response.completions.finish_reason": "max_tokens", - "anthropic.response.completions.role": "assistant", - "language": "python", - "runtime-id": "23da57548a3443fa96c5bf9137d02aa9" - }, - "metrics": { - "_dd.measured": 1, - "_dd.top_level": 1, - "_dd.tracer_kr": 1.0, - "_sampling_priority_v1": 1, - "anthropic.response.usage.input_tokens": 22, - "anthropic.response.usage.output_tokens": 15, - 
"anthropic.response.usage.total_tokens": 37, - "process_id": 66314 - }, - "duration": 2572000, - "start": 1717526361825031000 - }]] diff --git a/tests/snapshots/tests.contrib.anthropic.test_anthropic.test_anthropic_llm_stream.json b/tests/snapshots/tests.contrib.anthropic.test_anthropic.test_anthropic_llm_stream.json index 3156c30fd3a..2a0c370ddcf 100644 --- a/tests/snapshots/tests.contrib.anthropic.test_anthropic.test_anthropic_llm_stream.json +++ b/tests/snapshots/tests.contrib.anthropic.test_anthropic.test_anthropic_llm_stream.json @@ -2,7 +2,7 @@ { "name": "anthropic.request", "service": "", - "resource": "Messages.stream", + "resource": "Messages.create", "trace_id": 0, "span_id": 1, "parent_id": 0, @@ -30,8 +30,8 @@ "_dd.tracer_kr": 1.0, "_sampling_priority_v1": 1, "anthropic.response.usage.input_tokens": 27, - "anthropic.response.usage.output_tokens": 16, - "anthropic.response.usage.total_tokens": 43, + "anthropic.response.usage.output_tokens": 15, + "anthropic.response.usage.total_tokens": 42, "process_id": 33643 }, "duration": 10432000, diff --git a/tests/snapshots/tests.contrib.anthropic.test_anthropic.test_anthropic_llm_sync_stream_helper.json b/tests/snapshots/tests.contrib.anthropic.test_anthropic.test_anthropic_llm_stream_helper.json similarity index 86% rename from tests/snapshots/tests.contrib.anthropic.test_anthropic.test_anthropic_llm_sync_stream_helper.json rename to tests/snapshots/tests.contrib.anthropic.test_anthropic.test_anthropic_llm_stream_helper.json index 069078aa916..da73b6cbde3 100644 --- a/tests/snapshots/tests.contrib.anthropic.test_anthropic.test_anthropic_llm_sync_stream_helper.json +++ b/tests/snapshots/tests.contrib.anthropic.test_anthropic.test_anthropic_llm_stream_helper.json @@ -16,7 +16,7 @@ "anthropic.request.messages.0.content.0.type": "text", "anthropic.request.messages.0.role": "user", "anthropic.request.model": "claude-3-opus-20240229", - "anthropic.request.parameters": "{\"max_tokens\": 15, \"model\": 
\"claude-3-opus-20240229\"}", + "anthropic.request.parameters": "{\"max_tokens\": 15}", "anthropic.response.completions.content.0.text": "The famous philosophical statement \"I think, therefore I am\" (originally in", "anthropic.response.completions.content.0.type": "text", "anthropic.response.completions.finish_reason": "max_tokens", @@ -30,8 +30,8 @@ "_dd.tracer_kr": 1.0, "_sampling_priority_v1": 1, "anthropic.response.usage.input_tokens": 27, - "anthropic.response.usage.output_tokens": 16, - "anthropic.response.usage.total_tokens": 43, + "anthropic.response.usage.output_tokens": 15, + "anthropic.response.usage.total_tokens": 42, "process_id": 36523 }, "duration": 1474332000, diff --git a/tests/snapshots/tests.contrib.anthropic.test_anthropic_async.test_anthropic_llm_async_stream_helper.json b/tests/snapshots/tests.contrib.anthropic.test_anthropic_async.test_anthropic_llm_async_stream_helper.json deleted file mode 100644 index bdaf0439fd1..00000000000 --- a/tests/snapshots/tests.contrib.anthropic.test_anthropic_async.test_anthropic_llm_async_stream_helper.json +++ /dev/null @@ -1,39 +0,0 @@ -[[ - { - "name": "anthropic.request", - "service": "", - "resource": "AsyncMessages.stream", - "trace_id": 0, - "span_id": 1, - "parent_id": 0, - "type": "", - "error": 0, - "meta": { - "_dd.p.dm": "-0", - "_dd.p.tid": "665e542e00000000", - "anthropic.request.api_key": "sk-...key>", - "anthropic.request.messages.0.content.0.text": "Can you explain what Descartes meant by 'I think, therefore I am'?", - "anthropic.request.messages.0.content.0.type": "text", - "anthropic.request.messages.0.role": "user", - "anthropic.request.model": "claude-3-opus-20240229", - "anthropic.request.parameters": "{\"max_tokens\": 15, \"model\": \"claude-3-opus-20240229\"}", - "anthropic.response.completions.content.0.text": "The phrase \"I think, therefore I am\" (originally in Latin as", - "anthropic.response.completions.content.0.type": "text", - "anthropic.response.completions.finish_reason": 
"max_tokens", - "anthropic.response.completions.role": "assistant", - "language": "python", - "runtime-id": "f59aff6a77ac4933a3d59e282a8126b2" - }, - "metrics": { - "_dd.measured": 1, - "_dd.top_level": 1, - "_dd.tracer_kr": 1.0, - "_sampling_priority_v1": 1, - "anthropic.response.usage.input_tokens": 27, - "anthropic.response.usage.output_tokens": 16, - "anthropic.response.usage.total_tokens": 43, - "process_id": 5638 - }, - "duration": 7087734000, - "start": 1717457966661153000 - }]] From 613bf82a8cc7e104f7b2b5fa7a4b45c8d259c6a7 Mon Sep 17 00:00:00 2001 From: William Conti Date: Fri, 7 Jun 2024 10:49:41 -0400 Subject: [PATCH 30/33] fix function signature --- ddtrace/contrib/anthropic/_streaming.py | 4 ---- 1 file changed, 4 deletions(-) diff --git a/ddtrace/contrib/anthropic/_streaming.py b/ddtrace/contrib/anthropic/_streaming.py index e7222ee947f..147075be614 100644 --- a/ddtrace/contrib/anthropic/_streaming.py +++ b/ddtrace/contrib/anthropic/_streaming.py @@ -294,7 +294,6 @@ def _tag_streamed_chat_completion_response(integration, span, message): def _is_stream(resp: Any) -> bool: - # type: (...) -> bool import anthropic if hasattr(anthropic, "Stream") and isinstance(resp, anthropic.Stream): @@ -303,7 +302,6 @@ def _is_stream(resp: Any) -> bool: def _is_async_stream(resp: Any) -> bool: - # type: (...) -> bool import anthropic if hasattr(anthropic, "AsyncStream") and isinstance(resp, anthropic.AsyncStream): @@ -312,7 +310,6 @@ def _is_async_stream(resp: Any) -> bool: def _is_stream_manager(resp: Any) -> bool: - # type: (...) -> bool import anthropic if hasattr(anthropic, "MessageStreamManager") and isinstance(resp, anthropic.MessageStreamManager): @@ -321,7 +318,6 @@ def _is_stream_manager(resp: Any) -> bool: def _is_async_stream_manager(resp: Any) -> bool: - # type: (...) 
-> bool import anthropic if hasattr(anthropic, "AsyncMessageStreamManager") and isinstance(resp, anthropic.AsyncMessageStreamManager): From fafd19584a2f56b9b7b9548f662e51f31581a8f2 Mon Sep 17 00:00:00 2001 From: William Conti Date: Tue, 11 Jun 2024 05:45:48 -0400 Subject: [PATCH 31/33] remove content block start --- ddtrace/contrib/anthropic/_streaming.py | 33 +++++++++---------------- 1 file changed, 12 insertions(+), 21 deletions(-) diff --git a/ddtrace/contrib/anthropic/_streaming.py b/ddtrace/contrib/anthropic/_streaming.py index 147075be614..8295530e565 100644 --- a/ddtrace/contrib/anthropic/_streaming.py +++ b/ddtrace/contrib/anthropic/_streaming.py @@ -33,6 +33,11 @@ def __init__(self, wrapped, integration, span, args, kwargs): class TracedAnthropicStream(BaseTracedAnthropicStream): + def __init__(self, wrapped, integration, span, args, kwargs): + super().__init__(wrapped, integration, span, args, kwargs) + # we need to set a text_stream attribute so we can trace the yielded chunks + self.text_stream = self.__stream_text__() + def __enter__(self): self.__wrapped__.__enter__() return self @@ -67,6 +72,11 @@ def __stream_text__(self): class TracedAnthropicAsyncStream(BaseTracedAnthropicStream): + def __init__(self, wrapped, integration, span, args, kwargs): + super().__init__(wrapped, integration, span, args, kwargs) + # we need to set a text_stream attribute so we can trace the yielded chunks + self.text_stream = self.__stream_text__() + async def __aenter__(self): await self.__wrapped__.__aenter__() return self @@ -114,8 +124,6 @@ def __enter__(self): self._args, self._kwargs, ) - # we need to set a text_stream attribute so we can trace the yielded chunks - traced_stream.text_stream = traced_stream.__stream_text__() return traced_stream def __exit__(self, exc_type, exc_val, exc_tb): @@ -132,8 +140,6 @@ async def __aenter__(self): self._args, self._kwargs, ) - # we need to set a text_stream attribute so we can trace the yielded chunks - 
traced_stream.text_stream = traced_stream.__stream_text__() return traced_stream async def __aexit__(self, exc_type, exc_val, exc_tb): @@ -174,7 +180,7 @@ def _construct_message(streamed_chunks): return message -def _extract_from_chunk(chunk, message={}) -> Tuple[Dict[str, str], bool]: +def _extract_from_chunk(chunk, message) -> Tuple[Dict[str, str], bool]: """Constructs a chat message dictionary from streamed chunks given chunk type""" TRANSFORMATIONS_BY_BLOCK_TYPE = { "message_start": _on_message_start_chunk, @@ -198,29 +204,14 @@ def _on_message_start_chunk(chunk, message): chunk_message = getattr(chunk, "message", "") if chunk_message: - content_text = "" - content_type = "" - contents = getattr(chunk.message, "content", []) - for content in contents: - if content.type == "text": - content_text += content.text - content_type = "text" - elif content.type == "image": - content_text = "([IMAGE DETECTED])" - content_type = "image" - message["content"].append({"text": content_text, "type": content_type}) - chunk_role = getattr(chunk_message, "role", "") chunk_usage = getattr(chunk_message, "usage", "") - chunk_finish_reason = getattr(chunk_message, "stop_reason", "") if chunk_role: message["role"] = chunk_role if chunk_usage: message["usage"] = {} message["usage"]["input_tokens"] = getattr(chunk_usage, "input_tokens", 0) - message["usage"]["output_tokens"] = getattr(chunk_usage, "output_tokens", 0) - if chunk_finish_reason: - message["finish_reason"] = chunk_finish_reason + message["usage"]["output_tokens"] = 0 return message From 952ed59d04aa7871564027fbe97d7e55d0f66390 Mon Sep 17 00:00:00 2001 From: William Conti Date: Tue, 11 Jun 2024 13:10:44 -0400 Subject: [PATCH 32/33] more tests --- ddtrace/contrib/anthropic/_streaming.py | 32 +----- .../cassettes/anthropic_create_image.yaml | 86 ++++++++++++++ tests/contrib/anthropic/images/bits.png | Bin 0 -> 55752 bytes tests/contrib/anthropic/test_anthropic.py | 33 ++++++ .../anthropic/test_anthropic_llmobs.py | 106 
++++++++++++++++++ ...ropic.test_anthropic_llm_create_image.json | 41 +++++++ ...ropic.test_anthropic_llm_stream_image.json | 41 +++++++ 7 files changed, 310 insertions(+), 29 deletions(-) create mode 100644 tests/contrib/anthropic/cassettes/anthropic_create_image.yaml create mode 100644 tests/contrib/anthropic/images/bits.png create mode 100644 tests/snapshots/tests.contrib.anthropic.test_anthropic.test_anthropic_llm_create_image.json create mode 100644 tests/snapshots/tests.contrib.anthropic.test_anthropic.test_anthropic_llm_stream_image.json diff --git a/ddtrace/contrib/anthropic/_streaming.py b/ddtrace/contrib/anthropic/_streaming.py index 8295530e565..a0103d33e01 100644 --- a/ddtrace/contrib/anthropic/_streaming.py +++ b/ddtrace/contrib/anthropic/_streaming.py @@ -3,6 +3,8 @@ from typing import Dict from typing import Tuple +import anthropic + from ddtrace.internal.logger import get_logger from ddtrace.llmobs._integrations.anthropic import _get_attr from ddtrace.vendor import wrapt @@ -148,7 +150,6 @@ async def __aexit__(self, exc_type, exc_val, exc_tb): def _process_finished_stream(integration, span, args, kwargs, streamed_chunks): # builds the response message given streamed chunks and sets according span tags - resp_message = {} try: resp_message = _construct_message(streamed_chunks) @@ -174,9 +175,6 @@ def _construct_message(streamed_chunks): message = {"content": []} for chunk in streamed_chunks: message = _extract_from_chunk(chunk, message) - - if "finish_reason" in message: - return message return message @@ -199,9 +197,6 @@ def _extract_from_chunk(chunk, message) -> Tuple[Dict[str, str], bool]: def _on_message_start_chunk(chunk, message): # this is the starting chunk of the message - if getattr(chunk, "type", "") != "message_start": - return message - chunk_message = getattr(chunk, "message", "") if chunk_message: chunk_role = getattr(chunk_message, "role", "") @@ -211,24 +206,17 @@ def _on_message_start_chunk(chunk, message): if chunk_usage: 
message["usage"] = {} message["usage"]["input_tokens"] = getattr(chunk_usage, "input_tokens", 0) - message["usage"]["output_tokens"] = 0 return message def _on_content_block_start_chunk(chunk, message): # this is the start to a message.content block (possibly 1 of several content blocks) - if getattr(chunk, "type", "") != "content_block_start": - return message - message["content"].append({"type": "text", "text": ""}) return message def _on_content_block_delta_chunk(chunk, message): # delta events contain new content for the current message.content block - if getattr(chunk, "type", "") != "content_block_delta": - return message - delta_block = getattr(chunk, "delta", "") chunk_content = getattr(delta_block, "text", "") if chunk_content: @@ -238,9 +226,6 @@ def _on_content_block_delta_chunk(chunk, message): def _on_message_delta_chunk(chunk, message): # message delta events signal the end of the message - if getattr(chunk, "type", "") != "message_delta": - return message - delta_block = getattr(chunk, "delta", "") chunk_finish_reason = getattr(delta_block, "stop_reason", "") if chunk_finish_reason: @@ -257,9 +242,6 @@ def _on_message_delta_chunk(chunk, message): def _on_error_chunk(chunk, message): - if getattr(chunk, "type", "") != "error": - return message - if getattr(chunk, "error"): message["error"] = {} if getattr(chunk.error, "type"): @@ -275,7 +257,7 @@ def _tag_streamed_chat_completion_response(integration, span, message): return for idx, block in enumerate(message["content"]): span.set_tag_str(f"anthropic.response.completions.content.{idx}.type", str(block["type"])) - span.set_tag_str(f"anthropic.response.completions.content.{idx}.text", str(block["text"])) + span.set_tag_str(f"anthropic.response.completions.content.{idx}.text", integration.trunc(str(block["text"]))) span.set_tag_str("anthropic.response.completions.role", str(message["role"])) if message.get("finish_reason") is not None: span.set_tag_str("anthropic.response.completions.finish_reason", 
str(message["finish_reason"])) @@ -285,32 +267,24 @@ def _tag_streamed_chat_completion_response(integration, span, message): def _is_stream(resp: Any) -> bool: - import anthropic - if hasattr(anthropic, "Stream") and isinstance(resp, anthropic.Stream): return True return False def _is_async_stream(resp: Any) -> bool: - import anthropic - if hasattr(anthropic, "AsyncStream") and isinstance(resp, anthropic.AsyncStream): return True return False def _is_stream_manager(resp: Any) -> bool: - import anthropic - if hasattr(anthropic, "MessageStreamManager") and isinstance(resp, anthropic.MessageStreamManager): return True return False def _is_async_stream_manager(resp: Any) -> bool: - import anthropic - if hasattr(anthropic, "AsyncMessageStreamManager") and isinstance(resp, anthropic.AsyncMessageStreamManager): return True return False diff --git a/tests/contrib/anthropic/cassettes/anthropic_create_image.yaml b/tests/contrib/anthropic/cassettes/anthropic_create_image.yaml new file mode 100644 index 00000000000..300824bae0b --- /dev/null +++ b/tests/contrib/anthropic/cassettes/anthropic_create_image.yaml @@ -0,0 +1,86 @@ +interactions: +- request: + body: '{"max_tokens": 15, "messages": [{"role": "user", "content": [{"type": "text", + "text": "Hello, what do you see in the following image?"}, {"type": "image", + "source": {"type": "base64", "media_type": "image/png", "data": 
"iVBORw0KGgoAAAANSUhEUgAAAZUAAAGVCAIAAAC5OftsAAAAAXNSR0IArs4c6QAAAERlWElmTU0AKgAAAAgAAYdpAAQAAAABAAAAGgAAAAAAA6ABAAMAAAABAAEAAKACAAQAAAABAAABlaADAAQAAAABAAABlQAAAAAnfwwwAABAAElEQVR4Aex9CYBcRZl/vaOvue9Mksl9kgAJJECAcMohBLkEZBd0UVdA18UL12NdVnf9e7vrirq6iKLouognlyjIfQQCEgK5L5KQY46enumZnr7e8f99Vd2v33T3zHRPH4mkKi9v6tWr+qrq112//qrqqyrl4YcfnjdvnqqqlmXhbpomY0xRFOGHB862bXHHK/EWIY7H/Yq/T90Q7n6UfomAREAiUCICYBVQE4SAgpLJpA7ymjt3LoIESYHF8A6R4Bw/XokIIg5ewSPKkfXKXTjxyh0i/RIBiYBEYNIIQLsyDAPEgnsikRgYGNBBRuAmSIQHd+EXGSBEBDp34XEiOx6Kl6Nt5YYIsfIuEZAISAQmgQAoRdeJsjRNwx1Epo8jRRLQOODIVxIBiUCVEXC6icjX4/HQkFdWCUBpcCLQ8WTFcT8WEscdX/olAhIBicCkERBKlaOC6URX6eF5oZIJ0W4/QsSjiOl+5aQVHnexEOLujbpfSb9EQCIgESgFAcEtGf4Sshw+ckhKeJxHREBMcUeg43c8IhCPThL4pZMISAQkAqUjIFgFPUcMgUEaDYbBwefchUeE0Lv0wPxYHoQL1ssqnBM/K1w+SgQkAhKBSSMAtnGcDjXMTTTuHp/jFxHc0Rx/7qtJF0smlAhIBCQCRSGgw6SChvG5/SrIyLH5QgheuXlKcB5C4OAX2cCDmM4rd97ilTtE+iUCEgGJQCkIgHzcyTP9R6FtcXZKxRB+wVYimfsOKc4r4RFyRRx3HtIvEZAISAQqgcB49l+F5+eoY0ji+CWRFQ6gjCkRkAhMAoFs/nKTjtvviBaBuDs8hVfiMW98J6H0SAQkAhKB8iKQ6T86ct00JPzue1Y0J7LjcSJIj0RAIiARqCgCqaF3dx5QrBzdSnicO71wjdwjVdajW470SwQkAhKBiiKQsV8FE0GHcvjI7Xe/EnoWQuBQMtxFTPFY0bJK4RIBicBbDAHBJ5OuVKb/6AhyPBAKv/PoeERm/E1mptIpQVY0J1x6JAISAYlA6QgIhhH3DH8JucKKIsvvTiBeOSSV6ym9fFKCREAiIBEoBIHs/qNjvwpigt9NT9Rj5L1FBMIjpMMj7VcLAVrGkQhIBMqOQMZ+QlCVQ1jIyQlx2Aoh7kDhd8d0l8956w6UfomAREAiUC4EMvt/CZLCXXiQgeNxZyYCnbvjEfHx6Dh3KumXCEgEJAJlR2Bi/StvltCtwFOOhpX1mDeJDJQISAQkAuVFgDafgMsS6oQIT+4d8Z04WWnlo0RAIiARqA4CeexXkTF0K5G98OTe3XGqU1CZi0RAIiARyEJg4vEvh8uQ0vE7jOb2wO92YiozKz/5KBGQCEgEyoVAhr/GkujuJ7r9WfFzX7lNybIiy0eJgERAIlA6Atnj95CYy0RjZVN4zLEkyHCJgERAIjBpBDL8NWkRWQklqWUBIh8lAhKBCiEwcf+xQhlLsRIBiYBEoEQERh3eAVlCe3LfnQzcgfCLR/FW+HlYxhRDjt870EmPREAiUAkEKqh/yfH7SnxgUqZEQCLgIFBB/nLykB6JgERAIlAJBCR/VQJVKVMiIBGoBgKSv6qBssxDIiARqAQCkr8qgaqUKRGQCFQDAclf1UBZ5iERkAhUAgHJX5VAVcqUCEgEqoGA5K9qoCzzkAhIBCqBgOSvSqAqZUoEJALVQEDyVzVQlnlIBCQClUBA8lclUJUyJQISgWogIPmrGijLPCQCEoFKICD5qxKoSpkSAYlANRCQ/FUNlGUeEgGJQCUQkPxVCVSlTImARKAaCEj+qgbKMg+JgESgEghI/qoEqlKmREAiUA0EJH9
VA2WZh0RAIlAJBCR/VQJVKVMiIBGoBgKSv6qBssxDIiARqAQCkr8qgaqUKRGQCFQDAclf1UBZ5iERkAhUAgHJX5VAVcqUCEgEqoGA5K9qoCzzkAhIBCqBgOSvSqAqZUoEJALVQEDyVzVQlnlIBCQClUBA8lclUJUyJQISgWogIPmrGijLPCQCEoFKICD5qxKoSpkSAYlANRCQ/FUNlGUeEgGJQCUQkPxVCVSlTImARKAaCEj+qgbKMg+JgESgEghI/qoEqlKmREAiUA0EJH9VA2WZh0RAIlAJBCR/VQJVKVMiIBGoBgJ6NTKReUgEyoSAzZ0QpnBXJsFSzF8lApK//io/tqOw0A5zSdo6Cj/9saos+WssZGT4EYSAZVkoDSjMuYvCaZp2BJVSFqXqCEj+qjrkMsPiEYDOhUSqWupwLfEfYyRLurcEAqV+Id4SIMhKHOkIFNhnhJomdLS89RHkhVeOJ280GfhXhIDUv/6KPqyjvahuehKMJvQyBxcoaOCmseiJuqBp5/xuj6WLjSVkrPhpwfJvVRGQ/FVVuGVmk0YAilWuFia0LTeL2Tn05aYtJd13RKCgMPAUKCmttaXYKS958S4sSZcUNukPsewJJX+VHVIpsCIIOCQFrnGckxO9VYlYRLQ0Z2XzkUNMeGGkH0hn46QkYuPJSqWDNCeHlAfzBYiQE5wdTT5XBwHJX9XBWeYySQRAVU5KwU24C48TLjyZeKP5RdCNeIv7KIE81GRMSzMSRVBTDyJJVi6O4pYVLh8PCwKSvw4L7DLTiREQRONYTuBR0BYGueCy0iOaxWx+T72yXToS6VO2KpLTXcFLes01thRbiWQIBUMJ5iIu49k4PU08CakiXPizSiIfq4mA5K9qoi3zKgIBQTeOhRf4i+jJsgzDwCvhQGTwQCh5oEapGpQpOMEv3Et85LBdmnF4Ek5G6DwqDlHxZBrueA86hAcpLR4BMnVSzfCK3qcFiSzk/XAhIPnrcCEv8y0OAfAUuMyhMxCZYDRIcVgM/lxicYcIaqIkiGkzDdwGwsMdl8Fifay/1w4GB4J9Q/3BwZ079jBL3b1rXyQ84gkwKzD0o1/exvzU25TuCEFA8tcR8kHIYhSHgNC/RBqhggmeIu0Jl3COB49gKLyxmGWyEHiqbzjUNxjqGTi499ChfYf6DgZDwUGWVIykbRqabam64ldVLzyKpSVjPqVes5LJ5DDz+IRoeT8iEJD8dUR8DNUvRLpp82Y9KnunszUqdKIHIWdyaUm2W0salVe6oAhEB5Ji0gAWOXc0cAv6lart99FbxmIsGWPdB+xQX/jAge6BvqGd296wTWXb5h3M1EzTsg3bNE3bhCKmeDVdZViI1AC1DMktDIupoEEI8jBFBZOpPg9oa8RWEjEESXcEISD56wj6MP6ai0LMlcuF7hrl5TYae3Kc249A8ejcaRxKzQxl8QiRQdbXN3zo0KGeQ4eCwWA0bBzcO3hwTy9oy6t4FcWjWLpqa7jbSGvqGmuBcYRuqWJyADIUvIaShRF9BV4VmWKwH3kSi3FShcqmaSqzDDPJYlFWm1VIp/DSczgQkPx1OFA/rHmihTr5Z3xOUGketG4lJT/DVxkf3iKGQwHj+DEshbcon8YSwywcYuEwO3Qw2NcTPHSot7cn1N/dPxiMRIZjZhIDYYquah6oUGAdoqqmOruJ5hjJwSSf5487CMqkvMnL1beUEocwEBiGwngKCiS2I/4Cl/GIzDRMRdUxOtY6b5Tex7OQt8OGgOSvwwb9kZoxOMNNOMUVE407pSMRUZDLQ1jiFe7IimiDPIMDLBRig/1DodDAQGh4oH8QnlD/0LbN24mDFK/KdAxdMVM1DcsydcVUNeb3qI1+0aVLoneHvBCDZ0qi8zglX80ca1WRQCPiA4UhKjEYAvlEAf6q/X0DzG7OI1cGHSYEJH8dJuAPd7bp9m2leWacAuVr9OnoQhtxlh2m+oMQKuSKbHA3aewc90SchYKstzcGTar
7UF+wJ9jT04fJPsOwoEkZuEBPFjp4xCCKrXvtFttSyPbL1jSblCkdr8haArOAGFxHhlAoEUQVwb107cgRQZoXsRgRGDJCAUK9Q4xJ/kp/9kfAX8lfR8CHUPUioM1zhiGSEJ7RA0vuAqXIy+EFvtomRU8p3YqLIMLClWQYZDdNNtDHgkEjFBzoD4b7+/r7+wcG+sL9/SF4LIydG8QIIBwoVpqi09ATPaIDiN4f5w3024i2KHdwFgIZLnipaMRi0Ik4V1IAaIu/SHMyxSmz4zCp/b0DjM0ss2gprgQEJH+VAN5ESXljS0Vy2v9EiSr+HjNraPLU1bLpP/LDiA/IIa1l4W/a6yoLCIL6UuJCIoPZCZZMsL17WLB3KBgM9fX2h/pCMJ7q7wv1BwfAYmShlRRzhqqmaLDS0lRM39WDmPC1AyOkRqYYJvtSORJPoTyQD3YizqBwDNtz9DKlAv8iDkVTUsxVDXhtPQj+QqbSHTEISP6q1EdxJH/PSdmxoewowhKTKzJ8oJpbDaS4IF0BTLrt3zvS1wtiCg/0hPt7h/p6BweCQ5jjGxgY8mk+WrnD6QYsqGteXfXoWhNg5fxDU3vohRExCe7jlIS+JNeYiJKcqUD+SfAdcLgvw6j8ketfoN5UsSAVk4K8U0p8XGkHxHTN19/bX+mMpPyiEJD8VRRcE0dOt/pRA8hopulwNGLHZRQKrtiMCi9Dm0xnSXLT4qjbxZkKdwpEHD58zqIsPMAiYbune6C7uw8zfd2HQuHBob17DwyEwprmIeXJUjEwRRWzdTKEV7z1egeJBucQV6U7eibZiHKX7uVxrSpLrQP/iN4gL4eIT3dBppnnjI+y4CxIQRw7Z6gqE6kyPtRa8ejeIMbvpTuSEJD8Vc5Pw2EMN0shg6zHfFm6uYzeZz2n+YcYB1yBC6NEYmqfYjsZC794FHfoKJxQkETXGcaadm8L64r+0ovrI+HYQHAwEo5venWzbWgYOFctTWE+jDrBBh0Kmqb6VLWujtUyA2qU0NVEV04h1gPT4C56fFQIlCKr1MSQOVWhIDhH/3LgcTMXT5iCjcfE+Dn1KIlyuYNhqZBTtTssWUPBboIaV+bzqFr+MqM8CEj+ygNKsUGCKJDKzVNO4Ghp7lbnRHcHpqILDcZpJoghmjd6XqTrYHQpTWFiuDtFYRAJdotiHR8MOxN9vSHM7vUdCvZ292G5DMbRBwci1N1L8Q7lS4NQdi0M0EEQfImhh/5gag83EJpJA1fIKj2zJ5QmXuAxaji6vpRDTkgqwE1YCMrhLIqGtTyIxicgaXwfRvMgTSiDlkkWp5gFSMkCJDy9GFMrRTETuiSECQ+XDy7XbNOi1ZcxxrySvxzUD7NH8lfZPoBMS+K/0Fly3W/xirfpUQ3boSonYap7BZoQF+cmRKNmb8AgnPUHMcc3RHN8/SEoU9u37IiOJLdu2un31BqYB8RqPsO2TN7jM1WP5vUodbVaM1FV+tge0bFzeATMJnKnppvyEldSF09YEqTIaFTJnQKX4nFXnyqeIibTVGKWnbBtA1bzLe3NdXV1u7bvrqttSkZgNe/TbEwIuJOWUoTx0hJX0u+GgrWTzfX8E6lGtuMVSb4DApK/yvw1cLX6PJKddo8vf4aenIhZiaEl4UqwYA8L9VrB3kEMou/ff7DnYHdPT08oFIIVAjQpsY4PK425VQHWyjQlNZ/OPLqi+UBVMPPEej6YKNAGWNQH5JYKqSwzbZDzBe8W4pUwCnNK45Svoh5i+BRz8WJZimmrUU994rhlM1edvWLZSTMDbUSmINbNLw396u6Htr6y12s26RZ2hKiWs2FCEWue5SfOzGBXrdxlPjkISP7KgaTkAPqZdvWaUt9zDA5RP4hL540wpeAgxGYGhs8H2WAohpV8wd5gb09w7+43B/sjmObD3TZ008DyPZW0EN3r8wXQkVHVFvrwbMVLbINhKVofQ6NRpCpAv8IAFj3CUSzhUARSInIdcQf10yCESgkh8CN
h3si5yScT4ih9oxLDHoJv6QVD14QyeMpZy27+6NleLKwGR6VXTqMGx6yu/5dT3rXpOeP7X/vJwKFBjx2AmqlpIGlnfnKU1HI9AJFgz8A8u7NcAqWcEhGQ/FUigJQ8t6FHRgyfrut4gdW/4jWoADw1zN7cY4ZpfUw43E/311/bOjw0Eo0kotE4dCgwiIqeUtLEUDWsz22r3ms1occHMqKBKN7vU7BOzySaoubKO3TcA+bh6h1lh+EqPjyE8aEMYVEs+pfHkf0XCaMyEmelySUnclkJTeQi+rBOoTjDwrZsaNVFx/zjP5+NqtOFmtnstz978oprz6I1QPjaqmzJmfpnp73/Nz99bO3Dr7FkoLa2NpFIljLy5ZRhTA/pXyHG+QtI5KAzZjr5okIISP4qD7D4KqNZkRoDZ7Hg/uHogPHG9v3DoUgohDm+yLYtO5JR04iZRtwyDcTRsILPxtYIDCznweI+lTXCA0t0zHMRW0EO5yP0+kh2blvhhMjzS99GkQvGayhcUFs6Rt6/VGpQCbK00It0jYjnjV2JQHflTGZYLDb72M6bPnmBiQ4wYBWvoZINWz/73qPXf/g8lMHkpNa5kN34yXO7ps245877h+KWx67hsZGmQk4N9g1yhod84Fa5jCpU/reaWMlfZftEOc1gYJ2ZcfbsQ6/++bfPjQwYAT3g89GWd7Zdp8EGwGQefO2hQpBGBYJBF483RGxODP2J619oHilNiheNKykpYiyxrG5NJ61hceoqUW7+5AWUmROTYCcCA47mCRJx78ANt9zsrSeSQrCIgJcnrVz1mX+6bfHxi1aeOUMDyfNJBb2JXXrTgsYpV//wG/caUSxEgs0HUOXMUgEuDpL+xYsqb0cAAvIHpMwfAr7bXi+r0ZuSQ5qeqPebrWq8zh4JWBGvHfMqcZ+STF2q4aXL9Ci4MJlm0gA8mrEgL/CLuJy+HBXUCc140JOki4gQDZ376a4mbS2ueNCfMsj6nV/5kqfaIqc2YhwIro5zWIkyhTUEw6aCSabETW3khg9eOW8ZJy+qUaY4c48LTO3o/NF3f26HaQaWXnEKwyTkWZfP+MRtHwi0mLYHk5UApCKKJLrZGP8ixataKGUqL335EJD8lQ+V0sLQ4Zs1d4aKjfPARXy0hpute2wTSha/bB3bVKmWl981tAqaHKQL9CHuo0vgsBUtVB59oTHxtmqpBjZwZ1Zc0RKKFktagzGzT/HFFI9hKQmwGG/xaHmpi0bGXBcPR3EpX05h+GKkSuJkTmVLkQZGmSa8MsyTTiUIJ3VHRhwg3EkUw9yEEtX88RNOnXvOxQtptD7NXKRnCREqW3rswljQePy+7cRfqAoPpx62zpafV/+ej73DV28qupi4HY1hkU/g0zwXTYNw8sInJTuPRUJaieiSvyqBKmtubbZ12nKFVtJQs4duRBf1a0jJEs0x1ShJ4RL9nQmaBNpr2gn9gvpH/FKhZMXj9tAI6ze9gwl//+xlLVOX1Jh1Ibt+2PSPWFrcUlEUl4S0pML+Ep0VFrPoWEABRwupzNC8yUBr8uaPX+prTuGBVxmHB41N7+q0osqzf34BG10I+CgCf8V87NQLZ1z93ou0OihxMDMtpb6ZbLN8B/YdIsFS/8rC5TA9yvGvMgMvWnlLO8gKFuIwbTBVxYMxeM4cqfaYb44MCgiSjsUvLu4QkqjUFBmibMWEhmUoI1Pnt5x0+uqz3r6seQrz1HKDA5v9ZW3/A798YtPaPV6rTmM+DLaJ3IWqla68S346aJy/o9NmR0zXzlXjiVo7LB+gRcbZ4Oe/+KkaGCeglLl8CfBUNrVrqplgmBgBiWApArTbFKZIACgYO+ey2ZHYqQ/8/DkrYmJRlJhUzS7i5J/V8OAw1ooyzBMgs0zek5coU5aCgOSvUtAbM21TKxoh+o6iv0Eq2MQ/2SCmlBY2pli8gERS6+hKJq1Ywog2ttVPn9162dXXnXBKrdpAMiwNJ+mAOGljmuNXtxx34pVPP9B
zx3/8zI6hTxQgTbCAZs17kXlKAvJCcpslTTXJOTT1FULvEssneQLaTlC0bxyRgZ2dc6VAAsnB/oM4SAPLmdSREePQbV/52PRjvCYOzeDJ85h6qGxa1xSP6kG/e9Or3UtOnUJMlyYRzOrCsAQHnV1wxbKRQfPP971oj3jB1gAMI/q845dbkKJDsFtZYoR5W0T9ik4uE5QXgeJ+eMub91tMGn6PoXKkWhMOimhrpLEszjROcClVFqP7GIlHT9BWY6YnEphqXvK+0//1+zf92w+vWnFxrd3Ckl5meGnkm8bzofspWHnN1Hq2+vKOD//r9YEWEBr1atPFwKcvLgrIBKf9PEdnVoGGqzTiLtCLxfRkTO+/5Lqzv3PXDYFmNmIFm6fWzZ4/A2XDMReWHmuf3oRxKAyl094VNBCYufiwG3Ci0TSPR1f9hlUb+fQ3PrjwlCYLK8yJkkCAaVpKl1X0uRtbPWAi7IC48ZXtWR1E0oeQUGf+RnbNB06cf0LbiNFvWAn+scCszumkpyVO4i/9wGj92EQnDWH67yRkySRlQEDqX2UAMVsEnxRramvs3dkLiypqi9yVQQXQLeZNxKzwrAXTTzrj+MvfvRjj1oaHxalZUSnQnGgNY8r0C3/RaFkS3BFgJ7+t4+HfNu5+tcenNpqYo0PnK5siiMIgKd0BzK4WycciZhXd1ZGmqYEvf/nWtqnsV/+7sS+x/+1Xv/3St6+49R+/a3ntrjlT3nn1O3GQ4ndv/wFOAOofOthQ2+CQI/aGxoIny7R0WLypVswIn7zq+L+98X3TFjHLQ+P0xGo55CWKgmr5a7CFhmbHlF2b9zB7Nf1ipNU7EBTUS5ij0OpzD7vxo1fdtus7ffsGa7Qm2u5V4eOPeepURBDAgb7Y3xvvtJ1jIFECBEt3eBCQ/FVO3DNfZI01tzcqyiGaw8KhN6Q8wBE5TCI/KC9woBVDiZ5w5qyz1py0fFUTdkGAVIx7QzSaJnXDuGjSXHhulMpmjz788tnnrgB/qX726a9e85mb/yfZCyVMw+aoTklIn4LjqXBDQkFh4p6y5xCLIjHWpkXmrGj/zBev8mGPQoXt6H7t1i/8w4qVTXd875G6GdZlV1x6/iWzhofZe677vObx4Vwgr09PWIPoXSIHOjQb2qGmGck4httPOv24899xzrErAjCAQNcTBZpgUxzI8EGIwQx999b9sUPMPyNVCUoLrdPCIbOoNTqnamMne/9Hr/qPL/4o0h/y2k0eYJRL2KnURfzBhAy2wGb2lBTcRSSVUcuPgOSv8mMKiWgpLa2NBhYuks6CkSFYN4AjOEMUnyEYMFAfOP2MU8+/orNlCYNVepQWZfP1joKFQF6jZYODSJmyWXND273/9/jfvvccMF2ghf3z1278lw9+e2TA9qr1qu0RtJJVIkhCP1O0dtIZYUFmYz+LhK0ktYB9xoUr/uZDp8FqlLZ+YOzDn7wW9rnQd9522aqbPnY+NudJqqymnf3m4c/bBhUA7fz19QkYuCEytj30IE+b1day1k6m+JjiBxdRRxDV4ZpXVlnyPPoD3mRMGw4OvfbSoRPbO2lLapJNDmouxwHKJdZtqctP6zxnzUmP/Wa9DhsSkPFkfjuE4PSd7Ohw/ojkrzQgh/uvdssttzQ3N9OYhsu5H4XfCXE8TnQnxPE4r45ej81iYf25R1/1MdpaCxtAcH1HkE0WKhnk0dmj6cScC3TSNqVxINb9ytat+3tCut/f1laDVUaUkmsV+PSgXhCj8bYMnYyCMUbOWGtD010/uueCC1cnkkz1soZGdtwJK994Y0cyYiRHDMwxgE4QLStTsluj0S4UBgVP2J6Y6Q3pLdGFJ3V+6NOX+GHfQGuciBBwMDVKgm5sa5sPdwVsgg4dOnEQijh49LCOLq19poqrrUtpnsZaprG6dqaiJwj+QuQU+/AdConuRl0ZdDhssMx99Yl9/d0jzPYd7Dt43mXH217BtAJVDgepojQ7gP03prbPef7pF5I
wqECJiUOz5GV9FhM9KlaSRdFBXr66i4pNXVdAVJrMifKU73MREDoBToWR+lcuOCWH4PussLb2ZnSUFEsD1mhOUMHQeypeNK3/1jzmgf5di+fOOueiUxcc39I4hUiB9AnedNyk6P4ZolIozOtj06dPf/Shl9dcumIENp86m71U++ht77779gc3r9sdDUdoAYDtxaReavaT25TZKobIcaSiwXS7qbVm8fIlK85cuvzUDr2RmAvTAlQlLt/JUXQQUUHRmtGshXMXb6zqc80rRWNjxXHCu+Z0bXnlzVpfw/aNe9a/GFx+divlxbNDYdADxd766EmSTJt1zlUvv+btP/rOvboKixKUBR9BoRk5Obo9Ho8HOxeRJCBAELhfSn+1EZD8VUbEHXqiNdcdHTX4rtsJdONo8JpIbCKXHm+ieHzAG2P/WAsTX3riwmvee2bnIoadnDG/RooPRul5y4FUwSBCelZrgsIBXlpyzIIHf/3IJRev8Hlh9UlaQ8ts9tEvr1n/bO9PvnvPgV39xoBa62/1qn4oXUlrJG5HbCx9avPNmD/t7PNOP+WM2V7kG6CEyBp9PTRdZJqVFxW6Ck5hM2Z3WiyBaYD6muYH7vnTcSf+jdpIw2fk0qWirgD9aFA5z12z9KnH1x7YFMV22KV3IWE/MTAwSCgADXzg/LTbKtRbZpEXgZJ+i/JKPLoD0xSmsIYWVlNTx3/ti7Y/AtkZhhGNRgOBwIKFszHheO8D94UT1B2jfcBAINw8gpSg9DUWm6BDt+rUkw690fv62hENJzNiLAuHNEKIny0/u/0/7/rwj371L9fcdAmrC/cl9wyr3UrjiOnvX7Jqxie+ePO/fOO6M6+c7cWugXU4mDGVNTHXWJmV6bOnnhmfCYWHmNrN/AqbNrtD9eM4INTMu/mlN3ZsSFgxviM2zx1lgwpG/MV/AcBoMOW9+PLzmBcTrtA/3bImU1zo0cODYRantJTJYaLxyRT9rZhG6l9l/1TTPRSNNTQ04KDpQjSv3EJgty+v5kNXZTDZbTeGP/v3t9RiGz/ukAHaJho2mrcY80IwBY5uTKKloq83dbo2vWPWffc+tPiUq7w+GpiCw1uYiak4nCPALr9p2bs+uuz1l0IbNmyIRIZOXX3yMcd1QFnhplg8KtJgOCwfbaUJmwstxw1ZPfirJ992zlmBpsw+sHRgpHAKW7y8bsQIN9ROsZK6Egv89Hu//H93XW9whQhRBA6YOIDiislO2n7HyxYsnTVlWmP3rhENo27UixcYpGUW9ddWu7t7MQ0q3ZGAQAkf5JFQ/CO0DNwAXWHt7a1QBCbBX0jk8esJNuJrtY5bNfcb3/3souPr0IYFWaABUhvkztG/xkKCTKIUduKKFa9t2HhoL+2aFU8wtHZhPoEOJqb/PJhM9LNjT2/+mxvP+sDHLzl2VYdSy0foeYeRRqnBjFX8ppx55plf//rtv7jr4eQQg86oJDPqFak7NWz5qqUDkUHsjd8YaN/52v7f3vUXHfoaWb6RQ335Hq4AngdorGMGO/+SMw0lotAi0JIcZEIFiw8SJtIddgSq+K087HWtRgHQiIhlBL20dbSj6whTUbIWLdhhFAzdwwSL2LWRq//hgk/858Wtc9gItoTBeUCwnsLWOELVSs0yYgaRa15jNCcwHVrxGW9bFTeiD9/3qJpgGAWDqRRpcHwLB7KL4t8CGlCDh0+CIqSahJWFTV2r8tnP/ePB3X1/f82nXn7kIGZCcYnuIJXQy844f1XcjhpYnYRt1bxN9//0iS3PxcWKbkFhRLiY9VVwggknGg87efWctml1STLHh3M+pfTnQh3yAi7ouzCE1bQBnAM5BuBZdZGPFUWAf3MrmsNRJFyAmYG0saXWUjBSAv5KNasCwTCUZEtnzVduv/WCa+aSeZSHaVjJxwWDrcilGs94YhEFrzl9sgXLtfqmwKN/eNoIExEgHHJgAA8/CJG0OcTGJRSulPACC1vmaFQWjekB9vHPXX/uuWf
/1ze/87Ebvrn71QjRE438EcOeeNqCmXM6bS2JKV2P4vdZzd/8wn8nQlgyT9QEsImWRI3EFiAKq+9i846dgc0RMfCPj6PYT8SpJHIEgw2GwlSSNPs5b6WnyghI+69yAo6+jmg3nDhYNKQ/9+RazI3pCh0ZCCVCTBqOztLFFlAB0N6YueT4ubd99ZLGaYwFYHNPLRbaEO3RAC+38wKV4YIuAKGkK4lX9DddAt7SkRFM/9GeQVV7dvR3v9Hv93QsWtaORTZCmshbpKIGSTJJQuHOnSOSFnARBvkuhKcuUBCWdqNeJ6xaUFfT8czjL/zxV08NvMnmL5zrx0yCSquIapQpmzdsw14UKvPieKVk3Nq48Y2VK5foWJYAMzReC6qI6L8DKApsePwP63SrBhWFfNzJQ0BSPPwnLY/7xrpjkwzYlKhe49gVC6fPq6ekNNUsXbURwM8PevGw/xIfXrWzPxryQ1tsboP+hfF7zPlxbiio2tbs2TOuve7M3/9xQ+9QSo9AE6GDO3hD4ZNrvH1yG1M0TCE1L+3gHbqKyBtt/sSTjsf2yi88+RecjYG2m2q+1ADpIpeSlOXlr6p+o/N50Rf3sPMvPe5DH70Rpy49/qenv/jp761/6hDXaNkZ506vqfMB5ETCjA7hv7Z7Q+8vfvAcNv7ymKSIYaEP3hJcKs07Ysp1yYrOxpZ6zVvKpjr0iwHJwb4QAeZCrOoIyQwJAclfZf4eiF91AW1Lu4ZtUelHm/OXMO8aLz+yHbVVzfjCl761de+r7TNoVSMcuEt8TmiNcGg2dKW1FeHhnRkkd188MgVZsLpYtnI+1tBs37R7744YRrspoigrzwItMUNq6RBKfzgcaomLCohiBdjqd8z5wrc/0Ty9rm/P0Lf/9a7f/fhljA6iO9kT7EPp6hobdM2LXSyUeM3TD6z/2Xdeof25oJfxXiQNf3H4gGRtPeuc3oYFTjTCmP5Fgcfx82xF5mPdCQ4sPw/1YQ8KAhAFFBLS8ihYuqohgM9JuvIjgLYH19iGxkP9QVcLmSAvRN6zf1fUGLr+fdegl4cOJRzxDOcUV2KRgytgbC/6mBjhQmG6uqZ5dN/jf3qONl/OFjh2+sP3BsxLtv4etnBlw6e/8JHahkAyat/74wfu+srze7dgQ2ysGdAjkQhXilTN9OhG7WP3vfiz/3yVKCzOJy5xZh11R/novMIamxsSZsw0aeBM8E6RlSNeA5j9/Xz8K5248M83nUL+LQ8Ckr/Kg2OuFPzIw8KybUozXjm9vNxoUBPowqb25EymW0Ox/suvXtPS5hP6Wmp4a1RXJUVepDTlv6iZibwQgQbVYPRqM+y6g824Hn3oaQPT/2IsXEQigswt2ljCR4XnSVbuIFLEPGzmsd7PffMWfxuGu2of/uXT//ZPd3gMbMuDjjVBQxRGg4GamgiAwu742ovxXqbEAOeo0sxbNBcDYoiMHnlqwnHU+4kfcDAw1NiBfucUtZQanB/BieXJGCUhIPmrJPgmSIyVOu3NxEoFOjCNZrZOb77yuhNpoBptT3QVqYU6bnSLdIJHezhtURB5eLcTBDV/yWwsxlbi3rVP7M9LWKNlHEFPQBCTsFhB9a9f+2Rts8cb8NKaTeYTe/s4BaVl7NhFLOF99pH1d377WS3KYO+FqROQtYZ+vMm2bdqJ08sxmgZMiMUmNYKFXAZCg9SHhcOcDG3xmo/++Xt5qygCkr8qBi+YQ2OtHQ0w/tKhRBXkLEs33v2Ba7VG2tSB+Cvb5QnKjkLPiCYueqBGamPPVrZ8xcKEGfea/mf/9OJfF39RNVAljU2dr3/iix/oWFBjqrSEJzVmh3FDvqkr7hSIRepJ30tPbvrwe3649qH+4UPMZzGfyfZuZNs37U1EcX4wuo+03cUk+IsoUlHCAxHiLxdrUReVyihdVRFw1mVUNdejITP6NquYgmzCFu/UukTLyq55htfQryFjLZY8eXWTici8NdD0WXaSop4hiGYXqaEqbMoc7Ep
WF+tRN/5lE7OuKErQYY+MGUV01TS/Mvv4mk/++42fvOEb6C3qFhbyZG/shU3NNCx3NNTBQ8Pf+vcfzT2+s2N6G0KeeuTFgNWqKz6sah8FK5mtFFE/dEBxikciwTU7MYRfRGoZtZwIZNpPOaVKWRwB/CZPmzEF819ZXUj3vvLOKAz6IKCwxYsX0BkcaWuJUoBEk3Q+XW7tRPrgoqVzsKWFZupP/H5jagismKZbSnmKTYvCO+UHnYPegScteMLCrNns7z/xrqjWxwJkfsuVTQAJ4qIxMD4MhrpqXrWhRunYvzH2yiP7XvrTrjqrUzX8tB8/d0L5UrFFLG1gNjq3CcqKc0ewi2FG/yqO/yYQLl8XgYDrG1JEKhm1IAQwajOtq0OnHUcn7ltgFB8bei1ahF1yeOexHLTipjBSMTQGs3VkA2PYp/68NqsHVFCVDnckE1OpqollnGe+Y8bF156j1hgJFiX9KWcki4foiuXTjXo92YC7agY0MiR2j3wVRVs0zSJ2rMVvzQC6kPjIoDQL9fZwI3N05i/5q1KfO3UBVdbW2ezB1utEThM4rEpB0xoZwcw/uZz2KIIneYdmB+UFTW3mvC5qc7a+af0Oa3iS0g5LMvARaCql6XiY6WXXfejk2Uun+OqgR2IsSmhhtCxdXBNa20EgXOF1gQIIh58iqGxAM9w/RDnB0RwLBIkHHiJv1UJA8ldFkBawQutqamE6NIB8Q/G5GSum8sJz62hvKey6mvu6hBDeUGl107SZnTBAx8Y5mlWzbdMwtfq/NkfTqTjdEr1yH/vwpy9pmu5XvThQjkgEo/KoDXmK4KWi608qmKVgZyNKmWEt2ZSKRrL0BBL00jEcJQENR7QdIIsfZbS15tYmMr/iU2OjovIHEZ/uNH7jGRmM7981VlwkmMznhTQiGaYgO6f5vBi/RkDS8/xTL+WWp7whmdpxWAQyk8iCZh8ITxoi5NYgJAPTkTis+103rjE9MTrtnPb5QPcSFCbOrMSd29bl3DkeDirFFoeXBCYUwgQsw1/FypHxy4CA+GKXQZAUkRcBNDYcRGTatG/zhA7j+nZSffi+p8ksoKyaEbV/aCdQGwIsUOsHC0CB2bN9f2oIf8KSHQERxHJP3VZ1DNDzvRtRH2hhK05vXXP1uVF7AL1IPgcyaZIsqJJ8KFPxatgFP23CKimsIOQqEknyV0VgFULRkjBg09TaZGIJ0RiEhHBxIQlUCJxe/erabQwjU+VuFbwLSeWaPnMqlgb6PP6De7sjh0Yb4otyT/4O0p2AdwmTIuSTspW5oMXSUmza1FCHR9gFe9k7/vbYxukIiAqr1CzxTsWzwvEIMuJTw6LYMAnLf4nBNfoE+VJWWPzrqmeATlErrjK5BZAhJSIg7b9KBHCi5NC/2hssWnA4kUNvB90fU02E2e9+tvnyDx6DvY/RdItp7eNlgWZMlGiyGTOmb1q7R1f0kaHhPbt6l7S3Y410WR3ogH4Xxyg5Xk3AcU5hgAiMJlDueJQdfCP4h3ufNKJsJBZPJOKnnXXq2ecvSvLz2XzN7G8+cMXd37jfwo4dtP8XrRDCHX1JwWjOuJiQjFfw0DjapBxmWnTFHw6N0KcqN5KeFIblSiT5q1xIkpyspkkNGFajU1qxpYS70eJ33snVWQGDRgUbJw+2shpmv/75g2evOaZpNk8EoaOYYJKtDrOhImVTUxOy0pmuK9r2rTuXnNKe0iOExifyEn6nlLwi/ClTcvFyVNEyAHAKyxKSiQqT2mw57qyEX6QG1WD9Ffb8+e+v/7R7Y0w3cCQKJd7y1O/u+3HjBe8865J3H4NsTz9v2j3fUyLhhIqZCYzzIQ7O6TYMHAEl/Hh0/NibhytlUENpNpFYkpeNE7zIfNRdzAbw4y4xr4KyY2vXQPf+QTJgRUJcyJBLEGXm3lES5EOFEJhsY6hQcd6KYqdMa1V0dHUmbrFoVLCuxJpk3Qj86Fv3sjD1lQowHSsINTQq+rAVNjAQJqok0wF
157Y3aKZTNLuCxEwUCXNz4kJ1yyGWqm+zgJd98paPwYzLm2iiK9YQMNsH9xm/+uGD//PlR0zM2OrYI/uUuDnMFyOS5iUcaooSi7tTdDxmhTivCvHACkw1PFbShqYs3eFFQPJX2fFHw3VdKuua3YBdwNCu8uaUO+aCaJrpe/25N5747TZIoh/z/EnzyssfSBOg4C5erlDfIBYzkZ26ouzdu4+EI3yyOgPpMPxKZQxp0DVxwSOurBLlDcyKk34kyyrY0XNbkh/f8TthO4qXnOhhXh/QYvXPPPTK0w/uRP3OOW+ZXkf7oxrohWOPrnTnEfHRVcSm9aIvibvwpDPJfFjOWGSWh4CjobFMCnihFaZ2wac+buoV/qa96cjybyURkPxVEXQzX2KF1XXSrhKcPArNS7V1n91wz52/37shIszBCk05TjwQB4pls8HBIQwScf2L9XXTFoAlOPQEdcXWFZMfDwkzUqhCzoVJV9AmLmQNJ+7FZEbKE09uRtnrr7+elRT5aqZfT9bd+5MHkn2sbRara8Uu0QYGEkFe0LAETwkPnYWedpzBJvvNT+0bRire4OAIKuWQV1bx5GMVEJDjX2UGWTSLUQoNhvA7mof2IawgR1ZLtO2hbkU9//3Vu7/0nZuVVt73y5BiQXLckZA3FYwzyJZNWzHOhlaN5uzBORmgmxp33GL8gl/QyR1mb+6IvvLC69s37vbp9UODw4Eafeny+dOPaVuyaiqNEyF7XEVSGPiL9/+YFmBdM6f2bIoLg3mUHJLEqlJ0t2P9iYd+s/Gy9y89YdXxz/x+K0xZTew+jWWNqkaDW1wRUzETTGnwSIWABBBQgVUVpc4es7PVQTLBr6EsSvhoCiyDjJYXAclfeWEpXyC+2QprbW8KvzlSiFCQFxzIxYgn/bX1b27t+cHXH7z5c2tolAqXYMdCBOXEISKw2PZ1YcWELuIxkpbq0XAC2cG9xtTFeopZCm2HfCyb61ZPPLD+xade3fTKjsSQ7WU1toGDK2u9ugf65qb12/T26Pd+9nmUBRsQogBgH9BI4U4YAKNsWKW94tRlD29/ERasmPEA9YDgORFBLA6b8/UdGAA4nTM7GNsM5kINkQs8uKMvSXzHHR4FcYsycB4U7CQCCrpDAj5UdEfDOIXInFLKh1JQfjLS2AgU820aW4p8Mx4CKnYBay5k/F6QFykJGI/2+jGQU6u3rn92+8++8wRW+FmxQmTkKQjUDKgI+KTNCHv80afQ9oi8xBGvhjIyxFdcgrmgoHC2pW5m7sWLhZLRgmX8MdiWJ/u+8IE7f/X9P29ee9AaqvOYLYrR4NeacMa1ZfisqG7E7auuupLVMhzDgUQgE9KeSARVcFyHwtJF8fEXe27VsXe+f6WlxW0aSRR6E+TREY86NutIqnt27YPA+YtmYbcisBL4Cw4eBArmwh2sJ5ygME5D45Yi+yWKwvvdYgdXRRkMiRNWKF6huly2TPlcEgL0kUhXWQQUmIC1FJVFqn1jXMnS7bjnhT+/9uMv/ZGOxoUlAYbGC3aQk2pXsJfATvAjbP0LWy0TO8ak27aFbZdJB+cqxXhyRZFg9049N4M9c//W27/00wObhxM4ki3Z7Fda6zzttb4WTalVLC8z8b3SNTUwZ+Yi6p/yxCnWImYkWinc0XwmUmjs8usujOtD/cO9CZyblh6HwpoCDHn1HOpFLtOn14KhIFnXUcfUd1vwF8KFR9wLz310TKe9YDRfHQgNczoeHUU+VREB5/OoYp5v0ayoYVLVAKmDKqbDKHTKVBx7Q+1n/KqjTYgL0UjN4U419Uh3ct2jW3/x9WcY9nTHoDjETCCJUooo4h4bovmDr3z+XhyMaxte3euj/eIxs+elzbBQQicyPMIvchdyREiKCk32h7s33X37w+Fe9AUbWLJWMWqwNU0yoSbi0HEU0+BsCNYx9a984fbu7SSJKEvkkobJkZ/Pg6xSSHBI6QH8dd5VS7wdcdWPuUWD9CwEo09KlxoNRyC
nrhlZE84p7XI04AL/0vhrVGG7D3TTR54L2ahY8qGCCDgtrYJ5HO2iFdYxdQpGYSbkLwcoaq7ULvg6P8Xj9zTZI74XHnn921/8PSzCqC8JjoCheYpRnHR5PJpNhyHW6+wn337h4K6h4X7TNDScQgR9BN0rv99bU++jRphDW0KWw1wiK/Rw+w+w//vhfeZQbcDTYiY9CvNiqsE2SFXEtjxYP4CJAayWgqKmawFzxPfnh9ZSCydTCIYeMWDIU8oxgzI1xFJHTzO7+db31LTh4KGIrcb5SWjoUSY0IhHAQVLAXA7UQhcTvUiRA/zux8I/EZEcXVf6QUJufBguPJA5hUg2JAFRle8S9soDDv2rEyb4+NIX2HTRRLADDF2cwnBaI3pQWnxI2bruzc+8+47u1xidKsmFjSOSpCAO9ieNsDu+/PQzv381GkzgEcSSSNCOPmjxXr+nnndtRbMknYfjgXTiwhNChB/khSMw1j7xmmIFVBt2tpkvjzCYEuqjA6iXzmT0rXt6A3UhuUsmkxiVciIU5iEyFzkpPrbs9I73fvRqpTGi1CQ9AVMLGLrPBCsnEhgdJBcIBEBbgrnEXXQkQVvwwDn85XhEwgLuaTLlYCF5KDjKgLXYihWQo4wyAQJy/nECgMrwmvQv5vP5jCh4oDgHCgNrYJAKFmHgCzumdu8Y/pePfP3yG86/+Prl4mRW0p4cJhE5OPlYLBZkP/neYy/+cTOL+X2qXw94saoG+huG8D1Y9ug1/I18bN8ZKeMeR54oLlgSF3qgWAf16rrNqoHDRWhHxNRIOv64uMyVRAGFjQyFwyFWX4sZOwZ1r7j6p2KDOGjcjehVY6ddPKux8YN/+vXjs6fOacHa+KS19bXtr25/iUCwWXNz86FgDHXE+D1+MMAycMIjfj/wmC4hvUXNJlEk0sNgPzEwROowrVDiPxWSwCYBZWlJJH+Vhl9OanyHXQ2CxtwxzgTFAUPOij3hOuks3iDpaLQ4f9ZUoLeo2ODQo9Wa4fhvf/DYvk0973zPBW2LaelMyiFjV97wP//Izu996R5vokU3a2jRMbqSpgkdBIWKxGJxb+Qdl15II/u84VHzB0nlNEIqE9esSJtLsI2vbPVarWjAFBvNlu5pJ1iMb9KAA0v42kLVSCQiYbOWjDZoCnJyjvRCBSosr5/O5qxo/djpV6U0Q4udyxYw7WLiryRWm07pe2O/4C/kBYaiHwAyB6PEKBoWMBKdCeOL4krjlF4oYhjgUwb7WOPM4qTI2GVEwPnul1Hm0S4KDIBvuviOp7QZhR1//LEbn9s/OWi4/oXWh9k/TeUzhmYs+cKfNz371AsnnrHkqndf0rXERy1bTE1CI/CxretC99133wtPbWxS5mpGvchX6BrEOKodaNbrpzWet+ZEjCvlclZuOYmXMYMZhzkVBJADDRAjgA5yKA+sYRqm1+sxrOTIyEjakiFXahEhVABEJyZjgQb+gGdxITyN+LRp03b4+hKJBMogHIrqdCdFCKIjsIi8s6OmiAxLiPp7rcYZDq9lx5PPlUZA8lelEUZ7s7ABPjSCQmgib2mgQaQc9AZuAKowj0/RfLZ/w+N7Xn70G10zpp122mma6oHdgKbpjz32WCQSBXH4ox2aD9utptfupXt5pppMeAev++DfYecZ8BdcIWVDk9dholrjJ0s0l3P6YILIBJuBKSx07ZiJE+Sa2vgRP64kk/KSJopyCs0vhzNTIqF/oas+NDSEAoCZcSUZrPENTGPQlCXO6+amrcRoY4kooHBE3+BRi/X3DcyxW0gzLUFaARnKKPkRkPyVH5dSQh22cQuZv2DOtpexW2BJDgTBtR3wIaYDFM3A6pVANBGJ9uiP/fZFiEaz4ne0L59P8eF8RLJWJ5WFViDjJbc4sAx9+MZP/M3Ks6aRTX8BTig31H/z40TL+mA/iIBm+hLxJC2v1jQaYycnlE7kZTI9OWLFNB9bsKTL20jvQH+FsCRFHdeREJFPbjSF+VtMFhj
G7KSKcSmAhD4ez1WoYMAFzp0OwkY9u9+N46dfApyi5gn1DDBWnHHfOFLlq2IRkKpvsYhNED/dGDDe4lyUpK4hYGs4lxYtj776Y1zjC6dUNHFPS/nQNLELKZbp+HRvzUgsEY0n4nEzEYdpFFgKTIMBbywQgvkEDBYSFouOxIOWZygU3zNnWes3f/jx1e+YTcseSaWhLbBgm4WWjKE64fDXuUQI8kZIZJhd+s7zbTMBvcaDnbagVRpGPB63E4ZChvGY7EwYZjRuD8W0sNYcMxtCF1x1+r494aFBGG0RhaXdWAiMH068RQVDLOHwIFLgER4PW3Vl042fudTSBywzBoKxjKSOCVdUDCNfmAYBMKlJh9TW+Jh5AABpKfk9HCFgzq90zjAW0ZUaK0GGwfxXw3khPdVDQOpfVcFagRoyC7tQ2AaMRUvDXPQBSaegCzu7rjjjxJHowGuvb7Di6CopqpmyrSc2QvuF8YIH3aeR2o5A58ymf3zXu5ed1UpqF1GcqLug1AJwUFhtEzvzkvkP3d2y7/W+tqap6K2i+WMSD6fKJu04DL+wU5Ctmw2tgREj5mlSzjzrpONWdSj+9Im8KHJFHWqkscWrA9ff/I5ffP+BGtaksQB0Q+xcyNmThutKd6TG4scJe1gzX6gPSyBLFyklTBKB0trSJDM92pIRT/hqcWxsDD/a1KGjaclSHFligbygNJlaYvqShjXXHr97++k7tmx9/S9bt2zcbkAR445pSntn0/xFc1ecvnze4jkd01QdQ/mU1CEveKFLjVEYri6Jl0JDI8VHZ+ddcfq9fQ+HB0IevQamaIYdj7NoErZe9eqMudNPWX3iyacd1zyFebBVKmwLqKfLNRQaJUJWY2U2RhlGBacYd1RY+kEoQSRdYxe995jde3e9+NDWWhUlcH3JBfs7SSbbp031Q22trzuUFib/HgYEXB/tYcj9qMlSYV1LwF20Y0PW+EtxEFDzG9WGsVxnxBrEGuk5K7Q5K5acf+0SUgdAj7ij5eKO6PCgWcNDvafUcA8fFMqTOSJmOnk578ERGBI/79qFe/ceePK+dfEk+o2xhlZ/15yu1eeuOuWMOf72VC6UHfpbEEfkBZEOZaMQlXJUKdFF97IPfWZNpC8Bi1+v2oAOtavrWq7coYLpoT46xWOyNFiukhy9ciR/VeWz5/TR3F7fO5ikPdO5RWWejEdrB5kI4zAKGiy6RpwsUgyFyOJCep4v3YWf38UTD0rf3PInVJCQ3sfe9+mz65r9Tz72zEUXX3TSacunzHflRd0rGqICm1BedMffCtKWqAZX7zhjgjehoAbYez9yxadu+ooZ8eKwXhifYQ9Jd93dlU4DUcDfzGeEgTOtFxtAin01Ckgqo5QdAclfZYc0n0C0G9pFp75ney8ZTU2y6eSTDH4QrVJQWE4U0asSwaKF50QpLgAqHdgBG3xdc/Oqa25aRdTEa0fsCcf1O1Ei1NLNF8VlM1Fs0gTHgBHzJmQdobK2RezKv1tzz3f+4IORiIol62XmUAwFJOJgxYnKKt9XDAHxpauYeCnYQUDjB6nhIFta11i9rzwauXM5ZSnFg7Kj9JhJpWElMbgEWkiTFyTjbRlzBEeNefFhNRCZOwIKAHolCqNysDV/d9zC5TPjbIjOvC2rE9siJuOGNTImk5Y1QyksDwKSv/KAUv4g3paaOxrRicEqxpKGwHILl+nR5L4rW4ij7IC/YAGqgbzw3UlfKEJVSjGqOg5tjQrlD9R/xLwBp9ebb722tkMztclu/5grnYfwHyEyS967J7X1xRgRZXAFEZD8VWZwOVNxHURoIvxOGoKCJdwe3Yt99Wj4KzvXIgkgNf8FQTSUhjzL5xxWyCljllZFik/6Kl/2GUmiIOIZulPW5WRNumyaRt0eigAROHxgNjv/qlVxO6zTWbNl08LoU+QZYBVReQcEMhBI30QISP6aCKHyvZ8ybYrm1fjCk/IIFSy2ffvOo7n9jENIoDZisFp25ftPnrW
gIxoboIUB5XNiHCAYlCYU5cO0JsTDZgAAQABJREFUSEmSv4oErIToM2d1wboUq/DEWpYSJDlJaZjHiMHq/TCoAI465nicYpXRI9QuCBTqWO4dr0QcJ6Y7d9LCuHb2tzdcVdPkxZaHZVTBKGvLGgj2u3OU/moiIPmrWmgrrLNTwy44JpmAoa2VwZH+ZSk7t+6hVgmXv+9ahowOuwjQVpYTdaXdZbErGS6TX5gdEZfh2uIRUTW29Ozajln1toZV9BBWHvyhf2HFVF9fMEWuWUWUj5VHQPJX5TFGDmhC6Mh4WFNrg2Ul8b2HDX5ZMsYUPqY0k9gHNLeJiwzcpFaWLIsS4taXiko4dmRRISxBwGaKCk7M5YfmRvaz/evtVx8J7nw+vvXpSM9rjA0RTYGsaJAKDgYf771U9ScsJUHjViVPNzibggT7QmOCP3Yt5JuyICDtv8oCY0FCMG3X3FLfvRNnaRQUv7BIqpL07NxqLJ761vwoUzTPOYhWXsPDO4rPP75r744De3bt37frQDKWNOJWMm7CIN7n8Y9Eh30BvWV67W3/+eFAOxmCgawwFnbM6traZjU0HPGqWHRdHhUMm3CEggO089pbE/7CvoOHL5ZEvVrYw2xKZ82tjTbbR7//pTkxcgw5MAHH2Rkb129bfNYSCC0nMZZWQkpdWmkyqVExEITJ3tiQ3LJh19rH/7Llte31tQ1G0sRmG7ru1ZgPc4EeOi9AtRJYLe5XI1bvGwO7tx5a2tJJhhSwUANfaezEVUv/eOB5j+0HbnzacvKVxFF2Ykv9UCiEI5fKbBo7+XIdXSklf1Xl80ZbxAUT1tYmHCpE9FUqg2WKrVrenVv3MXtJpsFnXh4+XzlKAyUJJz3iO/ri4/t/edd9B3b1tdRMMUZ0X3KKGvH60AtELgnYSNBW99hvg4a2+NgWaW1G7MD+nqWskyAQK4pMdvLqleue3jjUjVVc6FviUyi1F4/dIsOhELal1XBMr3RVR0DyVxUhV9jU6VO8Pt2OumbxC2hC6d0BRVFHd3wsbG7lfW395ljoQrF2OosYR9GI+yErXtlhKF0+XyGEL+hPv/vM4795yRrRPWZb0vKaGKFnXsvUaCN/ngsmRKAK0bb7YCpQGnx4p7EZs2eRXiRqzW0pZsyqD2NrVlbnMpkvjsLwWWDkS1QOgnGuipXw+rCTWun1LftHcBQILO7DOwoAqWQVFdba1qx6MBiDH//RNFRCtnTQouV5bd1+Gsl+y7WiA7vZA795TEvUm8M4BMVnxFUTe6iBN2j//dQlHoU1LfgFyOo+RfUqU7saqfOYZm0MgdV24AzKYk4wz/e5jPo5sVUczNK9+y2IfL6qH3Fhkr+q+JEotAQSm/zBhAKjJ2XMGGfHPv6H54zht1YrAkI2i4Tj4GUsNsSu9mLreoz9CZcFIOhMhNiqEUkMzV08s6mdyAt0BsUs1WfHT0gLtnuG4pZmtSwphT0KCqMNZeEMZbC/bL9GheUvY6UQkPxVxa8CGk8rjhBCPwf9x/J+45UNL20yo+WWWkVsxspqwXG+5rZG6inSKULkaKhrbAdeMjHaH0ic/raVsJkQRAPNCxc5g3V1dY2duog3aS1MxRa3tAHteIUqQqyMWhQCkr+KgquEyGg/WPnsYY0tjWhLaIQYRimjFZhqed7YTnvpvaUcvp4edtFV58TtCOxTsXQU41oYpRcD9VlExlUtbimhWzWt7IzLZmLwCwJGfcVt2ku6rBApzFC3b95VVplSWKEIjPpwC00k400WAY+XNTU10JGqZXbYS0/3qP4ySz3s4tD7U9nF1xx/4dWr494BtSap+nCGG06JwzJ4WgefU0D8LGAD/uS177+Cpi1z3ytseBjd7LI5bsyvakp5ObFsxXvLC5L8VfGP2M1VMMFvbmmg0Wf0htA4qYFlqwiTKxCUkv7+1EJidE3FBVHI3V2AyQmnVFx/zHOfvMSJUqIniBNzoYHVsms+dNJnvnWz3paIKYMGi2EgjPg
rp2I4WMDQE4tPmHv622dTF90Vgby8CsH+8i5XVHEmSzgUyYOyK/eJqirfTxIByV+TBG6SybCEqKUR51GXd8sojL9otu/A3t6sUbXyjrFNssolJMOwl0cjFUyrZ8euqv3OT/7h7DUnJvwDw3Z33B7EIY+gOLqwChSjXkrSVqMgk2tvuBRUBRv7rP1WxZD9KP2LZoFLBQl7ve57Yz/VEoTlvkRICdWXSSdEQPLXhBCVNYLKpnd1qjpgTzUbGgUr/Yca1uSWf9umNyG1vydp51gIiGY1yZoIzYsnziPHrZdNMoMxkolDlvjO+WSmqDG9md3wydM/+18fOP2yY/TGIdszZLMR1YprLOnFsXGemKWFP/iJ66ct4GlgppLee1aM+GMhUTyEgXzHJMzJdwIKA/E5V1q1JeLDgZeiXKH+MBRF+hhthhXdBpbTcxsx6I90WriTj/SUGwHJX+VGdBx5vKk3wwSfDCxtbm45TuyiXqEz5d29/QA2orjj9rst3n6QPmU3UJSkIypyVtPXycx9ycqGG2654N/+6+MXvev0OcualKahmKd7kO3rmOf7/Dc+uuz0aTQM6DoTToBAFsMK27IlnIhCWSvfah+cf8m0cN8IizJlhA3sYbteibz45A5SB7nRxriTpUcU1n+VhZH299X92GCCP7U5acVxfDX/Xc4oYpMuB9Q3jGTj9Ov+Q5F969jaRzb1v9fumEVz+iBMaAiiscJfinMxiXtYiTpqpYidOK0rYyeyv5F1HqdeuXQls1dSoDBHAV/ATgLnO2EwHbXlCVOal3i02LaNu1Ub6yUFJMCG2xLTEgjSpLLmKnnIBDcMZHp0fagv9q1PP33g4F4cR6T7rIYuz6krP6xgnJNjUyLyE5Tg6H5d4S/f0Q1udu35F7m5DR0PMbhMOkG5HBbQYP7x/nvX1bK2V5/fvm9L8vc/f56OluCbG5YrF8gBLyoW1D18cw7TlwcwImf88oKqsCU0LizfwfLDeqbgxFw8inJx9UdUPDXSb7GtG3d5WMB1EBGnLZyoko8lCwQNMwnJmL1zw6GhN5kSrldG6vr3j9Ah6xjjhPgSJBdYgKM52mH6Ch61kKvM38ACtf502xc/+yXBkZ7HZLrq6d7X67Nrd204uO2FN//v2w+F9zA7Qb3IktsRzZTSZbEXnt5wEFsAkdEm2ib0D/qTukqqRwmJ3QVw+YXZqrCy0G021M92bNqlmFjAlek/OuOPpVAY9sCIRaKWYWmwTLNZMm4lBqg6yFqoYCXUTSYdDwHJX+OhU6539BvMlS8SqLKm5gb8OpdrF1aIpPk3bL/AtMhQDIsEn3vk5fBBu8bq+Mwt/7V/h4n1NyVTWOZ7MhyO3vbJL/3xnpcx1oZBc/TYqFJVc0Si6cwAK9+/8JWnt6ALiU1sUE1yuLuj4QldbIutX7c3GWOWMTHwFF+ISmc1zl8wuE52tTrUOgV7vEE/NdWB8hppjJP90f0q8708unGoSu1Fo1JYe3tzGddvi6JDj8CYdSJhqLbHGNLv/+UjmJRLhpW7v/8rqB7YZ1m0bbTKVMMc3cLHrz+aKF+4Q7wwb948FvP84gf3ffdzv4vDbCC3E1yM5PHyTcOVIiP+KMpPhwgkU1rly4/t+Mbnvze4j2l8USMptLlfaiSLsRef/AsMTcE0ozPlKjCfSSQ7DLpoRphGFVNIjY7uehLR6C6MafELQka1cFrfoTiABuYpSnWlkt4yIpD1WZZRshSVDwHgrbGG5sYKLIFEdoqRxJy+7vc0aGbAp9QpZmDH+jf7d9E+y2hmTnOaqGHmlhzdIlAYnVo7e0Hbl77+b1hN/eyj62553xdZnCtBsNjgPEBcUwEnaEsUG/nA4F3XFeyaHdzG/ue//rejfUYjFmXzRY7oslFkbvGQKYjNzCH26gtbdRrez3UWwMEvCk5D4ZSEHOjCFHGa0VIeRscPo+cMzTPO1KilRUx9yNTDphaxaFm+KCBD6cJhbF+dXnSZm6EMKRMCkr/
KBOTYYkapI/xhxuxpOHOCDvLIo72MLSj1Bh9Z1kUv+CiYmkyiW6p6FB92UrYtNHFPLGw/9sBm5PPzH/8RCkWqaTpa2IS5IQLXRGg/LeJAWH6w9sXs3771qdbO5kSY3fz2b37zI7+J7WMMy3IgncgjLV3UPOteSI6uOI4wp+SoPGlLJju0j33mY7cPBrXV559t+GgvQieO8MC21VBYEgWw2c/uWFtjt2KnVtRGOEw94gImXM9KsxWloIvzlIHNbXGpVhKXAkMzG7Z1kWSyfyR2KGFjF8RDWmNwznGBs96+1NYyq+fB9cFg0FUJ6a0UAtJ+olLI5pGLhgRHu4A1YSGRleCtjBSGMv2KwKiMNABkA4FQl6CGqDWe5j/87vE1lx3zwC+emNLadcElS01s/s4LUsSNqzZYNoCj37BZIHpLUxZpn/virV+49evJsLbxhd03X///3nPTVedetoimAsVeDNx6o1zqGJBynCCvA9vZbbf+RzLkrWtovfDSk7BptIgj+mu8O5ja457G6pPszw8+64u3EK07gkZ7wGKi4GSVqpncdt82zSQWq2IXWGifhp3weNUZsztnzF7YNWsqPF2zmxumcfsUmz3xyCu6HeDCsUOZTZviY5O3ao8Ojq7SUfAk+atqHzLaF+cplXV0tqIlxIZh41i+3FMkSFnwyUKIpoU3ZhLbxLN7frQx2ev/4VfuXbZoacMsZmqGX9d0jNcU6XRVM0y0Z05hS9lNn3rPb3/y6M7X92vhwJ1f/fVDv2y5/qZrlp/TQr000DKvMZSclBPMIfIci0XSccVfEUsQE5mcmMwHjomz0D72rx/+jjJU49O1s992al0jiyaYx085Qjz4RwW0SAY7Xo3teY394d7HdaOW2fi2Q/cCt3PBXO8SGVGhKJxM6g07hmMiFc20tXhNY83MOdPmHzOra/bUWXOnd87iU5eokTOBSfkxdKKxjyHZkoHnoH3ZNjbFTxHq6BrJp/IiIPmrvHiOKw1fdHz1FdbcpuletC8+E5a1SG9cAZN4qWlYQag98fBzrb7pg5HQ979x7+duv9qCzSUKMymnwfZLsTESpuls2dlTFiy47suf+emhHQOeZMPgvsSd37xn9iNTrrz+4uYOf9MUMtESLIasiDiKd4K8wDdgDA/UmSR7/k97f/b934/0YNYPGzerr657rekX3jPOX4ijhqBY7toc27dn/5t7ug/u6d6/90DP/n7d8CtJn27VgbVocB4kQ6dwoyNsiq14aDtJy/B4NGzZWlvrmz571ozZU049a2VLO6uFTBiUIW98cIKI+Sco6iFoEH7O0aKk1McGcw0ODGFfMJRncrUuHqejNIWyY8eOuXPn8l8lggA/HbiLR8cPjxPixBSAjfNKRJB3+jqT419/oRQMsusu+3d7qM5nNXlUNHGVNKZCnWhG48dOtSWuipCuZNFBPUosPuJpSlz3sbe/7fKFXFEZX8gYb3lRaVkfdbjIPjYRZPfc8cwjv36qRmk24ra/AQsRh+ctnn7t+y6fs8xLlqXpHmuqMUMCV4DGyCAdjP05iHFSjzik1uhn9//vK7/+8UOaWe9R6mxL07w4uBbnj+NAAQODifhyYhIWHWczYZFFg7BpsDTsUq9Ymo2X0Kqgm2kJSzegYVlqYt7CGdCwoF7NmDW1a3YDNg7jteKZiqw53rxVEBmlOEu8ErUAq8bZJy+/O7wPqaCgGRErGJjKfvDrjyu1nNrSVUhXTP4tCQFwDvbjxR5K27dvl/pXSVAWnRhfZVxe1j6lrWdoxEZbw5x7IY256JyQEbEYPmy+sTIZzdd46+Kxgaf+9MLb1iwk+3WnE1SUcJSW7ASIlTFADo+nlb37Y6s7Olp+c9cf4xEzGfTremDnK92f+8evnnb+sjXvOn/a7IC3Kc0LqH4hDMyLBL5QLVhswTSXhd5kt//7z994vadBmxodMbSAbsNiwgR6mKzwYskQJhj4Fzup4EQ52KVh80IkxqC+HU/YGFyn+uL
8lNnzu+Ytmj1letuc+TUNUzkIokhIjjjcCAN/Uzzl+mgQSwQSC+NyHPe3tbWE3ujFvj4iOBGLk52GO5oTX3rKh4Dkr/JhOa6kVEPAbzVYw8s6prZ274xQLwZdscwQ0bgiJvuSCIxrPkRlhrp708G9m+yZx0GXKKaBuZsirwyRC59Ahc0mhs8vvGHJ1Jltd3zr/5JBLTpowIbTqzWvfWDL+qd2LDt58SlnLT/p3Gks4FrfM2F1+MwgDatb7KG7Nj/4y8ci/aZu12Klp8ePTh3IgVZiYsQJ8xQaOpN+j+b1DkdiNghLifrq9ECTv21K88Jj589ZMGP+4q529GcBvtBNBXeLSgEJ7qF7upocsMyju7AObCIVlQQnG7S1mvYhXdFNi6zwE7HYyBCrQX3TAt0SpL9cCEj+KheSBcvBF1plrVOabLaz4DSTiogRfahgdKcWB5UbNqhM19D5euW5TTOXLp28/pUuDlEYCAFEgm6Two4/q+NLC2/53ld+u+H57arhV5O6bjeYg+bLT2xb/8Lmn9ypnnb+CRdcfmZbFx8FFw3bfee0mOpaCr/FNr80+Is7f79r/UGf3aRbAfQAaesOlrDQn0SfESNxNIxoW15Fr/VpAe20M4+dgc7g3GlTZ9Y0Tee6HlQqbC/EzSJSBRfC+QMo2Pn5KJxqXAJSIls6mrF7IhiNThS2PIm4vXfv8OIOLMiUroIISP6qILhCNB8mghftDZoEtXMQx/SZrQpN5vOf/tzWUGqhRC9GkJdFq4twLCI1U4wMedS49twTr13290sLzUQ0a15It4YiqkRCMBYGMyuI97L62ezjX7vid3e/+OdfrY0FMTJOjdnGO1UZPphY98fdD977zMIl845bfswppy6fvojvFQHJhE1aOcIj1ioeZOuee2PtUy/t2nZwuD/hU+pV3YthLIM4y2KehOIb1mvsuQtmLFgyb/a8rqb2xmld9X70UuFQYHEJfzpE1CMVgf6Qo09A+HgR0t4i/yqspjGgeC3DNLFNmY6l5Mbw0ECU2ZK/ikSyyOiSv4oErKTo1EzRYMAkze0NGB+hjQ+c1lOS5AkSiy4khjxppi2u9O4P4UAM2rQ906YnkJD9mhdb0CSqICqBGsGP7Z6vvuHks1ee/IVb70iMaMmoDZsxFSa1hh7tV6zh2r0bB9/Y9NSvfvpwfVNN+9SmKV1NixYtCIWGfN666HDi0IGerVt2JmNGcsQwolBmahp9LWQ8S11Fe2Ag6G1Srv+7K9ZcN5f0R5RAEB++yOluYHZRy/7MO7aCtSl37gwrAYI1MTCHpQGa32P6hwdHMtRY9jJIgRwByV9V+iKAurhDF45M2ad2tcMcFPpY7vLBChUIjR9NnFb/mSwej7+xOzrnBAzPFOpI8+JOVEQ0W5ohQAWgYWIgjO9pZkZYOMTCQRbptS9Yc+Ej9z+ZiEZp3k/TUAAzYQe8jdYIBuQ0j+2P9Vl7g0OHdkb2bBgaicRwNJppUIdQsX2K7fej50v7dMBgQ6POL85Z1KyOmU3HnjHjonfPha6Hd9AsUS3SM1GG4s3ZUKE0/6RqN+Ef1BiLSQ9uNe68885bP3+Tr450RxiaBRpQXsMwNa8HJ6LD0pcNDubbFH/CDGSEYhCQ/FUMWiXHFXoKvvGt7U0YciYDy+o4PhYmtDBQGKbmNr22ec6yEzP9rMKKQW0VvT2LxSKsZ8/wYG+052B/36FQd3cw2BPavWMPSIabauBIcLRhnUbBuBNHz8IAFXxBIMCgAQ7DV9BHk3Y0iJG0WrAhSI7zgcMqlmkaphJHn9Fg8QQbWn3qCR/4+HmwyaAtssmMixxRq6JgeI8/ZW6aUNsyAWXwYW9uI8ZefnKzHqtzGg+OxeuaMRVLIGkkH5xIU4/qUGiYyJ0rhmXIWIrIh4DzEeR7KcMqggBmzVSYjAcCPmOEWqCglYpkRUIFF/CxMHok3kADCwZDk+vdEPtYtIzw8x/6phZttEwVehOoAwzlsZtpugBtliZ
ZEQ2tGXYMKkgqaWG/CKw8gsrJt2igzh50JgsLCmleQezfQFCgU+jqByowMLF8NfrAUJDVxj70z+9ZeVanpwEnLtLUI3rfyAT/qd9KQ+fkHD2RP5X5RtavsHcbYo/e93SgNoChTFAm12uxpxtMywxd92O0EdoifiO6D/bxApW5DFKcGwHJX240qudHAw/U1Qz2Y+YMTUBM5lPupJRQg6iUE8KRSz8W6E02H9ASdMe22q5YBN8fTEJgqTiqYSbtBIaBsGbQgB2BrmGNJ/QmVChhJmEjjax3796r2R4F5v+2FwdW0rwgzLZAOTAk4w78xguVLplmKXpyINlXN837kc9+ZOZxPl8T6azY6ovm+Th5IV1FERv1SYBeFfbwb18e6kkMe2OCpbHvBFCorwecoC1aRJ5MYuZV6+uWS7hHgVeJB8lflUB1QplkHd7QVDu4L+xEBacIf2EUlt1XcuRM6EFrh45AC4xTGU6YYlQESgTm0FldY03k4DD4F/ISasJUY4Y31tgSaGhuaGxpWLx0Xn1T3bQZHR2dzR1dpKRAeRkJsyce3bLpLzs2vLQlGrL8rNar1mLja9hE8Dyw/jzVJcQjxCZZ1Ncev/rK895+xbJAE1lpxHkXDRMgqaXqHLQ0clzGpCrFUxZww8KgPeyPv3nSTnoj0QFjhGk4TxdtyGANLSxuxDQ6zI13bRUl2NNPvxBpKi5AuoxSNAKSv4qGrJQEaFyp77POauv8FsOJs9g2ij4Fh79KkV9gWlDD4OBguigFJqJooJkUcaosUONN2vFarz9hxL1e9ul//+jcE1yrBTG4B23Fw7ATPBp0MmljvsIfYBdetfj8SxZbsUv2bbfXr934+itbdm3brYjzq/kgHXLB9EJ7e/vKlStPXLXkuAt0UJrpYTjrEQUGVXHyKhqvTMmLqO6oqGRFZ7G1j+8wh30GJiJqa4fC2EqXqBmdWb2eurFQNtHhxUeJ36fwwBCNf0lXSQQkf1USXZdsoWCg8Tv6wYLF8zY8txejRzqGhdAqceeKBMgFzknqBIrTBgWBFDbKQ1TDD9rAX5G/I5Xt3bs381CATxQbxXIEzZo7bfvLb/L+LuZUY62ddIIGsqTRKMTT6OhZUVtu8kb1oBlCP9MwdVjP5rcp81cde5V5bDLChoftSCQC2kJBvF5vfX19Aw7vEcbrmIDEdAPnfRcoECxKxOvoVsBc0I2qFje1HRVS8ANVCpFhzTbAfvHj33njTR5FSZow9Qg3zmig7i/mJVAg3dJoD2sMNdKErFfzjvSwmpkFZyMjFo+A5K/iMStTipb2JhgEYDgHbCVIStwh3s1fZcptlBg0/SVLlmSodNTLwh6wi1lHi6LzDcHIbDPZH0o0al5a0cP5C1qSwyrQXLCAkdhHcA54zeU8AWzIoTSYAdP00goBxFLiGEkCbwEQwVNIR0nFiiHuJ20IQDl50BM5B0PxWK475kbNKHv+yX26EcAJINh6CId9x2JxKga34UBGS5cu3v1Sn6J46ePDZ2qy/l7JX+X6BPLLcX5N87+WoZVDALuAYS+EyVqAoZ1PeI1Xdq9PT7HJeLHGfoeDLLvaDUwf0tg7TCWUYDdNaIJPhAKUQywZOssrFNOX0Lz83GFzao/HAxbARB6f30ingF7DlTgSzi8iuNEuHbWcf6mRgKdi7Df/e79qYS0SikY/OcOw8BIF4YXBwUPgcVI2ObnClCTYO1jOckhZOQhI/SsHkuoEKKyxtQH7t5C9ZtqJVpF+qshfZAG5mBZcvASLd0pybR3NsCUAyUBNgsFAH5kLYIV0iqfGYjGumiAWNC2iAfhErQULYdgIHlEsYTLG1TYRwO9c33E9V8NLdGSyV5471L8/7LWaRZYoZahvEOGYhaTpB3QYAzqMbFNvaacxFuzFEH5jNYp4tOYh9a/D9MnjFKIpisdHBwaKEjiNWXhEoNOYRfMuuayYtaPhmUQisXr1qYVKQwHTFworLqTt7IIuRDQEM1QYM/QcCqb
H9scUDL7iq8iJuUBVULLgoHZhYZNgLoTDjZlevBCFcfvTxcsmuwkETfyaerNw+ImJsl//4n7V9KlY0UQlpEH6/r4BoEGm/3ykb8lxi0XhhQ0IYga75TFqAsFK3SV/VQrZCeVqMGHFDoZc2xDfe9yFZ6y0gsUcUhsrWt5wkQo2lraWmHfMrGkzYYPAOz95Y08YiHGpeuar9YCRoH7AqqsPbTWlfFDitBaVLQg85TjnHcrmBE6udo6o8ns4Oa57Zt+e7Qc1y4eOo/iMQN6ODR1NpyjM69f56nxeBBP7RfqCQdl/LP8H4pYo+cuNRhX9nLa6ZnU67ZxzV+omyjFOSx7nVd46CI2A5go1S61LXHTV2+igjRKdyqbOnAJVio+0e/p7B7hpaWr8C7Kdqrnzwbg8tzMgFoDuhhEu4UR3UtTfHX8SfrBJ+iK2yZUgBg5zw/OGQIA5zB596Gkvq7OTOs4hEhSGj2B4cFionJiswI8B7baBZZ58fQHiYEAv2Bssu0qYt5BHbaDkr8P30StsyrR22E04ZFSW1pu3PqTlYSmObpieEa0ufvoFzdTwOIfmjT9xIG+xbVMbDb6GE0P4AwNhE6cjQi8bW6z7FWoNnQsNXmheDgjIGjhMXIC8MVxLj1JCgC4O9cCFgSreG0x1CfMmzwkk9jPZrk1Dr63bolt+nA0MohKSAUA8SofUOgy59NhjhEW+EINF6/09k1/kkFMWGZAHAclfeUApbxBRB0nkLT4lmp+To+Ig2wY0YjRdOKfROp5xilFInKzkpopB9hG9PnHbV29haIYYsuE9o+x7VrJxHjXswlhn40w1rmPg0LDYiNCtKI2bqhwZ4CVc3EbCCUPMPC7zuhCfgBjkRVCLI7QVk1Qv7INKC8Ifu//V4F5mx/hhImmBQgtz38UbEQI/0mOvfPDX43943hrBgXcY+QJB0UUxbXXXjt30qRKl0Z2GAlPVRlFwwKeKTYFS8PL3dJOurAhI/iornAULoy88P0gNQ7/pLz3pHYUQUyFx3AWhzpRumUo0poauefdFMxbzXQNLnHlGE8ZBtp0t0J/Eoh90BYN9EQTyxu3OP9uPNo6mnut47TkTZKco7lkMKWLbHSTze5V9G82f/8/9t978pT/cs0mN0QlG0MUKciiLxbauCz/76Mt13masQHeQTxUVsMKCIl3k2lpYfjmiVSBOCzy5ECdOQfnKSAUjIPmrYKjKHlFlM2bPBHmhAyU0kKwcRCPJvWdFG+sRzVdQCcabTDURVcJdC5rXXAf24srXhDQzllxXeGtHG3plKCGEmYbVT+YClGuq/zd2FoKyUyzABQoERLgrh8K8yDRNIiJBEsYZjCUSNLtwz50PRPd7PIMt93z34W9+6oHgDmaNpChMpBudlDSqVKvAiyT7031PJ4dgrurDASikY6G+Qt1i2ODfM4QFYOn0zc3ip0hs9cHF2PqBbUI9LawiMlaRCKQ+qSJTyeiTRGAU3HQQpJ+O+OJukhInTIa9a5R43B48fuXcL3ztRopOYzgTJisoAlYpgnyJJHjvifbkKcAJdhODX3nHvwqQkT8KuqdJ0Cm2x8BPAmMBVXnqwZ2vPLelXmvzJGp9RtOrT+/44j/9z+O/3z7cTSYRdMxtmuUh0T00Jka+wvvZX559PaDVY9iezmYDWfHS0x079th6eID4SwyBeeqxltuFLEby0YvEnGya4PIXWoaWgIAL7hKkyKQTIuC0EyDOLRdojsxfQ+NEY+lfkIl24vSzMj/8vMk5yo2Q7Mh3SkKtBkdd4DR7daShU73uA5fXwPRSjHyJSEiDSFmXkx6erFdZj4x1dDR4/GQoT29spbe3j5JA/3IK55aGN3x0X8w/ut+4q/n/2XsPOLuK8+5/Trl1+65WK+1KqwpICAECDAIDBmNwr4kTHDvFTmzH/qQ4eZ3EeZ2/E5fYfj+xUxzbcYntGJfYGBsMmN5FESBEESAhoV5X2+tt55z7/z4z9557t2qlXYEEd3R0d84
5U5+Z+Z1nnnnmGeqr0yt/fxR+gMv0aSygDh5QP/vuzVGVxN4GQjc5okjVDh9U//OVX3/rczf27haVes6PxJma4TEQZsCLU7Vvv+GJTH+AJleB5ZKw0oBsVuWXNPv6tJFCeS4VTySKJm11AGRkHQc6JXWdhfypuFmlgGnrWU2ykti0KCAjJY595Ekmj+VpmPHMr/GUv5rCjxgoG3g5NWTF0x/8+NVLTrfFysMMxV6j80s0YLcvybwKwII/6eo6dot9hgnl1xBkdD7TvTOLn4BnLivH1N740/WHdw5ErXiQC7xsDp3TiJVwc/Fqq3nn0x2f/+tvXvvfj/QcEAgzDJTGGQ1h+Hw1eEjdffO6Kgxee3IEyhgnq5mE6RtE5IVOsHHsPNcehhURuJw9u/cb/NLYWAw3Jq3K7bFSoIJfx0q5WYgXcGyiTDJE/CU67NNJshzCzBApjxU+YbwJprheSvV++C/ed8GlcwW5uEwI3hqmQwcjZHiVp3YEvy5vVU2SuDK8bbb7ifyLx4VhOq0KlTKBCqWbY/KRufBFVC1jbXq0i3XDuticHAdCajYWs4Ke59mWi2ZDkHZTHfb9Nzz14/+86dBWzsxWuYzyQCLDKPEnA3g9M9DjeVm95Mj5vwJTUiUNtZIPsNjfO1BaTvVVTW3ZgUOUJW9H3YSOdEz1qUQ6EgVm9XN8pMwq78dTwLJZDytumivTohgfkieMQ/iTCV+VPRQwwU5zYGVykYG/+fs/veBNjQJPmOI6YtSyVKblZQpcnWCfsmtFwIXurgFRwXcFv8hxQkA2wi+DEmOyYOZ47BCmoQ82aXgkXZOIJyNiZTA3YMf8GNatbYfN6qKhAn4JDYXLi6Lf4PVnH793y+4d+694x2uveNvqAMM+MTFYRgWGD6s7bno46lQrz9VRBLaEDSMj6kZTSGGjA70jTC0N8OazMNSyBMmR5KZqfJr27tqvgpUCYbqEY6pcuZ0hBWa9R8+wPK/46PLdLl7Sp69682W+Gs5rMYzmYxgicolNPJEQay0kQR66P3yaCGJEFqO/7diSl0s/EUmXvpD0oNEQi3oNC/P/8f1PXvDmxjzDMqo5LA0bEluYiImvUQ1AnuE16oW+0QNy2YplWWxhCW/i9HanM2X6BGNjmIwLT8N0yz1QBpQBG0rX2ETC+0I8o5wKYMqFsDAeiYOhD15/cPvGDscDSqGUI8eESK0FuBCvccl5Rpw3m3Njufr+XcEtP3j4a5++qXOz4oBJeDFSuPaaJ9P9mMlJoJqrbGxkY1dCVG3NRwCDZ+ikuCpx+ECv4B2FocyWOuW05ZynW7TUJmU9dOiwhnOCmLFW4E3DelQ8M6GAoelMUqjEnQEF+FJXWVkUK+n82pEWX3tcmGj4HI95G76ayMMOoVwQGXLqUp/+5482LxO2iyNVhWM4Hs5ScxfMgY8BbuG7mGr1IsEnr2PqVjOcPgrRfDtiqfQh9aNvXpvuwdINyC2gr9cEBL2M4yFMmPCyQUQUI9KJTI+7dcO+z3ziX27/yfY8S4pD6uF7n1RBXHS4tIAS4gFexiRGkZisR8b6ugcFyvQE3BZLs1ktTmNRALSUhYihPg7alLuKOx4UqMwfjwdVp5Wm+SCvXHXadfk7YnZNYXvLNKLKQC0f63AYZmuMjBnPd0cWn934j1/5gGLqQ2p65MiUUnMJJnngpTgIzYPS74STvtLrMT5LtS5oEVVzORUbU0BuV2eq5bTiGtyYwMf5FqMaMSfBkuIN1zyZHWC1UTS2AKzyj8FERQB+YLhkMdHJ1f7qe3fde/NjZ555tp8OfFGaEHCSbwuMISIwME8nIcky2/WdPnZoawpLgziqti7hsh7rE46wAuN9Pf3SDABpqZkmKkXl2TFRoIJfx0S22YsUq7KtqHyqmZgwBZlmwoxJGVNFxy2DxrdTTtw7/9KVH/vMlZyNAXuAEpSoB4SpFmPwkMzKEigmdLR/2cLZ5joR5mMgpGuxY6abE0kSDNWX2LEJPOr
GqO2hTere3zzq+JyAK31bgARkAnamKpK8s2XWjgEv1blz+KHDT3B2iIFyGEv9GnrDtYFKoZBODnETeNIaGMJkOWrhovlYx/Yyno1qPkxZPj8wMMJSgHp5IP0lboSXIbupWvVlKM4rPUs6eXhJXS3VvlJ193dxPCLjAzcFAYwszAQwMq+ywAi9vCA6svatp3/sC1eKbYninFGSLOIXXFp4wR0wrmVoa2FTWVJH47XYwqmSNXGjwgpkHC+LV4ZqkxcNFQfhlYbVj797c24AAZT5MCMSpPYwUMSUOaPGosn6PM8tYIvDQtJD8GNRlLzYXC+hYb0g6LgUCADjWVJJsVTKH+nuOwxzpjliEcmBbv2dpXIXm6L0pOKbCQUma8uZpFmJezQUcDBk2MgqPd/qI0LYZOkyg8vHhz/+t3/wJ393hdFQFeZLqcF+Fs/kQSRQEV8k03LllJNRDqqbKflFDd2CQfCE3TAcx8S5CARM4KwYhmSrGKgoJURst7uzb8zsdoI4R/WoHLnCMoQenRSriphBpAq3/HLD0xu2OCqOzIsL8BLm6yicCMvkqyJbhUoZG5W08Q3Ek8AL9m8v0s1SjXPqI/GCerKAl7BrWGE1ehemHBrUjqJIlaBTUaAyf5yKOsf9HWOEXdzzmw52DzPiALCpvs8FIdcEhfKc1N986aOrLokzWoXhQiBGQnnVsbfzzhue37+no7drIGYnnt3wQpB1mhrmLFiwoLa2en5rc7Nc9aetqREBTUyPWS2NNuUoCHt0hvJkNGoUyuGquoaaLjvDaEVeLlsgp48Y5bWdMHGdx6hQOljZE5l0C98aqIPPph+/+9mIFZfJogEvSCASMIljfgtlPsIf81EvZCLxtRCNVJil6nm3RaqkATZhoL+nc6AtXyvEsdW8NmVjUzcDhhdcHsvaXT1L83OEC5ZIFTebFKjg12xS89jSammde+C5HdOMK1wAMyEZIKKqnnf9WI31+f/3sZY1WsNLa0WEg6ers3PVqlWrV55pBy4ioVXLtnXs79m5fde2LVsH+4ew2uxE3GQyFqmyz3rN6ZdcccGKC5oAIVgQQS7BhJJjBjUBuOpgHGSLFQ2RH2EFDJPwAmRH77T+wRGjhVUzIcFrz/M57YN1v1//792Hd49YedFEE6E76HJ0sDUqc61yIZItA15CduNKoYA5ZGGOmLwnK00KuxYrrNF8lhtx0k4BB9lysskc86TyO7sUqODX7NLzKFOjn9vq9FWnbrxrqygNidybywiEJ0nKCjwvhw45y/rZILN85fw/+8xb6pbojY16CJU+85a66PIzZWiZQZ9XS88/q+DnHNY7d97667u2PLnTGYrk+t0Hrtu0/tbNy89YdPVH37Nkjc1hsawqhDBkhiOgNhbCdI5LlrY/d+8+BrgTOIcPdMGhTFEBYpSYE13FkMsrh8tC5U3G+qbMW3hJtXxhsFzy69iunn9ybz6XxL6sCNqBHUes0mugL4TnDxAkN1OuLziaXDQHimN4gUHZC66TkomkWMWR5AUfLcd1ort37DvzdadLCBxWpBNRbxDNPW4kIXYUcbTlqE+BhKu42aFABb9mh44zSsX22JnIcr0jwhc9eiZPDg3KdDobj0U5ZnDZKXP/+h/fEpmvl+cBF8QthYha3RU/TA14GA794muG09o3LVl71Yc7X1A3X3vfXTc9lIjWJO2qbU/u/9SfffG816/+k796Z22LxAVTSIB4xjO2XKSs57/CiciYtf1c0Nut6loZ4GPDhve8GgNhvJL0QQqQj8yIq1MWFDBLqGWpmff8srcTakQo37D65ld+khl0In7cEWWvqXIPizF9TwiCMlkdHQ2s7OOcbf1cysiGhHh0WOQAIcEsMcvB3ZiYo9Op3B0bBWj8intZKWCrxcsXOWiZKjlL0QBPOGBGlUxzDUz64tXRwEovWdX8t//y9kS7cqtVms3J2unxXozETTl4FR/LX57z5Yqo5lXqg5+57Gu/+PTytc296UOMxrh
f/9Q9O770Vz/cv4ldhBKWoVdyJoPwlxeiAjYX3Sv4L+7yWb/38PARe1U5ujHWza2fVvdct+evrv7utz+77oUH8yiRsrCArUHDBjL8w4sicXCm4CXvAvXoHd0HtvSjjArd4LtkrRBaaXIVJn1hfsXnpRpNzwdycRWcFlMaVg7UHuwdLmGTpWIJWsfwbEI52LTOToz1VNxxoUCF/zouZD26ROE6XHQePdeOTx0RZSYnxhYib9GpLZ/83DuicwWDGMac9zp1xPFvZYVNO3i+puXqU//2vsduOvjj71w/2BtE/GTHjqH/+2f/8pG/+73zL29zWV2c3JF7y7ykVtn0WYSE9+nq6F2arxIGakoXQoqEYqTn1EiX+tl3bhrq8Af3bbn1uvtq5kTOPH/FuReuWnzq/KUrqsPN5xJWc1guDCcIm1bf/fefRHO1HIvNxqBRyU5ZgGm+pB4l5BodBxJipXpA81+8gRRoW8SrWAopfE6MXnF395FPlhudcOVuuhSo4Nd0KXW8wllqxdlJL8g4gJd8sIvOrDZqJqL4iL9+f6pn8aqmT3zht6JMG0VOLTIpzZgwdIuYVBbhiF5SgB1jAnv+u+affdHH//yDX/MQ12SjGCn9wVeu80Z++7J3twWR0ZxcyImQuqWqalUiGc33M/eTs6m7OhiuCwS/jgRhpbJhnMtXP/iP24IRty5Z7+f8GrvZGggOPDO04fZra+oTzW0Nq89def4la5avYh1VRZlUak4VXZDrf/xsZpACaiDPw65p4ZvJupx646espewn9RVqUJ5OideSWBB/ABNgWvVEQlnqvAvP3f/s/Xr+WIjd0dEhy7vTp8akxam8GEuBCn6NpchLek+f5qLfOx66kEjmHcTm5egwujS+4zm12T/+69+taSuBVybjR6NiRX902KO4A8Iw1A76wdB95qt/8bm//eZQR7Y2UZ/P+dd8/ZdZ/81veu8pUshx8EhEIBfjDk1zm7oHUCRjN7nbeUirUJhZ3zRLEagbvrdp0/od+TQHSsKOocQex5Rqrs+uc+ajm9H5YurWbQ/d8st7qpqiy09vP2ftmVe8ZQWk69+rrvufmxNWc94DyPVWoWMnwzTLWgpG3amvnKKWFe0TIYWlhlKDiPnNCoaez8oycaZfxWCWK262KVDBr9mm6LTTY6DKWNPjbeXqU55bvz9O98eWnhmHJh3NhemNdPKpD+zM7330rQtXcU6qvOaJ76sY4BVC3tSjd8q3gmKual6uPvqp93/9n3401DeEwdKhkew1/35DU80fnXVpc7TWlGncr5wl3ti5fQ8W4RE/9XTqLTX0rImyE3ZPO2aaWP7DhoPnqb79/p03rXMzSScaDUST19fSJSuTzjH6/QzCLiwzJ61sLNPhb+4+sGtjz0//7db581qB+3qnJZtFTx4awAWNK9tsPBDNEKDKEFkrUhhgkum84wwODDCNZQOr5K7n8lo+VsiYkITp68u2NMODVdwsU2DcJ3WW068kNyUFzHhj62CUhsA0lS9L/6MdI1lMfrmB56Y8d+iq96z00anniY7raCArzI0YwdqNTuAo7hCKWQm18ty63/r9N6ooO5hzc2paqlXjf3z+O1ufOoykSSZnYwso6dc3NZjxjUhdDrIlzETByosCvyjLrdQsr675r1+ke/NBmu07VEBm0UY6jkdMbonph6hrJSN2TcJuTKhmNVxjj9R370z37BlR2airIoQ3TlI8Dm5csxTyINPB/kFPL3SYRy0tLRTX+HlLfcD03l6MWuAKz83byu/MKVAh6MxpOAspNDc3yTi22E1XcAYBzK8MAztj12f+/kufQM4TQXA92oWx4Mi4CvChPQITE13oNJVfhTCMMIy4V6k3/9YpK86ZPzzSo3K+GziJfMPPv3GLzz4+dDXL0pe8cJZqa58n50sKjroHD0x86LSwSMWCA1GChPQ+Xz23rv+Je7arLKuHsJNitsEgEbsPUQ/VexCR87EZiBPJnJxnZbJ5Ls+3Mzk
WXpnjAvwkI2actRkyUcMoXExvi5dJc/q/DmxTeJkSmU8GhQ4vXftIJNaJjXtBXXF1dXVDQ0PGr/d6i1m2fbsOjAZ0Qk92maiV32lRoIJf0yLT8Q1kcRBsYySCDUNmjwXzBny3TaZ4MOSCjbC205pWXRzlE158M5uFKgKLRiegrUZ95BNXN81PpHIDQEUsSOze1HXDj5+SQTcRC9bY0uirrI95BozK5PKp7iOUTRTV8qIeOtyp/vOL349k6qx8NGRbTGSDNcWE6Ki22HOEPIozJzHz5VgY3EJnlhN0tWNzD+4oNzwWkz/2vwJnvd2ZApQrVVtbmmZTaPa05738YP+IkK7QpMeeWSXmGApU8GsMQV6e25aWZlTAcAxaPbSLxcD6C0aP+YK7+cvfvBbmS+NXYcVRAhVRrhhhRn9lvAFQlqiiNS9Sb3/flSNBb9bPKN9tSs67+ed3dG4NcyzLyFbz2xo9himb/ZAJZZlCThRMxzBcBxAME2ll1de+9DNk2xgCFL320Q5S6AeCXCFNwDhcyEYZlBe7gujFRSJcxsTg6JRm886UqvALAjM9DKyenp7wo9LYKNPdQpbCPdqBZ/WzRknNNXnDusxmsV6taRUJ/Wqt/wlRb0vNmdsUiTpiLoHRAAYUhq6UjrHAIEjWR1935UJmQwW8mqLcZXGnCDXFK2CDPTkYlHjb+884Zc3CPNsD8vn+npFoUPOz790ouk16KJanMGeesrECJg7wU92HRSV9MseaA7NUjP4/99DQrmc6RIBljWW+wrg6zcKPeWhu8BsPDJdxhvniYRj3JfDAD5JhX19/mG1VXQm/aEeZV3rWUF+qMPV+Ccr0asqigl8vd2trJqN9UfRwl7Ybpce9IIi5RESEcdDU6jUrBFN04CmHqA4xgzqRHxepoFGBHbHf+ZO3Z4JhADXCSmHW3ru5+8nbDssUcrRL1il2caPAirwKe9WdhyaeQDLIQRcSRtY33KVu+ukdI92sGUy1CK75LWG4qDU4BUiFs0We8DwMUA76pnSwbeE1urzHckfJBRpZEdYncRtCkb5juQO9AwX8gnAuW4hKeshCybxzCJkgPsF94vFn3BfgWEpUiSPUrLiXmwJs+o2ppjl1FuY/w+94sVDygbf8FatPyYg16HGOUXF8HPiFjujZFzW3LW/27HQk5qLoMHA4c/9tjwp+lQ1AkcqjeNHaaDhHx4p0dshBapM5mV3l1A++ftPmjTujrCpiQmxyB0EMTcAp5oYGpHhisOyl57nGoCTTe4Rxfb1YnS06W1XVFE5Rk5KL2Wl9im2ppSqDrkirGf+tkHLGJJyVBMCvuU0IdkSawgQwz8Y+9gnp1uGD73gLF7dgJGZiJ8uLpcExBXBMHH2Sp6Qo+vyW+oOPvLc/ezDvethayI7k927v3HjPAcEvzUwVYluYvmrRciqx6dODxRjelxXKBDPFZJ/jo7cefOqBbVYuckTwMjjFr0ExagqQhb8kGwIZfJnAGWI4MUshKyGTXcU1SbM4WWR1Ndc5rsiF+vGHTHEGOrnFwzoHvxEnihVp892RKuewOVGl9WAKSzHQKjOSYZO55lvNiINvYw2j7CNQyqfiOwoKVPDrKIg160H1fESnyhJkUz2nfOHG58K6JDruQJoZ/4yi8WFkFIaONMIrfHj0HvKBCzvjdXVLz2yFBXOjQGqkvyf1xPrnMwJQBfPT4rNVU3M9jJUL5vl2D1bAJnKsTchxlylOxr7T63MjNlsr6YFmr3V5BQqRqamZHnIf4hQensOLRbSLamf8CPENuo0hEbehQ8XNONLEM4qT1NlKOfSyCVBYhoBeYOcCJxO4ad8ZzrmDuUh/NtabifTmk0NdQ/sFzwF0UQHhTMwYckxWY9HRpXa0aSyaHBksNAqop/Op/MwCBaYSPcxC8pUkpkMB+jMi/JaGLflD8gEvbK0uAZm816P7CB2fMDqSwUAThcGjR+mRy6FzKHBvpYwQVlnqinecf8OB9V5P3uaMjpz
1+IPP/OHH3kBQWTOEO2TXQF5V11fBGGECyPfd7Vt3Gt6inAUDvDKDKpZXP/rGYwd3DDqqypFNizK8JQ9xpWxNcU3J5ZdAuiL4jQMbjJPnQp8CX4aHuDwsTwF/qN8gfmNnSMJIZI61ldRxUh2JqBeBJYFsDgiDvw2wDqKcrBu30rmh+uaahubaeQtbFy9bsPjUBfOXNM1pUzk2QcBQaasYy1cu27xxNwsxTHfBNDGJkQ+6Orz2tsJwI59C+SSTijt2ClTw69hpN5sxmX/Na85bz8ng06NRRkPRsU0FViA7oiJ1xUej/xqoMs/CeAWPHvtmVI+ONL07bQLs9W8987afbujrw26iy5GIwbDz0F17Ln9fO1NacpHjw7Pq9FNOv0E9wHockJTNaAk/NTD1KWYVi6onbhl4+I5ngrQbt2N6EEsIjRzFQKP/mpIzYxR40b94QuQS0MkjYSo5YpsbuFWdspDRBAs94BdObsUMj0yTpaRoo8nKLzpsnpycRvwIWzutWIKt5UH7krZFy1oXLW07a01rslY5MI5sBwJvwXdIJFN4gT4QED2YZDU2rL1oNMmx5GziZDMkuNbXN9CuGgu4NZoslKTijo0CFfw6NrrNYizENGJLYWH7fMQ3TBOR3MgJEAIMMsY4YTsWT+7d0bnk/GbueazH3iQjgIGkw8xK+Qp58AdLYW1NfQexY2Upz7Wy9ro7N1z+2+2gquxUtlW2W+15oSvixJHZRZxILBLfvTW9aLWYojdOqoIYqFf9/Ic3pHp9QhbFY/BfRxYDlQNWWDUjzi+fMOq6CyhBInYiEdLgFIgmVsGEk+KZ+MwreeyKyloqO+xb2UyQslyvpjFZ11zVvmx++7IFi05ZsGhJE0d6x+KK49kEsHQCo341eBWeaH+img2qrJUKziLoIheK1CVrGhq/ijSRKBU3MwpU8Gtm9Jtp7OLQtdT8BXPQU+VzbgY2Qw6kEjGv6JjHNz/94qW/0xxou1I8nypbM0iLIQhKUsW7Y/lrYr/+jZfs23rDSHcOe82Wn9+1+dA9v9i77/CuXS/u3b/rMDqrwQjnVsTYzxN4Ptt75AQzjcAFCKOinvrVjx88vLfPterZmmPAgIlZARX0TvUx5TOqEjwsiMG05B5IwhlEo2p4wlvgiidRhIXa8dx4oCXSQ8jL7gCZIvJ1kK3X7DvN9/R1z2+be+qi1sVL25actqh9SWPDXBVvkhVV+YgYnRXhYfVkmWdUikRJ2Fy6AXmCRghhpBLsqLeraSmecLqaNBYMWqB6ewsn3ZoiFWpdvKn8PTYKVPDr2Og2C7GYcJRSsVRDo0J7XCyxaA6BYaZlJ4w7OZzisQef/EDnhVXzRAuSTToAncBKOS6ZoVr+RPuPFrzKEygVD0WK8xv/uWN7Ije3yqkHMvyU87/fvMWJWFWJKr8/ybGJ6N6zzCAn8rAN0s7v3rGn/fSlMslipDOwc2rXE9mbf3pvxGtyzb5tnos+WwFiwGoJOhrF4K2MPj1/Qj4rRC4JTwzYG3DNdU1NwSxQj+ey2lFAN1mUjNdEc7BZXspNWDVNiXkLm1eeddoppy1pW1LDfs9ocSFBUqQgAFbRbrXBXwPigkYGpCg1raevoF/19qiD+4a3b929e8f+3o6hzr39djbmRqIejSiOA8qzXYe1FUPNrkou4gw7Z/yV32OhQAW/joVqsxeHEWDGrWJGVVVTlU7b7EIudyIb8pSfsn74jXs//k+XM9liIzOOYcGwLUBYGQ6Ux501P1OueoUFwX2bBi3O9uCojrxrp6Ns7c7BkKViwA5FIjt2KJoR240VMG+pbHiijIx2T4xMJFWDjYELcVJrzR/hkYgTOsArFouRYE5zYoafMqBmcI0n4CVviS5ACDdmYXIHq0jk7ZAAAEAASURBVKhwgHlMOWfSw/FEhBPqTl25aMHi+e1LFrQuiNRg+lFPBll2gLo2xrv1fF0S0XAqTJawTULdAqJSRl7ppmHP5oHdg3t3HNixbd+
+XQc7O7r7u4cT8WoriHKCN1A+2J+Ou3G+P6VmCazenj6JXhpwut0nrHbl4bQpUCLntKNUAs4uBZggMjY4iEJV1yQzXXowi0xGIwEDHxmKoxJO7frbNr3mvLPXXNUgp9zrcQZiFCBMj7pSsUhwdh1jzVHnXLR619P3uVY162qAKJCSSCRy2Sz4U0Siwvh3Lffgvk7hFTUQEOD6nzyyb8fhiFUXwHzp0mq8K4KX1FbG89h6aCw0IEV2eIAtg1/4NaZBHoFy9CeiiThLhDlO5Y2m2xa1nrJq2cIl805Z2d66SEvZDU2KlCEj0MXwsAJKIYSiT6/BVU5FQxLJO18d3KUO7undv+vQwT0dzz/1QmoolxthgkxlSY4WiGITJJPi4CN2ljNHVVXxBJocHAlOURGCsSbr+eyp0qd46AJobJzdFnqVplbBrxOj4enWFutWsW4npeeNeklLKzgyOBlrrh2N5+r//Qv//ZNL/iaIJWQGhnyJiaSwO1NWgZE6dYApYxdekoKrlq1s53Bpa0SETzxnUjYwKGatQBCZCDJ6RarF4oNADFuIRH2K1xSA4DCNObQvYiAQy3sSDG0xke8x2ySEJKN/Sz+8E9MzIrQiEnNA0UdlM2bgZD3lsavcz+fsuMMBZex+X7RkcVv7wsbm6vMvieZZFdX7NxHBsekqy+qI5nUgFGma3IQllPQlWwotDj+meLKqq1P1dI5s3bzj0P6uXVv3dLD1JydaIyQUtRIORxx5SSJi/kKOsUW0Bq8nFAmisRiiv6yfBdaFJporlABAs+eYLdxUUpfFZFn5nSkFKvg1UwrOOL4eQ6Ri2aecunT/C8/pOYqkagY041wzIHzro0lrzvvf+C9XvPM1H/r0ZTLeGJeirwSTU4QofVcoknnGKwlZeHZsf8gbhDrj3Ia84+nZGcxL4NhWrCrCCl7Gy6A04LrRzKA/MpiLocNq233dAxa7AItZVyeSjFsuasfIp0YUWk8sxUBDWCqBFe0MqFH0wE8zJcyL+lUm66fQS6hrqJozv37F6jMWInE/pX3hYrYqaA6LX6JrtDL04I5sKLxkpqfa5ATvaMBL6JvlwCR1aL/a/WJPx/7u9eseTw/nBvpHUsMcRxCjZFrXtFYbISMFsUHG1FT2P8mEmHQMHEqhYQy9XI45vWtHmNBCAdYHQHoWOKV9AqbYYjVXCqPbgsxJplhdXWf9M7OGKqXzKvFV8Otlb2jdY0XKYs+Z0xggPmEbpB5s5b2bdTpGQT4bjVrNd9+wcfe+7Z/9jz9WiJKE8yGqHqB4xg+IWaofCaNX29BUOzhCUcnWs2K+F82eff4ZybpEc0tTVbzuhY27Nzz4dDYFt+imhgdlx4yxmSyrE3UAC1MqsWYmA18KqudfYtRLdtsI6wZnxls4Ml+OIBOY8cgIzZK2hQsXL28XhYal1dUcZU3F6bkaEY3WlaQGJXT1pXyatxLxIIQRoOGR/iBgpWxY7duT3be7Y//Ogwf2djz28FOAixNgxDUWtRMcVG4HtQn5hkjqcLfaJ0nKgrB2muTyROOYeSaVoRLMKYWvNCypcF4SRcMfahpq/14171RdSN1oJDEqhWJKlb/Tp0AFv6ZPq+MRUvp6mO6ceY05P81Aot/LqJO5iR6SeujAC8DjRO1ozHG3Pd7xxU9c+9ef/Z14C+NB2BRhMExSDBk9PIrJFqZHxdsZ/WWT48C+Awic2FLuRYa+f+PfsqucCVospjhlgwI/tv5xWTK03ZHh7NCAqm7Q2VmqrrE+53vo5gMslDUcudTUC7K5XIYTLVUUTMwnqyPomqHa3r6stXl+45rzWkTWLuQoJAULJ3J3AF0QXadk0Fv/QgYWQSUsr+CRMqpjj9q/Z3D3tr27dxzcj7j9UHeQQ3Ym66QROx53miUJ2WXNGqnEEeOIAj0kYaiqk9KZT/0jnxity2ZgS4ogU+2Qn0aFoneeEoqUmnzqFCtvj0SBCn4diUIv2XtOwWiugf9iajZZnowHttd
hTTBpzX3hiY4v/8M1f/rJq+ctj6IiJipGjFoZdcfRNTTVB9Ye9Flhi3wnA7JEUEbXnchNqMb51SlvOO5ggBRpl+rvVtULNKPEGWv1om8vtbMtL/CZcjKKtRKWzcappnlzMRDUtqh54dL6ZqIw8QTY9aWBXGqkAUVXTXNVJVSGWqbW+rdzn9q3t3P/nsMHkFztPtBxoCc3nPfT+XwWxipi+5Go1agn3bC5LhgmGEdRSJP5oT5NQBZHJDODl4JBJmuDaMj29D1lF6wyfkkC/BNmT9oOY9fFMlFHwVvPy2Isllk2t4ZjligVN2MKVPBrxiScaQJFFsxStQ0MKkQ9mvvSgpvRaZthyjPGQtTK1+99duhbX7ruH//19yw+6jJtKX7qS6NvdAIzu2PsgTVM9xilzIaMg98xcMvwrmmMpnPDthf3c4EVcw4f6m1b3SBlsVXrKdG01WvbWSvuzG9vWbBo0aLlTAkXLlpaU9Mo+v0l9qqAG8VkQQIy4KGs9wnDBTgI28XF8yG1b3t+z44D+3Yf2rNzP7CVTuUymVwmLapXEAVUEd5K1Fexr68lbTIplDwQV+m5nsYg/aQAVGVUKj4RzNJMsKy6El2qj/QeZlL+45ciauEYNWfBwUNOx+IlnmhVdNmyhRiRPuOc5Tk3pTT/VZZDxTsjClTwa0bkm6XIBQhLJlWyKpZFwQoz7ow5BskkjtGEKYh8tnr35v5vfPmOj/yfq+Qs7hJPUvz8F7FgkmSO+jFqAZPGESvSzBNrrcGo5dm+FT18sIvhKmyLlqn/xac/Or9pQXOrilbLKUdSWpm3CQOkN01LwgSm1iL70pClYaIsQ+aqfWrr1u4Xn9/FfHDXjn0DXUN2PqZytpe2YOksAUKImYgIRhUc8fGJMaIi72NQiYdFeDJZM9fTUKyxTARwACQiNApFSG6lNDBdIqRD54zlV4EsliBhrJx8XVPt3LmN9U21c1oa6xvrmuc11TXWNM9za1tNTfQv8nu9n7SsShXvjCgweXecUbKVyNOhACMNp8cMf2V9UVVXx3t7J8ctHcH8yKgK2F9c/fyjh269dvM7P74SvYVSamUhZ8sLq9HbNcBQBlo1p1eAGVMNoCcqVlhjw4OCWCi4dh7qhSMsQKijXnMVM0NxvJZZFdJuZm/avgPcEVM3QhrwKgQij7w6vE1t37p/7/aDO7fuPrS3q/MQuyw58DLC7kJEYEHAaicLfGARpsREnQGrD6ImoXPlVrKjZORVKIekPbkr4Bd/YDPR1aCicq6ww6ojR9tRmVzrwnlzmusAqbkt9XNamprnNc6ZG69HhmbSN7QI/WWZSpGKABoWwLS9iRQ+rHimT4EKfk2fVscpZBnoxFRVdbRfpVh2B4vo7XocCkOih5/p7SI20gpNKEMxJrxYVdXWrc8rtXJU+fRoGfVkxjeAy8G9BwDNQkrCoYiDewIo+GUKm6yJDlvYkoHhicF/pVMqVq0RGj6LCSC8lg4mI5YjPMwqoqCF1r1KqYMH1Z7tB3e/uHfPjn2HD3QO96fRvULNS2XZWcVBPnbSa7Z8oY1oM0haugSSu6CU7EhAZA7QcitPio4n2ml6ArvCaAlLJSDFK8Nn8QvEQF7htlavOcPPe6ectjSncudesLqqRjXMYYOExinNOQr/qPUhaKdxZ48U8638Pc4UqODXcSbwEZOXkSVYJQ4rFIvmdL6ww2MTNBpPIkCBz8iyFRgrLrFkHL3utoXz2tpb2tpbWxc2t7bHY6w/MurMiBIYMAlpRNDch0zJjEfnMKMfNiF39cu8VtiS3KLFC8LcSNagRevClsObdyKB9z27vy9lR0tjGzE/8OEKaGhJXUalh1R/p9qxrfvFF3bt2r53764Dg30jMTcOs8QsjfVBeCuwRNBQBFiAtkjYDVclNdRZCkwxK9MOxSsASEIws5NwYJwPqlJkGEJR+hXE4YA31LvkM6CiweLlbXPmNjTNbYClQgukaT78lBszR6AVaSnVnMiv0VB
eFbKfhLgFmNcNNUmQyuNjpEAFv46RcLMcTUtnYEgaWiIq3pdNBy0yN2lmhoLIfM25Z9U32dVMUrCrzkBiuJiLQuAJh9Ysl6mUHIhj2JSD+zqiQSNKWjnltS9uMwPXwKP8Wmru/Ma82u7azkjKO7Cnk5014HB6RNCGOKlhtX3L4V3b9+3fdWDPjv1dB/v7Ogeq4/Uxt8pGVYz9SEHShr0STC/UCxgC8DRq4ZOtQuAOmIVdLUTpKLnj56HIoVB8iKIuClSJbQkgLOtl0ulUpDo6r7WlvqGmrqG2pr6qrqHu7HOXJ2vkmF4FPwWoUXLzATC0LaqYluo/Mx9NFELYzFKqxB5LgQp+jaXIy3nvqEvfct5Fl53XuqBGJNFmaBl4Cn/He4qcSKnkZvyX7mfBB6Zse244Peyx+RKo8D1/wZJW8MtkJZwQrE1eIR5CAZUVwEQi6Xnp5+5Vu9k0uLfjwL7OPTv29vVgdhV+SDYY8euqupaqBgEeOUjRCNakqAJZRaeZLMAPBJCaC2CJNVSPTdfROGI2J56MDaUGc+zaqYnNb59T25iow4BXXXVdY/VpZ5zS0haxk0yyNUNKZyeNEPHx4MwvHkNYAhY9+vXs/ISZzE5ylVSKFKjgV5ESL/tfPbTmLqmBYRFdLsawnu+E5SowJXAHDGcCGTsV+qlAgJlDEe14OHLx1H13PFSdrLOG5dBWbOFjzoHBXy4Xxw//BciATtlstren/98+/98jI0OOFYs7iVw6UhuZBydFAWHoACjxWEySczlmySCJTO4sliSoH3NCEUWhiB94QR47Wlk5mhZDFEGuoaluwULE5g2Nzcz76vmtrq1qX8LJ1xqDQtA3OKV/BV4lL/nFHQ+EMilXfl9iClTw6yUm+JTZMcC0MMeIdoQhKazzlWLxiimUjFTMq5d4hrLRWQo7ez64n2H15IPPMsUDngCCeNwVXTDNVwgsaCEbuNDWXsdJa1HsJmeDlub5fT09iUQ9ygZI6uNx9NqRR6F9IEYjkG9xWhAxMbOcrI6mvJQX5DJ+Gtl62kvVN1bV1lU1za2d29o0f/7cltY5LS1z4lXxqmoVTyo3osGdzmtAigJQCANP5b/m+TjaCP2M42NQ9Fb+nowUqODXCdZqevgZ3mRMyYR9ALVKwxROg/H/krg8JwY95Q26shooC3y5hjk1bQujgq4GL3Qp4L9q5qhkbax3Tw+MVEJVAbFgzUh2ZCQzEI1Gstk0TxIJdhvp45SYNLpWbUNMcKptaUvr3Jb5c5rn1ctKX7VokxTUuXTFCxM94y9WejJOqoRQxZAT/9UfgolfVZ6eDBSo4NeJ10pMo4q4pPUkCiO3rKCFQSycmJZe8wpcwy8ecI73Zspk4nB7lM5wJWG8wU51zy0PW6lo4KGpFWSD9NJVq5ArAR+FkPBBxXXORae0pfr2xZOJgeGBIJEZUAO17dWROCDVaLv5tgXz5rXNbZ7XwO+cFqeqXpdMV6gARsVczW3xrhBM13CqyhTCj4o2VfjKu5OaAhX8OhGbD+H2NF2IX4Qv9wt+aXmPoEARyyZk6ybICJAw4x8Pl6/+979v7No7mPCbxK6iygXRzDkXrIL5K2aiUcxEsdXcxTU5f85pK1fMnduMOB/R/JLVYkJWtmHjCMZFBZH9a80pgaRJXDHJSV5XHr/qKVDBrxOsCwjzNMWIFumXLrH8oldA8InUj0AZ0SgQnkgnZniZIo6Nq3JZmMI7/YS4mX618Y79665/3s1VY2SencmRpBOpi1x0ZRtKDWKHVUdgdVLnJgqpv/+Jy0rzWl1MASy2ZBtniq/9eGWn8+TOFJssyvG8LIHJY077zeymNu1sKwFnhwIV/JodOs5yKgWQKrFOk6VvdiDLW5irMBaQhlnBdDY9nEbpSb8UrmdS/BqTukY9AT5f7Xsud9337oh7DWIlXkDKGkz1/f5H36qSok4agk+YM4jDfp6pXBhnqkCFd4ALwcvBaxqRKkFeRRSYuq+9igjxslU
VVmmME5agOMrHsAfsGCo8MYHgxkohUKJgSQ/dKkkvr0aGBvbu7Fi9chUZBLKvWeuwl6HcmGzDW5BLwCun1v1q/4+/8at8Kp4e9hPRKPYLozGrtiZx8RtPL5gPHA0usp1pdp1m644+yXEknSqJCjxORZ0T/F2l8U7wBhpdvNFgVw5eJpyxYGX8Wpxv/8H7/vL+256PeCrioXWq9yCzDRn9svJLAxbGZdihbLMDJ6e2Pj78X5+944df+2UkW+P4sao4iv8qlR0YVr1v+q3Lq5rFUo+Z3I0u34lwd1TgdSIUuFKGY6dAhf86dtqdaDFHye8tFUskFi5c9NYr3/WTb1138zUNV111xZvec7pYXkYvYcLPFjxXoLY+lrv5F3dte2ZndhBby5HAFX1QpoRseqyqizUuc97xJ6tQtS9j+040MlTK8yqigPXiiy8uXbo0/JLrj7aIOaBB6A8HRugJKRQ+CT3hq4pnehQ4Wn5hQuyRrGTyqC2I4mdvIH6x3pdW//yJa7Y+sTsZqx7O9S9YMm/Fmcvbl7QtWrxY4mhH2724dfuGDRt2bNmT6otGVW0sn+C8CX0wBsdWezkr7SbzTe3xL/7gjziIRyxeMSEdN1sc96CYQfnf4sy4/NlU/mklWp7ArNGzPNGK/8ShAN2Vro4pum3btlXw62VvlxmNNxTZQxH+BH4txsr2qi/89Xf3b+thJmjnORwEXYbxTsMip09wUg5KpaSKkQmsTLBX28rk3KGFqxs+85U/js8BzkT3HTXU8W5aUFPBr/GEqzw5GgqU41dl/ng0lDuRwtKKFAdOGY/hlwEyY0EUD043s5jEwqzg577+4R985c6H7nrS8Rw3H+WUHYy0Y5SmWKGCB40uSTDIZzwsxnvwWUO5waqG6O9+4C1v+9DposDFmiPis2kBVTHtyt8KBY4bBSr4ddxIezwSFtAqrTmGyBVyXjwJ/fDYAwMDDQ2NIu1y1Ac/deV5l6767n/+sPdgOm7XRvI1mBSVdUZxepslf2VvkHKitss5jzFv2O97+9WXX/XuS5qXiZIZcIfMXnZdEqucayzC2XRYq2JYnW3lp0KBmVGggl8zo99LHFsYrRICGLaLIpR74LzCQiEjIIJAnssRQGr161v/5ay/f+zeLY898PSmDVstO16wtKUjmHQxOorRsTVnrDj1zOXnrF2UxKy+lveH2DSqBJJ3mFvFU6HAS02BCn691BSfrfxCzAoThPkKwcv4a2tr8cCI4bDsh0Q/1qwuec+K09cubmqKp4ZUX1dmaGikwIVp5YzWBU1JTjPi3FmAyWCT3hAA4yfIFcJYmGvFU6HAy0eBCn69fLQ/njkzizQAxy9cmFmXlMklAjHXalocR4yVqFaJeRzKaPYllqGViMFKexuBLeMMhB3PUlfSrlDg6ChQwa+jo9fJEhrYMrIwPMbpkgsUifBKbKjqQyxArZCl0h7ZAKQBK4QtU+UxtycLHSrlfGVToIJfr4T2BarAo/KaMJGEBWPayHP8BsIIYkKZ44xKMUIIY4FxHHjpdc7ytCv+CgVOFApU8OtEaYljKAewFcYy/pDnArMMbBkUM36DYkQpE/GHCRQ9pSQLTwzMlWVVDPmq+GvWWUtLIq+KSp88lazg18nTVhOVFMDChcCEn1D6WQGHDHLxxEjEwpBFcdeoREsc2ajHJ9cNWFOu3HFyFb5S2qOjQAW/jo5esx+6pEQ6rbTL9b+mFQGgKtNxJYrBuwnjCvpp3DsKICvg5ITpTfRw1DR3ogCz8GyG7JLA3+hqlQPixIm/FNWakDKjCzphkMLDl62IUxVqhu8q+DVDAp5w0UErymQYsbBw5mF4q99O0p31eDCzRVI6CiALU694KhR4qShQwa+XitLHOZ8xgDUruYFiIZDNSoInYSLj5V9H5sVOwmqerEWu4NfJ2nIvZbkNik2RY4VNm4I4lVfHjwKvVvwyUoNJplDHj9yzkfJLUWiTx/RFK+MBroJoE7R1OUGn0YzlwSd
ITT8aRefpRJgsoZPz+asVv06c1qIfT7/bHVXgE6eOJ21JLLGTrd2YNiqiz5jHs1PRYqLFTGYn1VdkKsXmeUVW7qWtlOl1x9LnjohKxUTJoti3pW6yy0f+zGY9C9/zSdIkR1xJZV/nbKKYXxGZmdJOksJslvVIaZUXoUjCUpyp3xZOdULYRTjzW4qqlU9I0S4dYmJSG1/1CfIte1TmLaZOQiYt48mrIIs1SnnrYgWXxU8xiKsvHoUe49ddQoLyxiRibl65v69K/KJp6ZGBev7JPVYw1hBfYfgVmxz9KTYP8mtc6J/XnpDeQ3/Sv2Es0ubB9B3hcxmV7vOG+9PpkUwuJ6Zs6KbRaDSeiHJYWTRhx2si9riGCkSjC1uE7oub9rhWeDxZmPPEy/zmdVjaMHToqamOV9fGk/WuzRZu7cbMDcVwflYd2DPU293nutFI0blRy47ZTizvxDg6OxJhbE9/CAXqhaf2W/qUkbAkRQ97nYre4rBkORX6JBKJeDwai6kYtWcTJ8HMpRulPFYp/iQ+Sko1hWT4OKdEfmkUtfPFzo4Dnd2dPYl4kkxzuRw0pydUVSebmutb2+c1NbsOZwMQ02RKRElluo7guAKh5EwBlU/Jmm9uRO3ZM7hn556B/mGsSBKA+pJ1Op3mt6amqmV+y4JFbS3zlBgE11nL79i+PLoYZFZGydHvTta7ccPiZK3Ikctt+op0U3y++sLf/WDTg7vi+RqOGmOfDWyDUTLAAJbxTJEiHWrFGcuXr1iy6pxlZ1xQLYcbmj6k+8f0+4kUBLsOnnri3r3f/tdrIvmkzamKJKbHQMrvP/Wc9s997Y8JUzTQJfkUAIWziPRZZZ/9+NdjXmNx3BR7aJ6jZnUNJjoUiDpO7CzsrQ7+7gff8o4PXUSAQij9x6SGl2M+1t9+4NtfvWawJ93Y0JzJpExSOTdT3xr75k8/mbNVoBcuJ8tkgqx99cvvPfDCxn0qH6GqxbXUwkqfLm3Bbwz+0EB8SNiXnsnkMplMIhFbctqicy9cvWrN8kUrbKta8VWiwJqKhdxM64/P2qTLW6jqZxVmsfdtUQ/eufGWX97tGDwNxCiaoTkFA1zALzyRqJP1MrWNVctXLjnjNcuveMdSoITWI0FzGqbJqzzfcoKE+fJwZChfExdL39uezD627pmtz+3Y+eLezAj1qpJEaEq/tBWM3C0y4Ee+rPnFyxeuPve0NRevaFupG6wIYZPBd3kZxlPjpHvyKsIv2oZOU2g/OoBfE802xfx6DPKBXxy+U4KticZ8edOiELrjyR7G2+03rWtsq37n71517mtbk/UyZnx9toVBs/IoE/qDQIZ61FWP3rs5npvv+FGOpCUkXRPDXY5V3bkjs+sZtfgcOb56jGMgmQK7ufp4tlmsC4ozv1JLM0KMpecJAMsggY4T/lgqg6GdaK6mMIUJXxiPPqYIfHnkzmfUQF1tfo7qj8UtMdFD+raXHj48suVxb8WFblabryjSekwqE90Gys3VRjPNsGDQxIIolF/b8ynkrFmUkKGj4hpElAs3RISh/K6ne3Y+f8cvojfG6oKPfuIPz7qoxcZsGbScznjNyzG6fEU2PT7A2SUbH97cUrUwpgtD7oJZgl8FIAoCDw4I3tcDxVSsb9Bfv2fz4w88ff3P3d//2G9f8IaF0qYcHAAQTsNRIydQNVHrwd/svfb7N3buH4zbNbYfiwXNccfOZ8rRT2EXl0Y1JRGay1fY372pe9MTN15//a/fcvXrfutDa6eR5ysqyKsLv0pNZ4Zi6X6aviL6Ed2PRK2qTE/mYM/A93bccOPCmj//9O+3ny77n4EgeruRB02dLudXe1m1+wX1/FMvRoM64Rh0NEaIx2nXbizdk7vt+vv/9OzXmY5M9mOQqJzFmDqvMW/JYswTbovHaRffkCtZ8pSBEgjOUtr+PerpR7dY6epIJMZ7GUiaKljW9wbc266/99TzrrQSeiozQQ7FlGf2l0zli6MdQCZQ7NsBfEo
uMtg//F+f+/n8ZY0f+9sPNC9XlKQQTP8JEdA85HsjKJBSkZT69r/e9+h9T9uZaL3dOtKbs/MRDuidwCFLKByxCfRQEJDKUV5saG/23//+f86+ccXvffjdzctci1ODdfTC98S0XzE5MuU5+JwdVPt3qq997ntde4didrI6mJNnx72KlJn2LsYhvO4b4VdWf4CcIOvHrJpgKBUMYxO8+NEqfsdKkV+hvgmb6BVa1yNWi+++vjTvQh8bf4mIgl5Et2UkwCzErKqE1eCma3t35776jz+4+9e7DNMwEThMlD1fcFvddctDKsukCwaw0BwCCsi28m5VrHHdnev5zvKVpjiaLymkA0QKT8BzDOFYXmB7MCzikV8O3RCEllIK+MjcmARhM3XKnpxlC9A62AYDJksXXB8p5MsP+JBkZD6L8JgvPp+7G659IDsURKwoSQEd4XBiyEVU4tH7N470wxbpgk1U48mekRoO8poA2k+5pQp5qQ6VKrSOaSNdtUJgHYXTLSNOLp5UjWq4qntn+kt/9/XevcrhOEsKM4njVcRXQx3qUx//wYO3PBnL1kf82kw/WUVoC8qgUR6A9gM7x+XbOQrDZcpJAIK5VtQNEk42wSz+hccPfPFT/7H7+X4nJ7NIqY9upjG/5MtcNT+iHrp152f/6l97d3vEBf05sM4KoqVvlKmy7eXt7Pgr4KHja/h2/bRdFa0tEq/AhE9S6VfU4wp+0cW0aXfdV+woYxQsyPpWZqIrx8NckOEYWHqgDCHoBxCAK9nIwD7vmq/d8MyjA5wCy1Avx5qJuwy9GyFxF9KWx/OBy+nZYTCDCwBPNp2L2lU/++56OyOCpzFORgVsETONSFB2+XakeEWRkDEOpZdr8BKL937ez/HVVrmA2WI+XX7lrEw+4sertUVDimfYTSBMPv4yZjnK6NF7nkhwnK2WsxiU0YwAJYEOlhvErv3hfQDgFKgxphbm1nBxoCEllCdM6uGxVM6zMp6d4QwkLjylS1Fyz/e9IMdZcXjAZpH0A6gcPJLt9zM97qc/+tVDz6nMkAZ6qqATHpW7r/p2q2984bq+PdkqVac8aQIka1IvGtETuEfIlbOG/ciIuXLOSCY/kvFGPC9HYAJ4fBdAVznd3LXSkXS38/8+9V8HNgOzhazG5Cu9hl4m0/AD3//qdfn+pO1FOYtAG/sWiptPAiUgCfqbZ6XJNGsPcWUsudJqMJMfTAcDXn6E0+2IAgUoLR7pjzpbcpnw0i9fOT+vyvmj7seMulEdK89nLrX0lNZ4jctImLCFOzu6Dh8+nE5nq+M1+RTAENcnJDILJCnL9RK5lP+1L37nq//9STHBDGlDCJgwOR7m1QN37Er1BhyX4SZcP2fEXMIuSZKMIlifILLutg3vfM/aRKvI10InE1VubbX07FZrJCkmCQ2oFDowASVux97BXA/JBlrgbTsRJ5MZWbZyUaLO4YA0M8hKdLBytt04d3G9yLFJfPSAJ9hj9x/ODrpuHukcNWb0CqHM/JHSwo7Y+ejT618IBi+zmUAdjdPRNVLqAexGAGV/5VkrPCdDy4xh5wALMLj7cE9PZ392OOvasYid8LJ5D3zT9mYpkz+UieSr//lT//mF7/x5vEoUHcY6yu6ra755x4sb99N2MThKW2gO1vNlyqhhL0gnqiL1jYnzX/uaSNzRa50xJJN7d+3fvm3PoT2H/WzgpVSQoamjroO8EuCzrXQ+4rj//rlvf/47f2rXKGucIEyoHain1/V988s/rHPm+VlHc8hC6zxyOBvMgtfjI5pL1sTa2uYsWd4+f/7cTLa4TpKVY1noh4cPHurr6h/q7WelWti54nrx2Gq+ou9flfg1pkVlasUHVDkJ9Y4PXnHmhVVMhMqRwgTX00aVzSqOeX364S0P/PrpZODG3GoPJsYXQQjBLN/N9jrXfv+BP/mbS9MZFXHlMm40FGjOBoAI1H23P+x4SZd1N9LgCZeec8hfECiw80Fk4ODQkw8fvug9c31
k9gJL8ooEJUxcffprfyizSPNI/y395NTf/8EvB/r5lIs5aWrp+7na5th7/uANZ74eFCoELOGXUkP9qhroccVGK9I4yQOcCuTYx4jH5PF220+qvKvpA9zItE5AVEa9TLgsK3J4V++Ddx689N3zOSayQMYxlS+VbwIf5URXYCDdk6hVf/XFi7FuzZLIeMdHAwRPDaAE07Pu9see3fBiJm1FtB6JPrsXZtDxRpRnqZt//uD7/+xiQeQihFEqmQJm1CO3737ozg1Jay7SLhbzaGKPabxt+U5myO9829Wvf9f7L6prVmiEGDoUihG0qfz56QF183VP3XXDg/mBeHZAeFpkcKQRIXQ2OLCj+/ofP3j1RyRfoWNIATx5NdyhvvXlH6mBxLCViaMjo0mngwSBm8knM2dfdOrlb77ozAvqpMymKxRJgGgCdTBYUx/+L6cevnvzs09uefqJTWnVPyZkMcYr+W+xSV/JdTxy3RgzfHOHRwaq6u18UqWjKhMZe6VdeRIk1TmXLv3IJ9/yle/8/dIz5/UNH3RjiECYdDB+GbCOk4rde+ODfQdUko4rHP1YR/9DycvXLMXGh/bv39lZm2x0XdbdAIJ81k9f9qYLM9YAQiieCCLA7ORi1/34JtbXI3yex/RmVjxd5Uf1xVAffTF44tUR8E7wWTMmOS/TP9SVbHL9iFTT1DQbUbmI8hC8RFWSlUy+5Dbrlhq8wFg4O0TKSm18uKfrwIDKwmVIpXzUTuzs6958YRAfgUUyWSDFr0/Mu+kXt8PaCMTqkGNJMOU9ZBgeSiGLH8r2iWIKy4gTXfkEQnPFebpnX9r4ic++6W8//+HlZzfatUIj0Far0SHMi470eXf8et3wYeGSRElGS6OE0tA/r6770S2NyZaoSjAJxMk8NMj47nAu2v/tn/zDh/7mouo25VcJZTzow/KFubhF6axJ/fYfnf31a/5s+ZpmpzrlxvVcknSoOIdDB4kbfnzLSKd8osguvCTfrPrpd2+F6a5LNFfFa2gXWhnwQ2rhuUPtpzf863f+8i//6c0rz6+jUWiaLGKxsovC0EWdGhVvVFVz1VW/s/KvP//uf//eZ9asXREC9JQEfkW9rOBXoTlleNuOHHShp2F88guXiLlLV8RRXGhLLjhT/Z8vv3PJmXOGsof9ICWnIjJ281Y0F3OGnOu+dxvfRsTY0pu1HJdsGDVcPAgIii+rbr72PmQfCFgcnuQ56joTr82//69OaV1Zl1MZw9M5eRbSowOHhx+/vxPJmhE2TbcPCtiRU8EBsTJMi6gCU1DgCzQmmq4gD7ktwx2JgPPVnTc/nBnmhvrbsKvInGrn13zgz1ee9po2303D1Ai3hZw5FRzeO/DoXQekqqNSktsjOhoCWVYYzOhkuaysIYbytEYWcq8UaCTsI/qkWQIn1KkXxX7/k6+/9N2n5qxBwXhgHz0MHyXcZKonWHfHC9RLT8elkFLTvNp4Z9dQR85PA5Xy+YH+KKdC9qq2/Pdv+GTzqaKL5kaRaQnNwyvsGEKlmHLrYRLfecV7V6f8Pr5jfG7Q6IARi+QT3mD0nt88A2AViGyqFKjBveqRe56GVQ/rSCRPpfPx1GVvP+cf/+t365cK/xshayWfDXOZfCk5F8+5lUbjP76oaliolq6u51aaL2zWMIPQI3FCR1NO5wrDn4geacqKgwIyV9KO5oco9M4Jr5BWaGkm5qhPfu4DdfMjw7l+kEEuUSh17JyzZ8teNSATPcEp7Yp/ZRRpTk11bVcvbNyNiMr0bzpxJhh+43su56DGt/7OFelgBO7LiJYidgTpx0N3PAEmlkvxy0AmLNdUHj2/G9WFCS2VLSZU8HBrLl6zAzAiWgZ7NqMivxskFTAHYux8Kj90xmtOsxrUa998XlalLZc5pKQHjFOpB+56XIZuWO2pyjXqHYmX7pm0cseAZJwK7sinwLGZo6HDIMEctCbgSxnDroJzufhNZyPXg7x6MivzZZvFQa/qgdvXS7MWh7d
Jn0Mws7SRHEWeF5k9/6L5IJr50F+8LzJHwEukUUTTgj4NzgLRBCZjydv8AWVq8+/78OVN8+qFAwfoqLJ8chIJp+GeWx8RffoieSVfSz3z6D6vn2LJlglaxDhWNttXNb//Y68VMRb4JK2ie6CuO1nJ+rRIGuXipblMgnJj4E2KNbkb9fboG2byhF/GN1S94oQCDBvpkLqrQRTxm6vQh4u3mloyr4O3slVdu7r6T9+uoh78l0NH5zMt4pv4gd3dvYckKAF1DO033zvNjpHZLb96JDdARy98h5k+VLVE3vCu5XzV117W2tza6EQdNueQAgoOQdba+vTe3u0ymGfuwp5can7ziDFjUieXMCM8gXrkrmeHuuATo4xQeWllq1sil73tnHxMnfvatrYlLZSRsWXQhznvji0Hdz8raYV5HWOxi0UEYgAIkwj+godFO+Zqml+LRKPLVjV/6MN/ZEEzkIZlV4DEApSqd2/r6NwvwxxHWIqU6VXPPLY5bqOqHxaQI8czl1x17ppL6mkTQalJXHmbShDaMqr+8I//SLDWZnLtICsk55iq27+9Z+cLI4B4CcLy6sF7Ho2qapcvHaG0ZiwcoJXw/vIzv+s2aWSSRI/SUdrJC3yUaZ1MwYu942Qq8+yXlaYvESIct5PkI4FRjhdmQDGfee0bF56yapGfZ+VO0gDCgBvmWVufPUAIrbVQSsjMxVxb5QbUhgc3JVBe12v2DAm0iloW1dYicHGUHVdveNvrMsGQl8+QLInEIkl/WP3ml48z6ywhSynhY/GZKsvQ0lOkifs/EzdPDXWpdXc/FslXiYhPBqwsAM5bUr/sbJWzVLxGnXneikx+GLUSUw6UMNMDwQN3PcIU8hhYsAkrA08lSFF0qGYZLxsgIZ6BMCqy9h21SL3yoloqmmOgiWvH/bTavf2gbjANJYHasulQT2evg5KqrhFJySTOTr33A5eQbmn6WsxuzF9pr7IvE+zPeZfH4N2EDwKNWNRAlq8SVhBjM5AkZzoVSXvq2Se3xu3CRjESEcUHJ3PpG9bWtQB65R1xTJ6V2wkoUBq2E7x8lT0yMg4qzZAO5R0FD/2q+FAClFPGUmesOa13uHNgeNCXLy992E44Vdue30mvhU0r7+f46aIoN95546ahnhGEaTyhE4MIw+n+q95xGeDFl59v8xVvW+rWZ7N5JkcitEqwjTsfveeudR0HRF5jHB4p6jH1ecMRhOBVrJBOupi+1FOnv/6BXQPdUlqtL0L9g/7h7rf99huQ4bDsBh/69qvPjdWhE+fB78CssYBn5SJPrH/WCLBJfAp2ppj15H/HUJD89YJdAbOgp+sieh8ZHs6mUQhWl759DXsBFCXJo4wAweHW0CPppGA0h8w7fbVt844kWhUkBSbyH0R2gvbl8xpa9Vxs8rKMehMWjDaoVSvPb2PNUybQgL7sBqAZI5ufhWcuNlOgdjw3FOTgCkVSJggI8ubRAOli0YZpY4kXHJXNlDc0VnhNGbAQbOowJ9vbCn5No8VGwVUhvBnjwgPY6qzzVtsxhSalQSLZuZh3Dx/sYpxEIvTUYhTtQRyLxGbdnY/6KZFmoDbLA8v1F57adsmVbaz3gwjAYPU8deHr11ixIOdnLdfJZrzMSC49kH3ikWdJtlwKVkj92P5ozksKINEZUnowjElqRN3x6/tgBEPwyvqZhctb11zUIps99VSrao5a+4ZzMt7QyMgwRho0BZzB7sy6uzYLnBUpMCbhad0a4hsiQt8iNWXvcsh2IcKOiXMQHkXUktNbVCRroSuPkF+Hx07GE48/I58TEJ8un1f7dx3iYyPgVXLBvIVNcECGIZsulJjykEyEefQqK4quvIC14f6ibqznUA8UKDhbHdh9AN7VfHFM2ZyI1dBStZTd1+FYJLXyculGsRDlBbajLyunwguRqKxpwuKRy0zoXCzjyfU3pNnJVewTorSmt9BZ65pQePdQBpN9wNIPLdil/t4B+fSLIpUMG5z80q+VmBn
Y9VwHCkry2UdMk8+xfnf5my9GlCzDht7PGHAVT9gUkwtyiHhExZt1tmjNY/c/neoahQgm8ZlQRA8W+ChdSsAmxBseiGWb7K7n98NliWBcauCh3nHJlWtRwmfIgACU2apiwntxVX1C2K+AjTwoGrD92Fp35+MywHTCx15CzSJJdOGTBBtMUrBd5RDGrRNjd49qnFftOUOePWJ0UNCUEsk6pQRJzSDPq96uXthkGFtJUDvmj0tXLBI41jDHbwg7JsCkv6Y8tlq4vCVrDaCsL0SSfVgs6biHD3UKuFBkTUwUbjH7I/JTAV9oJRp5LQubxQyOdiag1FD3hNJTrXjBl0+NyPdPoc2KhwsPtxnRBRtVYpNQIf4r9k9BePyKrd/MK1bo3lMlNG8hvS2gX1oZtr0URhldMxgRiQZyGzMY+HXplb5CmSvh1KlcgfisndvR9GsuWSQfZp0dY4ruumSlu2Rl+wuP74taUSOxhu3at7Vr65M9a65sLMqypyrY1O+AqeLna9KhOtKjbr/hvvrkHNTuTeAManC1+SvfsQJlWhyVoqgoGSxZ6cxf2tjppYb7MlRdi+0infsGN60bWP0GrYwPrsu4nJkD8fUHQcRheuYIAPDE3Aayc8JqWdDiyIYc5TKVA5a1pExwind8IVjDTZuPivBfQEmxQPl5bS1ADUADksNtHoXTpGhqTuRd5F7ypRE5GCkD9l6Rtnk12ElnEBUd+oD+kEnW4OrS5YvDvIqhddPosvF1IPw3v3j9ujs3eBkUmmE9tbYhM1Sc52f8VLLG/Z0Pvutt77+AjFmgKC5vhKm+Yj3FDvyKreDxrRjkkzGpV7vKc0Ics3///jJeQV6i842A+8CLaseWfVgcZGmfQc6CVU197Ky1y+YuFUESUVAUwAkjFlFve+9VXiQ9ODI4kmEG6SPf8XvdO3/1AB06lIJJaD35oDBjLvPqWH7JnQutypR6/pltWfRJZUjKehmz20vecG5tswSQUCI+0uyFrf7go+8dsftyqJ8LB8nSaiQ36GBiiHQYlnBG/OpBPfZ3qhKW4KXgA6qApFB+z2jWBQs8Jq6oBVtqbitzxCo7iIuenZ6fU2p4WJOL4J6ldu7cBe0FYooOxjhZXUUhxzuZDxav8relhxAirxoaqJfGpbJAlDS8q5kTekd5qmprwntCazoRTXZ3SU3zKtOvHrr9uXi6rSZYWJVfGPdao9l5sezceGZuJNOC3SE32+jmND8cJmRaMLzFM/5J+duT009jVtyMKaAV3MtTodvV1KBaLWMbZ35ZyIPct9/wgJeWPcYsU/IhZc2uu+/g2tedLVo/OrzAgR5jIMZZF8aXrWqtrksktWPfJYzb5g1b92xCgbO0tFc2DMtLcdT+4ngWlSfBpJy68brHUkyJAjZ7wvlQQs+JBq+7aq0VE0ESoQAP04d8W7WvtJrbk1U1sv0bCoDBjh97YdPOvS8EYpFC8PDoBfmagCbBELOYKkpyxWqDaFwuskYHVTmVHYEyCO9Ffi/B9AqolMc4nglTLK7wpPgHDfywCYrPpvVXCkLbxaigIFh5nHB+WnxYrE/xnr8htppnQiVDK9mM6cAwPvnorohX4+ZqHI+riiuSq0KvzfGq9VXj+EmbnVZTuAmynSL0SfNqbBOeNAV/aQo6casXOigv5T19dlBW3MZ03NraWv1aGDS5NDYNHVJPPvIcu3pEV595BtYNLG/xqQvWXtaOHgWIQHsUx5kefQl12ZvXMitJY2k0p9cHgryftRGoI/IQQJlFV6bpIPXyVe8etf6+jRjVEmNYTLUw/xUMrzxr+dLTYkAAGGd6jyESDCPbld9x9RtTuSEqQXjZORC4Q12p265/gNJq2eDRF7dYxzFwYyaMBUaumCqsGGxpbzeUEVFdOYsK9Yqh5G88LuqjY5zjgIBjnk3rVj45NBuqLey1krYWkoxDLpNUsT5lCfd195TdFbxMPpllkpDXr+6+cd14Q+fjo9DfSN1whYW3xDfXBKF
fCY8q+DWuFSfoYOPChA+kv6i9uxnJTJcQdJXoCf9l8IuwfJIZ/cj3H7lna/+hYZgYmC+ei1lAlw2Pr2WqCH+ATZVIRsXMlVb2kLJS6opLFrNLzog8GMPAQlW8FpM7CjGxSGtm3cnYYzSiM7V9c2/3vv4INsqE+QJf2V2cft2VF0SrpEYuG6SwepBR0ayK4cmKXsjac9trq+ow5M+8TRBHTIYl7r/lkYFDx4q2YXOAByLPKs0cC2ytBgsKhCBMzrnIq5HB8BNQIg6W+ks3FnM9LISMcuAO+/DNZ8YkOer1lDfSvpY6dIhS6JYufYPKok2OI/v27RPgGeuosAASWy82bngahTvfSesLW2BUFFaRTyLRJqjs2JReufcFocArt4JHVzPD0ZhfQZ9xvUo+iGMcVrpSeVTl+d4XIupvL8v5IX7Rx+jYfJ/X3b3BCeKY2UnGxSiMWLCJpS+4bLkwL556+Jb9X/3cf0WsRCxSxY7pbDbHrJFNw06QBAtM6qL9xWaT4chvfrb5rR9aWTLPogs2tnTjyj+m7JPdgrYsIW545Bmsu1hiQ0NwGRsy/bnBNRc1gbZMLR+55fDXvvQt2e0tapeFuooRQT8BNVgwZe5DPFfFhnuH1t364ls/vFxUE462SOQstBOa6h+RCbFqOwq8JAjIBkPrsJKwfesuAFeilLnGxsZScyjVumBB59YdxcKYwNZTTz219JzXjI1JIuNSm7Aag4ODJR5cmFkhCydulJVCRWLlZm4KWSEtlXWH8nBCcHIVbLr55jtjCTeV6bVtMaGD5DQaVNEHzLx4VKRX300Fv0yb01FKrNMU3QCEko5T1tewH7d581bYIt3LSUR0jhBuzZ8/P0yH4OxAfuKhw/t2HorZ9SMoRUgolFm9c157ekO7KHzREvff9Gh1rgVTVioXxbYUVhSDbJSPcICpLskAJ0xN4Lm18aZf//y2N79/JQZjmHVOML7CvI/BA1z4it0/z2zYHHOrRA2AyV/e6xvsuexdFyZaJEUK8+hdT8XSzZbgV4F0pfkaWmw5UfugtJjpYbvM/bevv+y3l2Pc4ugchNOkBpqMnJ7o4jGIxk0R2kT+JcWUku/fhao9RQISSu3UvngRSUEoeSZLfsuevmeHNLo8KhRqaGgIxiY0eXRURaUgO3fulDYtUsNEZ1VaPCYLS9ErxgM4/FdZSQvZyuIlvdJSl15x4TlrznfyGLgQFhIVsNt/8egTD27ystmIE4clPT5seKEYJ/ifaQ3aE7wOs1c8+dxN05kOSedh3vfEYxtlemXGknRf0MZavHixGR0kKHxZoO695SF0VtFCjDhRtgThEDbLhm0xv6MO71Lbt+yqSnBWGkfbYPvejVXFmDayBdLVxqnAAlM2GapBdLg3c9v1z1h83Y+i1EeoHMWW0UWCI+ru39zf3TEQQTFX60mZ0r7ujRdqrkId3K42P73dZfdj2XAF1g2yM4wx5mHMMbKgGnMTe3Yc3PR4hximHj98xxVK5k1lThCG2TkxNQCFv8Yj2G0uopB4moJtE/mXSUT4IHm8EPziy2MSttXixYvLcih4ASC9kzJ8o/OTDMQVqqZT41l4mbf8bnn+BRNIv9L5ApTLlknxiKV/Fy/lpKiSkyop1dvb++yzXaWn2icWjBDeK9V2WvVpa+uWr40tv9DmWnK+WnT6PNlkqR2saMj1j0nh1XBbwa9CK8uX0zjpeEd2EpreR+Cs2vLcDjm7UI9k6VQ8db2WBY0MmILz1aEt6sVn90RV3MuJKVQvz96gFEKjFefUSFJpdd9t65laYgA1NZz2vSxWu2IuoxaJjOmgiKAYqFI4tv7CadTEGx6+e6MoLlIMMw6KuU3nL1ybGVd6WIUxNIB5aqBb3XX7gw6bcWQrs3CcXj7dtqRp1XnVEitQG9Y9BWNFMdhqMOoCIkROJYZePX1UD340sdgeev+t630kekeS2ZUawhRKN4cBI7iwsKAmmHkuNEcNWOwPiT4ntgPNMU6mJDL
jtrymuXH5nOgLT/3cRN5GjZ1l4GKSeXfLMzstQ0/DphXfhH9DvptyhJcBJqB514sd2ioO0EkfoNmY6HqnrFoi+UrbyaX3t2KFnL0BRZePuEHtw/c8WfYdKpSJMFgHSufZvC8HJHClUQ8W8zws5bArw/JyVLqYzqvyb9h6r8ral1c6yDc2NMgIEca9wEuVvy/3w9gTRjbxeOrH31ufGWSWhAoPSgLSUX2OGVUDi86W2QnIwvAhzcfWvZDuB45ElsQDjOjbNZk/+vj76NMMaXDp7l/dF/HZFKPP8vKZMgYI+fnMo6QkPZbezbRVO0piY1w5a3XuHXhmfZ9MHygwhTHjuOyX59Nw+qwPrbMpgYniq/vueC494DmyH4cKMb3N5RMjb3z3xWJQkAdpdf9tD2GQCz4w5tjhJTafXblgGLkY7cyT+RU4y8e3bzrw0B0HgXtJEVf4Y25KvxTfoJJ5BEHlM6G5L03L0C8e89ws+rH9EnNfP/rvJ7zhSJBlXiU76bHhwTpodWNMjkdkdgm16fKuWnC6mtNaPZjpxZiYMJgWrE4snmm5/dpteZYRCaazp/DhZcpjbgmhPxmEoukk7EN3dwwchLlOwDELZLO8jF1oO710VSviQv1p0wVw2euOkbIUFqI1j+g4fjyenbv+9i1ot8qKI5U0OemmoFzQj4c0MrpppuNJntqZpi4GH/d3EgrrcGQyzWtcsifSg5BWJ1KhXvKyMGDgiTgJVSSp0xjzDAQxhcrpD3vVb35xb128GavBfAlJBz4gG6TOu3gVZrxkGJg+NKQef+hp2CtO0tBh8pyUEau3XntlqxkoD9y+2bczuchAJtadiXVl493peFc62pGJHc7K1ZmN9uTcYU5zACI9Ri1p+0520HrsgWekazOGplHsCehqRoL+FdTQ4DXcp+696+GIlXSwCyM6oOBXKlnvn3fJElPapzbu6ejdO2R15OJdfrLHS3RzZfUvnlyy24v3ZexhTgTwZU8o49aOx6r7OjIP3L5B0AFCTV5axmR5OQsE1I/K/eVh6MTynfCdPZszD971WMKtR9GDAOAIpnEBi7WXnqNHqxy5IqnTchG1+vxT4rV2OleQr8NBx7362657ANM67P4Wpk0asHCF2YWYUO5hN+KdNz6S6neq4w1gOPUFK7EqkXMy51w03xymK182jRjnXHi656RY4JXi8SGyo9F8rTcQ+eUPH5clXconYC0XTlIyF/Nw40H6J5PZAoRJq03oJnk8YdiT92FFfi9tRzfiM43tX0TOotgA36SBffRQKoCR6RhBVjEb+voXb7IGGZtx4RpkVxvL+17g5NjMLF9d3WMBl4fu3XdwD9ZTsRwPdybG6FmuPOfi1XLGKskho33Xykvf/o+lGUR558NPQsPqf7+58a5fPxLJM6uM5rKGOcpvfmpn785LG06VMIxhGSTTc7pqhTFgYgimcOXUpg2H9+3oiNkNmG2Ac0DyxSljZ164onYhe9Rle/PZF7X/8OYvM05LBR6Tqad+8JX1d9/4GCunRDfKH5wuzsaDLc+NrHwN9o+LA3RMRH1bPiYpESzPZE5YWRCd3Qik16/+36e/3nPAb4jXkilLKLzLBhmnyjv34jMkRAGhpeq07+vfcvHGx17w9mLBhr3RfFdkBQaLhl/5h+v/v/98t1jSp5BFZxqBO52hPMWDE8Owg+pXP3zi2Ue2xlVTNBkjNZh49rTWNSebly2nidnJRBVMjcn3rAuX2N/x/AE2iAItJHNgAAAZDklEQVS67CWQuWZdcs5d1z1yyrLlr7mqwZzjzfovzuQyyiMNLdylvH7VuwoVpAvIEGVWlOIEm+K3j6GtJwZ0XD515pKdfubKqr596sv/91d7nj9cn5yLtEVzDWweRuspiNe6Z180n84pfYx0PPXLn93MoWt6YiRjEfEHFr44oAEj9HpcydQArJDZGRInFprQ5I7L5XFh2YInterc150exNJ5l40yjJAg4sbQbh/qyd5z+9PCgoU
9XSp0ZDeWA5KyCv/lj6j773yEPeMIv0SKZMFBZarqo4x2hiBLCLKKgGYIR3RL2QJWSvkdc7Em+ab3rvUprQNp5YQxZGLRaGywd+Th+zaInue0SyuM1fgLAaNWlxMR1pConngH1Zf/7kfpw1bSrcUOdKn+tlc/P37amiSVo3WYnxW2HtqqbXU0Wp+vakhkMqkC0+fZfsreuanjm5+9L9OpnKwK0spL67Ma9VyVZCm4NKGwe8qmIsNq3W923HrdA7F8VdSO5TLsYoKJz0biatDvfMPbL6ZDGfwtoLCtauarS648z4ZogQei0pTknkt7Trb6e//2szuu2xLJylEpVE2yGM0GSm+U7CuuQIEKfgkh6ED0u1gELVLZW1u4GAXobDO94MKDZBeVUXrzAXXb/2z+h498q/tFvu58CemE0qfks8tPJP3u972VIyeQs4qALFDbNqT2bt9fED3LKW0s0YupqRVnVguvJ11yqvmUaShCnnp+fPmZC/x8hkkZ2lmgIfM7zOc/cNdjsB4GERjt5ZeJO/5XRoWAAiplZR0Ar6/27VIbH92EgT2OZYVPFOsYdqp9+dzlqxrAL/g7OAgBHz2KqDisq9R/tAP35p+mVp67BP5EHKIpuBmHVdXoc09u7T4o9KQA4o44Gilq8fthPDKqaSOwg3YZUcEhdds3d//Z1d/Y+VSvk62JqCjbgMhTBOj/f3vXAiTFcZ5nX7O77N6Tu+PuOA58YCOBCJKilIQkZAkkEUAWskxJFjFxKk5Uroqr4kpUwXHsKle5UuVItsvlpJTEFTvlOInLshJVTBw7eriASMiOH4XFWQg43k+RA46729fsK9/fPTu3e9w1aA3agfuaZa93uqf776+7v/n7755u9SzZsGkNXgzAc0e9T4i6hr1RbIqjGefJT25OF3BUCt6yLuCFb+CB9WqF8cihXcNf+NN/f+MlGNLkgOEIrI9qdweIjMdSHkt2UaAxyzll/e3nX/z6F58vjcbCgTjwg24t+AcCsA/0Le64a00XMkLr0KSDm/ATMyIf/oPVwVgp42R0vtjJBytqsFCuOB75569sfeZPtu55Na35C0WWRyDdNAhw/CjAoMEVC8VErOXU0NlweM7I6Fks24nF5YhpNGrwWnq0cOr4mfPD5wYH3zqw7xBWeAcLzWUb6gUs1UJT0klBWXah9z2z12yErVg1WWnv1o6Xf4x9irGVDlgD0bBNWDDs3Ld2HbZilvPQFJXgurkrIwusBn3kibVP/+KfYnYCncLJFmbF4k42ff7U+Ks/OrZy47wpqlhpLiJMVfIeY1UuCiEBAXyjDNtefBVnh0VxsrgwAE64zYeSZbzwGMEWEhgHwYAnSpl0YwPzSC8NWI//3oY/3/HlZLATQ7NSoYidF+xQbPj42M5X3vzA5iWib07lZIm99Fpxe3+OyVpsIF/z6g+uh0rh9PniycPDb+06sG/wMM4Dh8EL2+LLO1gQS0ZkOMgNxyll5t/Qs+qhBRmRVmxfuCx1Khs6lJva7Obl9rqN9/3wOzuzIxkc2oa6hlaLmb0zRy+cPp7+0t6/n/evHTfeMnDTrTcsXtoRa1HPsDLWjuR2/fRXB3515MDgicyFcqTQLIuzQJb6GQWzfSmXyg9v+thHMWOoRrZSFo/CRI2dZf3Oxzd+86svjJw+1xKfjTcHsBOsrF8rluN2z+7XDu4ZHOqc13zPmtv7Brq7ujt6F0g6sNQdO5Y5dzL15i/2p8ezvS2dskdR2rXfSR5whlrREa6v75nMX1LV+I9WC00EPRMrqr77tRczzpheeylcgzNRoeaIcQTWFFgw1HewNYk5JjuGkQJ0CkkE77RgD68QTg8K/PGnPyJLUSvN6OwhMMJOO9AcUqcYoonjxJyu3thtd/ejRcoYU1EYOrwSxdS4wBtLb2+et3DOmaF0WXqGaHxQj2LlWS//5/aVH/rIdDcjKvQVbxQGvzitFCgmUj/lDJyxs9a2l16Lh2QNGvaARiHwckFnb+K
OVb16nIvyKnuNFA+UCCfEV+uAGwqFBQKLb7ZvvWPJL7cPxUPN8vqe2KrDmVQQb4Cuf3wJRtCCKJxKANzoaRrgTWExy/r8Z/8qUoiCrWpzAM+Wi7kyOAsWOnmTFJqWrNJwpcI4EXJiPiTSnv2jv3gEW0JiwgO8JYNfJa3KEOlDIbQe2rx034GDR3edy19woB6iPqKRkIPpS0y2pCNv70ldOLbvpW//DLyHcTDeCctk046cFxXGFpXYbhZ8HsLks+SgHDTagBPvLK9/ZP3im+Pg6MnoKBUsnLDev6Fr79CS1/5jMJh3N//B9iJifsVe4eGmIk612pf7twPbcAI5NghLJpPePIMcj1KMxgPJ0ZExNKZQxAbaM9ZNbhnXNxDoydLQVFtDJwkLMbmVjxYfjyXyY3mrkChkMrFkEleEXsQM7DrpmfBCa5Imo/7IQxdWlbycax9OfeYLT7W/x8LMI4wd4Av0/te3D4ZLeFtb1nnibtiSgpECXoFu6VE2L5X2RPtTglVyq/krCWIRUNa6f92Kb/3N92AyScQS42Mjto1ZKevovhMyQJKD3sCJssxi4maly4ipWDyKbbXqVJOXLM/AgOX1H+3CPtHJULfMn5WhMzmYVl2xajlOCdMKFxKuSXyK7gmaEsVOXMlave7OI3uOOufzeEjAyojsY5Z94sDp//2f43c80KdOm9VRkX4RS16xbzyyANFgGQmOqBw9n26ymuSUgYtcJBB2r4KeRAMWJ9MnQD3kBGIBJzr29F9vae6WI2wla9m+rAoWHT9oRdusT2z5wBc//dzRN4cDpRhOwEWlY/9FOQYNQmXK6TQU8w4HL3PlsnknFipHY2o4rBLQqUiypYBThBjYsDqSuvnO5ZuevCUfwuoteXFBjuQTonezlzYEBTBiPfnUylK6/NNXBgOFaHa8MMuGqV/sr5GAjQ/MY2guUCJBzXEnbqXTQB5+NEu4dCiNtxuKIWxrKftgSpuuKK0Tgs0AHypppjipZNWx5LtkjVw4j5flZNiHthfGhGAQOzWDdWBVampJ4ltrLJhK8j6YYpQdpdBMxc4PE46TL6QKVqpsj7f3BZ9+9qnuZXLYqh49CaxZ6wcvvIx9CvVhf1jzBVtScrb96IcekLYMs440bEUNMq4RXkX/E0/lAybwPhAUL07f+8G+ZEc+bOeyubFwBMKIfThYtL/8qW+LkQ5v9QpRIl39saI4R7BUxFgYnRu8a0egMcJorIBAJGg6IoIIA7Z9+YXXOpJzsF2iXAngVPFcU3v43rW3YR0FCiWyyXX51h/JxPtR8eAvnCQbslas7Yi2gQTzeBbIsdY4YzHtBLLlHf+1QyIh00oKxXwO2hAUXlklgBcScIxTblZXfG4ymkzE4hd/0Hu9D15XiGD0iNVeWHUVdoIJp+tG+5lvbOkYsHJqJ0gQIygR3RwUAOfq0ZK9CNrUbX3yc48t/q22SCJTCowFAlkAi1PjQBa4IRTGyxIoUzAenSWGNXdDfcDofsLYZgMzN4FsKJZqn1de/fDyRz68EmVHE0IjQsV6mq+uSvUMURwdsj7+2Xs+9mcPF0L/19KGWsvi1TAQoP4AKyihQB3LgzE8jkDlgzgwJRbApBn8xGNCr6hA7liIq2qi0lqkYDPCzTD9S2lOaEZl7PEQjRRhkIe1Q8YRaFRCa3BBkJpq2bBn6SagtS7Pj+YI20owjJ3p0+O5UTsZXPXbd23+w/cHWqXHYpQnPKE68893vC2mmRzMwticAQ9i7LOaX7RkYTteA0QiyFPnIP1Kkncl0DlN9Q1ewl2Pblrztb98vivRn07jjMg8kgoWA7t/svfUoNOz1JbJQZWaJIDnPE6VDMix0lATsWob67bDswKgCdmtquIkfsn68dYjbx8eiWJjWJmpgw2paDeVehZ0dvTK65noy9pNEnIir0pqKDqUAqiB0v/D1uO/u/6bX/o+VvGjfCBpsFE+Ozb0xv7RA1bzYnUPQjBMdbAfkMgpx5VgOC76jxj
asEUHPpW0vb+AD6WTfBSIGAsKMcEeWbKcx39/w0Ob3leM4wRgoV1RUF2a9G6f8GhI8Qrqp5554sV/2b31uVfOvz3e1tqFFXpYIoMZQr2sD8lIBtCA5NmgTIOi74ipDpMcpWCupTP03mUL1j5638BvNokBAWNVrK7AoBkSTOP0Sp071/Ut6Nvyrb/7Lmx5JRxZBAOjrGeG0ibqNCq8XMQye9DTZCeDbCCGZYuF0WgY6mtKSgmH9uv6Jt9y/f2eQfyFWoWTypU/Vmt705HQMF4iEdtXZQCiwia+MPmvVrQqe3Blrk2WoIbyo9kz/e/r3rB2/V2rb2ntltlGV+1SbUh6ddl65QfbscQhYqMxlvDKYygeKkUK9z54h429W9RaBEz0i8EE0siI6RJOBqRI1rbuvv+G5/4hnj43Eo5jkbt0D5hgijjvZ+cbD910m/QblbtODjQECwlUMOw7KEpe0bHjdjoHo3h0gnrQEx3rZzt3JeymUDjq5DCNli9YuXAyeNfqWyEq+AO4TcQ3SoqYEAGZiRhha+Waxf/4leeLoWgIyiIugeKxLD1V/u+t2zbOvRfH34orAEAHux2GohhtyRo5SArPdDmC0bCoFnox1pDg3MlUZhQPm/mL+pfefMOGjXcme+UpgrEb6Ayso+BACeFxbWQVRqn8VXJi1dWDH1324OZlLz3/yx9+b3tqpOxcKOayGJtG8Jp0WCoK+g76SwnrLbBaPxIL58ppnNfdPifRM79z3aOrlt83B6WTj3pMAjKVAVKf2smTAIERq3d5YMuzj53Y47zwne8PDR7OjZXzKby5EAlgCCqjzzB4U2wXF7lCMGt3FG9c3HvLiqV3r1VrdIH6dKhNLcW1fXUG8RcqCi0Gj1O0qkDc6prXsmzFQmdcuAkGhSlr3bsIFsPt2M2mvb2ts7Mz2Ra/6dau5Dzpn6lMuYjd1pGmaqjCMtoFrAuZswNLu8UmgzFA0CnDLlxO3X7/XHCQbuVolkp9wJ2VvlS5++K/SB/xcEOo1Xp406rdr++X1eWugxbQcfD4PitymxtJXw9YcxfNjsWwmkOGvCAPvBsQa4WpD4/4yq1KYLyYki6PDSzrwUBIigLJwsV0/vzKtYtkvlHbpLyiVW6d7i/iY0yF6IKJzLVt+Mm23XbIhoULRjVZ4lvqOjNyEqd+KJ1TOvrCm+aOzsk7GSx2xd47sjsHjNlqdD91JtDY4slYS2uit6+3f+G8nrnJZIsVSAqwBUVvWCGGQztQ43juTJ1E5aqUGA5nEwi3Wg88sfyBx5affMs6tPf0kYOnjh08cX549NSp09lUNmZj1Vaxo6e9vaN1Tm/H3P7u/kV98xclm+erCtXMhUeRPJNQJslXa9gqg2m+EN+WxwPWo33ivR/Eg+TkwdLBPUeG3jx89NDJc8MXctlULpsFJgAEL/MnE4nZszv6+vr6+/vbepK/cU8TzIOYyJCPPAdnlgsMDQ0NDAx4HRXtBgDon54fHu+KF1PjZAjyIZAYb2CAAaUcD+LcuExIz4qqkxrM1e51dYFGFQvfqrHq9lLdRhV+0m8xvBCjhA4DveETsbJpK6aHmdAgZOkigkWdcx/VVZBVc4UwgeYC+aMMtUgNF6szRhHw0VLpdCAkIiCmto3gG6HqPtk1DNSny4J0MM5S2+HBkx6xsOML5EdnwNR8sg0TYNKxvfGjTtv71rJ5Pyc8FUhlmKWlVaXInLPieOsAoSBxkUHsd1gjq1UzERhSycNChapbJtKs9iGa98F1XRbQBnKD5gWUsLeaVIY7myEgS9vV8aoTmvBDFnWHiqSz9r7hwa26ILgD8uvcddb6py5ybQ4VS+NELtpXjRvEQr7uFWSED3DQOeIbeIxZYQz3dY46L50K/PqjMpU7wIaXrSnrNK65b1SrZvP9+/d7D/BrrhT1CIyGIjZZNVizmy184FQLecepoYHBVbdYafpoQJXmK4OHqnkzfRmbgKILSAeFdRwGcfSqd56/9HXFRDX8Vcl
XkvccZMB1ZXzCNR0F366oOpq6KoymPHp7L93rEkm3E01HXl4+Bo9Iq2FSkSR9LYHutRAF/+RtZ9jLXSxwLK6uKUOyCJJSqHGVdHZdJNQIjueJSEkArnzVUbs4o0hbQJGswgTM4lXrhH5dwVML6cY0SzxNqBZSOBfZqRxdICp+Ia/pnI6jQqu808W+3q7PPP66qJKhA9XRP6u6pNsmdBOftsOofME7onOptVVIQXcupReYGpbXeUyRzGG61NN15lpMUAR8kKkui1ri5vqnyKT23ikieMXUYUgWCGC8izM4VMHUqvdqdQR7H8rCOqzDwOFjnoI/ZcpqNCCpiL6lV7rIUhFFBZKkCKfiuN9TJjJxUTGI3Farp3lQIGalytRNVXi6BbgMNNzsporpSowY1aHVfvdm0x8FqinC9RQ2s/jLqzndP/HT83hBV9WDtoWOiflLdWoiDEvoaVqVmmi6V0MADKnA0bD9wakFFKZMNG0hhlpNBeOgKXJ9YaJr1brqXqfJC+Eyg1mn0/pxnTd7t3nU4YnnXfHi0NNABGYcf1XanzyiNe5e07wi1VCTWiWz6pT1tYqlFdqC9xD3PBLdE8+7tyZlffVyejfyUxOCiAuTlus8T+VC9V8vI1ejUWH6ovG+6jSm94uOU5OMmsRQOk9F7YhgE3vlV0sfpk9qqhAojuoyvvX0pWu6VQm6+bp/prrdu6ahrbYSeEEqKfXrMhLSSmH1vfRfQQRmHH9p7LwuVNuVriCwl0zK5R6xgF1tp6x+yMTVZi7V64CJJtWrBQ76dA1XTy7/Fcx3oqIrj6vJmfH3tYzADOWvRlfZ5ShOV1TGS3GWl5kb8aL4F13w7qjL4yV3BbmqVpCLFdja8Mv69a7X02VJxUgeAqwgDwp6iAARuMYQoP51jVXYuyOupx69O9ldwVx+fcl//RSuYHGYlBkB6l9mfBhKBIiAfxEgf/m3bigZESACZgTIX2Z8GEoEiIB/ESB/+bduKBkRIAJmBMhfZnwYSgSIgH8RIH/5t24oGREgAmYEyF9mfBhKBIiAfxEgf/m3bigZESACZgTIX2Z8GEoEiIB/ESB/+bduKBkRIAJmBMhfZnwYSgSIgH8RIH/5t24oGREgAmYEyF9mfBhKBIiAfxEgf/m3bigZESACZgTIX2Z8GEoEiIB/ESB/+bduKBkRIAJmBMhfZnwYSgSIgH8RIH/5t24oGREgAmYEyF9mfBhKBIiAfxEgf/m3bigZESACZgTIX2Z8GEoEiIB/ESB/+bduKBkRIAJmBMhfZnwYSgSIgH8RIH/5t24oGREgAmYEyF9mfBhKBIiAfxEgf/m3bigZESACZgTIX2Z8GEoEiIB/ESB/+bduKBkRIAJmBMhfZnwYSgSIgH8RIH/5t24oGREgAmYEyF9mfBhKBIiAfxEgf/m3bigZESACZgTIX2Z8GEoEiIB/ESB/+bduKBkRIAJmBMhfZnwYSgSIgH8RIH/5t24oGREgAmYEyF9mfBhKBIiAfxEgf/m3bigZESACZgTIX2Z8GEoEiIB/ESB/+bduKBkRIAJmBMhfZnwYSgSIgH8RIH/5t24oGREgAmYEyF9mfBhKBIiAfxEgf/m3bigZESACZgTIX2Z8GEoEiIB/ESB/+bduKBkRIAJmBMhfZnwYSgSIgH8RIH/5t24oGREgAmYEyF9mfBhKBIiAfxEgf/m3bigZESACZgTIX2Z8GEoEiIB/ESB/+bduKBkRIAJmBMhfZnwYSgSIgH8RIH/5t24oGREgAmYEyF9mfBhKBIiAfxEgf/m3bigZESACZgTIX2Z8GEoEiIB/ESB/+bduKBkRIAJmBMhfZnwYSgSIgH8RuDR/lcvlQCCgS+B5vAJ5VzyPF0QPESACROCqInBp/rqq2TNxIkAEiEDdCJC/6oaONxIBItBgBMhfDa4AZk8EiEDdCJC/6oaONxIBItBgBMhfDa4AZk8EiEDdCJC/6oaONxIBItBgBMhfDa4AZk8EiEDdCJC
/6oaONxIBItBgBMhfDa4AZk8EiEDdCJC/6oaONxIBItBgBMhfDa4AZk8EiEDdCJC/6oaONxIBItBgBMhfDa4AZk8EiEDdCAS5b0Td2PFGIkAEGouA6F8l5RorB3MnAkSACLxTBET/yufz2OQLTt9Mjeydgsj4RIAINASBMHIFc4GzPNpSVOZymZbJo7aGiMhMiQARIAKTEMCgEVeEv4rKwaMpDAEkrElg8ScRIAK+QmCCvxzHAXNFIpFQKBRUzleCUhgiQASIQDUC0LhAVrgShvFrZGQEChf4rHoIWR2bfiJABIiADxH4f7BLvnVvgd79AAAAAElFTkSuQmCC"}}]}], + "model": "claude-3-opus-20240229"}' + headers: + accept: + - application/json + accept-encoding: + - gzip, deflate + anthropic-version: + - '2023-06-01' + connection: + - keep-alive + content-length: + - '74598' + content-type: + - application/json + host: + - api.anthropic.com + user-agent: + - Anthropic/Python 0.26.1 + x-stainless-arch: + - arm64 + x-stainless-async: + - 'false' + x-stainless-lang: + - python + x-stainless-os: + - MacOS + x-stainless-package-version: + - 0.26.1 + x-stainless-runtime: + - CPython + x-stainless-runtime-version: + - 3.10.13 + method: POST + uri: https://api.anthropic.com/v1/messages + response: + body: + string: !!binary | + H4sIAAAAAAAAA0xPy0rEQBD8laHPE8hmo7hzFA+iV8GDSmgmbR5Opsd0j25c8u+SxQVPBfWi6gRD + Cw4m6Zpydxj9cv/wKIjjbf0Vb7rn/c9YgwVdEm0uEsGOwMLMYSNQZBDFqGBh4pYCOPABc0vFvuCU + pajKqi6r6gAWPEelqOBeTpdCpeMWPYODp57MMGFHRnr+FqM9mcAdm3eeDRrPU8K4GJ5NmrnNXo3H + EKg1r3CHii13sL5ZEOXUzITCcduMx0b5g6LAnyT0mSl6AhdzCBby+ZM7wRBT1ovZVfW1Bc76n9td + resvAAAA//8DADbu1uQyAQAA + headers: + CF-Cache-Status: + - DYNAMIC + CF-RAY: + - 8922f05f3c935e61-EWR + Connection: + - keep-alive + Content-Encoding: + - gzip + Content-Type: + - application/json + Date: + - Tue, 11 Jun 2024 16:22:18 GMT + Server: + - cloudflare + Transfer-Encoding: + - chunked + anthropic-ratelimit-requests-limit: + - '4000' + anthropic-ratelimit-requests-remaining: + - '3999' + anthropic-ratelimit-requests-reset: + - '2024-06-11T16:22:35Z' + anthropic-ratelimit-tokens-limit: + - '400000' + anthropic-ratelimit-tokens-remaining: + - '400000' + anthropic-ratelimit-tokens-reset: + - '2024-06-11T16:22:35Z' + request-id: + - 
req_013gsuW7HzxuPzipsWXNSXD7 + via: + - 1.1 google + x-cloud-trace-context: + - c1cb316d45820401847cab8db34d87d9 + status: + code: 200 + message: OK +version: 1 diff --git a/tests/contrib/anthropic/images/bits.png b/tests/contrib/anthropic/images/bits.png new file mode 100644 index 0000000000000000000000000000000000000000..ac2eb415274bb6077e38c5c40ce996eabfbd30b5 GIT binary patch literal 55752 zcmdqJW0xgO(>C0;ZQHhO+n%;LZJX29v~A9`ZQHhY+x_&M*SgpHAKnkSsvxq{e;q0P;h2Dc zgn*>Qgw;HOFMOc`^u#m#I}2<7Udtto`X&KElL-YljuC_X1vA5RRn`<*9)w*s)z|R& z@NPekYHN!qIBAQZOrsj{1p`3=`OBD;U>cX3dzz8kwFUU{Ro|@Lc72*~RV0ioqG5D6 zTdaJoI9t~5{;W8wq&eITC(Pw^$gQYIHQ%+*q%f6JM%xXCoNBRi8m0wc5XTLFM0G1} zB3=0<`$Yzb36Us~fEb}Gp@swmC5;v#+-CB6Fa&`~2M*pKB<|n|8bm^oAQ<$LjwL>f z>2m?b@JT*UfWMzJnu8K*XMqD92=XRL#KOXw5ti7YkP3nH*u}&q5(DG$*mWY|C5TEZ zv^xxgs=sAGPJogEqX-0|5?_vNV3Y617b}S;6-Sxj+8NROexXnbvSVB1$nrj5CP7mJ zTFA6)RNhYN*=u}Mebk{3C33}FEP-LJm;?(c32%W&A z9%=6?w#`tw`+L<>2$c+(X}|Az*CGgkT^gs!Mir#h6Oj4ti4rJH3e@71jle!3 z&vr%ddmj^dV;0Z6frlKFN{?lOZ&e!Utb8E**GQ?$Ft&f<6;1apl4fkqS9 z5qqoHyCX-mknh*eDzJok*nO|HGz?3Zy*W!-GJn3UBip8s1LF*bP$kr9$1fd)fb< zjUq{?Hv~r&4tIVYx%B8p`H3O2qr|cyE(g$>ZIIdhCktIT;9kmKHEJbaCs59ONHE=Z zC!S&!@A4KUbJW7$pVRyKJ&13)o;}96k12yPMFk(h9y%{U@`rP@%Byp`?>5%C-+}fQ#W@ z`OyZFamoML8xP3p{!>EwxR|NAs^>&K*@)wR^-NY^Ebr`A$lB24<>tU7gx@+J2t&-| z^i?l{EnEMMWq??o7?=e97O=L6xAS%m+(zI{v?M^|*-L8(O*C$scSic3c$J{RXv+LY z0dF!sMk*!W6+1*q!k^B!2?YQ9Iy4xWzdDFIwd%K#ltF+bbyoy7Oo;v?z`hBZz>4I5 z%B2PhJZiWoqz@Lq{3FPfQds`6VNFEzQi($~&i79-W&RN;WilE6)Po%<;awd**__}XLH>_`Rx6hK z$0`n#)Mo|67z^S*0_7h8R`DM{O7QoMRyYlQ~Ewj+zv` zDiem}-*Wn-5o{@}n?Z1qUgLog{HU*>d$FN2GkZo32{6~Df~T7J8{KD>=?<2nf>C_- z4?La&mi|5DfT>Wi3Y{}YdH|s*nQU7t7)Y@n@fHfvhz#B-6xzyF%#nG|kW>wEty|)hP z$ln4~|6^|PO9CiLy#|A#iJmihmHYm%gr0oULo6?k=?0C8y1Kb|g__X|z(6ORhQ@J< zSbqUc?w^9MLS07!#YF~QJZe0c#)$bXvJt9`^G4JM?nAk!=V2R0s+fiJ_e@I`zk)-(EzDv)o%Oiq>+ zMO;Z0kwjX9A#N`foq-S$_8Uh8*bA2VNMt0jeJzC@(&qO{eQic(ml#>?Rr=E#ZIFxk( zS%eiU{02l{KYm)TiEmZ+G6Do-kgw=RZ*LCoBa`fAr;Q;Q-ySp*Ez^CW;nvXm!g}%* 
zl(5k4GV-nPhzVMFz)ZIoPJR${F3BZynqwSfyb)DLeY$xAl4pej*8(Tj88+9Srv%SCEcPci<6?+mIoyUnPf5tgu(5><_P0!0Kd*AzaBzT;>oUWt`)HW)C@OD4;NjYCk%DV zBo6+oTmaG1W>p}SuS}nr(IOKmhrx5#c6K5b6 zw2rW(P-I_l1N01&Izlg$XeX7>u@WnqMmW|{6J?Dob*a*uITx;f7;q^GR(TbK^1S@; zQBblXXtF0K7wp%>Cfp(k69wLXW~K{mrTRUD(octHvdhHl%CxeK%I8|v+Df^wy(x$l zBNk?AfdXnARbx$1reuU)P*K9u9(jZ{+DrEPIGz`3tf*sje6ckdC7cNfE3D5G28OJK z^b;fv5HZ0fC%(wcv&7?&yfxgI(AppNt(W}}ABmaFT>bJ~gXpNSyv3x*$_eA5T6^eM zBT<&Ye8He}XePlQqWLpDLYAI+{z$n>Uo$O}>yg1|wPUVu`pl~A@Z&6QgKF$YPi3)w zv&FIn9^oBG!9QdQ?zqSeC-H356NO0kKtqXXn6W@!#DG65K72tZ?59XIA@C`_=W2Y4 z9Yl|o(?`2vFNs>V=)Ik; zmm|o+nV9uh$XF~Qz#^TZG18wh{*RJ9hNFg(J>g0ZBzcoAVZ=#DxwD|MS0KeB{vN@*Fe6LBgbHcqfQqZ*Q+}Z;%kLeIFmho^&xexC2l`cvKph z1@kkg`~{#9TZG5(_v;ZYR345PAlM9^<(2~`Gm%`HDnORe-BKWx|2-hIl`^G+_Myee32lCtEx<>!+YBCLw$;r%;7WtTO1Z7 zoyh#BrC00oFR1!Ze$le{y)H2i55CT~yWkBovt$`f5Kpq>F4hJt8T)TWE64%Y$>r^J!_%0e_kw@RKbjt@q(bYfGU$k9$*Cio$*?&Z zOwCFR45Mh4+z)Bz`Mt9B!3-D=7%@BnpQ!>mz8(_e_EPV>_I5;8-_3B5-QN68Z$!9| zzRV-y!%-DxCUNveH78+PcVgzqbJ6FdyDLJDOek>E(?DVt9HX@L@7bUe%gw;ucvzt&?XK#WWR&mq6!t7=HSwK_!z^E(|1atLGDO zNq#x+II&_XrU%6i4?J@}P(?cb4K^-MP;KU=(-!Tu55mzrx+~N9WGUqPmgpF$OucXr zRl?Y$s=|(?)s@Dvm@URWFgRqX^0HVw(|vxY0N_%N!@r0h>eoBnyUfTsQ z3UBaCB+O$9u|f-n9-{0ANakTqF&Cc?Vk*`3){BTZvR3=G)29WTbjSf~f&Lb`t$kMY z_@f6Zw`LtyW%bt;g4uc(Pp}CJ=hBj_TNF(dOhqk}q7h5A(HxMpJtxiIG`S=c^J*rk zIdyiR8h7-UlWuNq?Jd_em3A@`M(o$~wsKH^ex<5}0On9M=dQzontx3`yrY&&wCtlx zcY%KICdS^`9h+bIl5*VR8U4d@vm>-4^T~H*>WmO!A9Q-Rgy9OVxBcsmjXV?;6{*mI z&_UR!>*Ek8^xgYo&Z?QyX|gKK3Th`-LbNo=2>)?*72eX-ux!I| zis2qomfC16f^3s}NI5lOq*=XTF2MayO#+J!fL=4)A2L<8UEmOu1tVmSW0{I~hz2Ym z!loftji|y>e0sNx926a|uQ(y!NYjt{H#uDAdv5ttnvgcn@4#mgl@~EyZ)^u;ZG(-e zbqGZZm5XSEy8D1I`1_RUc9p6 zUU23IN;I18;cIWy11Zi%`M2#P2}tH^&6kvb`(1PH8MxTJ$6f}+$3C&OVH#%TE625= z__S=e&cnxs5=&Mv%jHVN6cLrHGB=>aUNvo3xn;wiKMcuI5Jw>0-mZ0G1j(hh6QrP=IWnCv+kadL-j!ExdrQGdxcSA<*WVa;} zVTWv}^ptdYB(}QMtd*b}nr5P1VWIli+m) z-?j>u{h#@gdk>11{w8cbkGiy%O#V5ZdJEr69j@7}Pb?8pfPaK{4h#3$(fAufqxRXF zsWob7{wBsfX=Id>;q-VRTKoCZIvop#1qz(PKv7bD1xaAORjEj2X`< 
zW~%`A5qWNCNkAII~Uu7>(S#9ieMx$*1c>tcAf%?mc5(G5v2pYxdx(B{> zmG(iZ`wQj&zIh&OvnweYnlvQorF+~8agJL+f#T*b{W<3$(P4u7aEuTkxY1`~gFqwEou=m7Qg|a(!==5p?zWL067VB~d z4#yuFdb21|7_vIS%#DhGL9}>dYUnyF2Y{tld9=ik9wwrs6Sbj&@v}*h%$}KoxojeY zLlDEmNt#HNSG>JNq>qXxh^M;Kv##D(=klVaf+P-u6+F^Tvyd=tQ3UJAR{Fu#p-?nPgt1lHNu6$brrTTeDe%{s?-Nd{@ag8{B`KfjZ~x z42Jgqx-h3#Oia~=LC0u3(Gmidge{+#=@=EVxjl+E zJhM^Q4b;RAhm4iKK5Hr%YxMU6#?^;e1@-^0X$izPesAU0V)bWz#9f85vW3ppX9}H&poWd)AOm~QbgMmtl8)Rg-LP{!I zK*KJu9SjLGry^B7Zq0UmJ!{gxF6@Y7r+z72m zsp*jQGK0%%r%H=M{2Eu#EljgAAh=3Yl8nJoqSX-~rgDBFLs6~O7s1i8sQ7#H=j!5e zj-yQVnh*;eJmqNsmz8PezGf?)$6kt};Ec<#|NBH-z9Jftd|iOWq<5m149KmBa~8Qg z`zSZt1G>mH$~dJL$eMyulLKC2U`~!pu@J~;?`JTV6zF)yLL&11jGf`j%Drg^1;XUc z=w15HGFzQxwD>dOZtwZZO<9E;5xF!9vmvGbeXhfGESP1Bm(=_SQ^$R|=M?yJlS~0i zI}w>A6kw$zlcs+E7gv|xh~y&R4eNWZY zX;CS;!XD#CkLAaT7$v;%>ou9#D4DsC(3zn*5VJ1WZ!{Zgn~Tj=*k~H-=#3O%@EQ6e z`GKmND)4zlp8eyl88Pc>P_H@8jR6l5)1Ur#l({GKt}(UrIzvw&9C-)#`>>I&F+u6*4jT0 zgxxk?mo?m~TV4;mP6>M+4qw0uKxc$JD@yA-38ar>nmYi@Zo^3$p2jGiNS{?SMkf&6aas z-!}-GCW^Hy_1*n=uQN3`ZO ze9j`i9%ODg5Y@4CLeWFci{lF7;jOwObWODKbCWPp7zOO7V!e_ zx2wlBK>p=^kEgW;cNtk-_i`JKk)lbMmrBHssw}@I zFl%p(<3&O*Py8;I7~lJ&Qsg~+t`pm^m&`TwetwcShJ~+s*SvKc0e1C>Y84f=f=~2G zAbHqu*YZoSN4dh*6?s1X+-*Be)zgA71qdypf@d13;*H!0NA$w^J}rIJi%tlVszb}b zR?qhlHfCw=Y@XUE!)4RA^Lfn^*mNQR9qrAC8?80i3qAT zxzAx63tN~9Mu$|BH3#?(h0=Z=iI(E=mHx%#Sx@=RW7+#PKuY zpq7m&{H@k~oW7NflXE^jZ2T`_=r*(ypMopp0D!AD&LDo$#Q5W{q~Qbenhq?kkfX0; zf;L0g0YgwL!OVmH`a{TyZ5g z{S^L`em0cS&F^GB?b&!+$R;}i$&ra&Cdl5_CJWI6+%^aSpT`L)ip%r9E{>tvw)%;yEr* z#l+j5;|i4uyocU$-6%Tw#4b>wnm9L|FIDuv&re&{Ycm1k_|22msrGhacir{&2+3Ky zjt?(s2^?@aSY%-R_-j3Rl_qS&si>gWwY|LA0&q)CJEhn5zJSn~m<~G+qDIcJJ*w=A zq%1s}Ia?L#g6U9o<}yhuZfZK{)YpFGYUg!0sbkpx80)>%jiu-3PJ1}mC~7raqZAu#N^YZN#fnXOoNB23_M_HqTq1+|=RZ5*AZOTJG6 zP0z`d9RfJ%LH)Yvk>y<3q&*DXR;%p-Z5Iz|BDfs35n^Eq`o6c!!)`*_yCqtDG;>B= zbcA-r&{?qb!|>Yt&y|$h?o--U*T7!S)D?d{&o@HCr=0YcD( z2Z>7Wq)l)eK>Eg)m(@yO6upL02Y_>Ejy;>^I9>K9-0hExg8lQEFmUZWU=`ehsoCx! 
zks!GOH21wI^jf}CFTQlS`$khmhtDkB>DQiNMs9cYaFub% z6R+?@iSphU_4kr&ucHjZuh+}&Kj*s#NWoKOWUOcC<-F{+FBQe0iNY7L+gO&{FKeIM zTMwA{uQ=LWiK4zQ1T{dpkd{@Z5WHVWtnlOoGbA%fcxEEt(TFG%bNKFH70=at7jD%O zz%vmM=3t#oZcT;fP^!ue81yT~XmFeTV9_hhiIrcwL5lCkgVio8P%R<7UP5)beM?*- zi!zfeCJ>1>2BvJ3O!ztgYzJL*&H}{kr?N~VEb!oRG%H6RSjdLxy=@R2rQmSIAd;fY zkd#n){mR?b>5u*1O<#Zi$x;Kq10o9%r^qX!l$r4$^oBKWr;3@7bOA&X7G*`VAIJKAoE z@#x|K{w}nc{mNu|0U3c!^~FmnOe!FsGVGNgy-dkwvOgv@?Ykd>Y>tw=lEbaEMu)+J zt4E4)raeuN!E3pF8^Fh#UrTxl{XN&9P?3r7oQ;4G1)&m~xCe&F(Q1-!Kwx!`EY&b# zPk|o+)B65be}8-}b9Ud=<97XhTZEcLjeSS%&-Gjj%5dSaqmQK4kKcRJp4@)#^QRQi zVo_WGtBXt(iygoqf~mYVxr@sQoJ3~6iH|Q_Q5+~Ia|R2B59?D)VteCuC@vj?l6tFk z28rv6v*gU?(qbzTd^*sXh&_eD$A%&ok;93bnj6GW0-o7Q|9kNKx7GI{X~-%D+{i_d z9%BvZau$fO^$sLuH;4f!zlb(c^PW>5WNR)FS?+5#S6JQK{obQ{_WH?{PG>1joI@mY zM7O8_LYhTdWOpiuGzr5I8RCT@q?ADkDb>BufU-(QNpl`BvCsGp&{Tx)7(HUlJ|C6~ zlBtZaPNvS{wCy)Qqq*DjEH>L%`vZtgp-nr{N(EwNGk~p#IOztZg0b~{M?80}gg_7d zPT3F@QsH~2q>q0aYBT)>w~}0Du)l&1TRL4?TEw8cyY3)DqCB+SY@s0yqehlauU5(l8F~W%@5}F6y5!0)8XrktI8%v%+7eZ zgWnc4f@}9V@GEZkag34g1v5J~uqDg7YXRt#BrPfV@UXVRukg-m7W1j@j*SEcW~AGlOV#-4cJb$J zjp-VPQLq8OS$*=kFRiQD?Yi=k)l!`l?~kmHf@Q3uC5LNqGy3js4mO#GAs-94c3qoR zi?hJejO})l_jMeWL_2i>740+#J+r9wDUyso7oK%wG)#mE_{y2b#7keR;4EKfjl=Xz z>(@ig_Q$h%3_=0#D(Z#k=@|~L>twV(U9#uoJYvh)>y1noJ zGmgV>3WLl2wf_lXLbm0TpvBR=zqI=%aV<=U#d`Z~AM)yT?9RK@NYu6pdxkU-N;67r z^8Q5P()6kGx$(;1imL$pz~iQbb4YWK@;?AT78nC$I=>C zL@EP(wnz0_)4q2Y{(Fx4gyt~6j?osowaEE#=1d5=h(QYWpin&sk>G{!(Ee;VJ3waq=Zm*jCDk{)G!TsG zqNdgO@Jyd``C{k?bh4aVAAvT_5^D>A&$R1@&|`5T(-9O5Kl3E4O{Xp^wHjIo%2#&%*l_bExu`UknA~{N=3O36nT#k(?%IHWsFK+9occoT#bFX zkeKzAqcT0nwh!Zap-pUjti~&Bd8U@wls=Wu%k5X>NC*4Oy|#N&K`wEG1!j3ZjqMoT zg^UK~xiaUcIcj4KmCTHBdmpk>%iD9N;^$lJ?~YyE6Z$g@j9W8@i%|JVHaZWs zgNjwiSp_OqcLZ&@vLkjRRY9*xeDmpUy8l% z6coGE6^B}!DZcxeT0>Q(m*~oPMaf6^@g73v1aAdW(tGyxRjpoJ3j%7&mWDyh{Kiw{ zfuQ#an3)9N!rH72n)4-2SGt)W$4K7{Ugv9gDplFvn+OFxL|#!OV%zG&jUD3BNoF#`k6aaF`*<3 zO&K{%Lc>Gcnd*&p^GwaJ-5iqhHeLnOB;fTuO0LV1>73@Lo|n#Rcbz 
zH0i}gKS*UVjOkaCqa+&c!BFn!nDNMg;U$~!iJ}S3o0^Z7C|J7pR80#DZXuC>U$Ob^ zV`Il$Z?kLYk)=-@hq94imk{xmzI0%9O9;)~y{j@Frj~XPXRJACfwBA=39=xBQp6!S zSRx_=oloSddei5uVdiJ%xy*ii5qd&FqnN51$XQ=H+~6iapc%vnPXnVs>jR_fHyj*G z1dIm+Je#BmC!i%_0<&@8iJ7f6IoyRH?7zCJ_&#@)j?(2-Wg<8GTT&|Fg3u|QL&6FL zt~UDmTSNKxCE)`j4>&7sYPiA8+;!qA6IvS6Dmdu7Jp5Jm2I`a&fMl^sqAN@Cp;84{ zuYe2M_0dpQ5gpK=NPmNEgf23h#AGzTux4fgJ&P3*;^;&Stp=5C@Nu?g2S=)UXpi0j zD}9(L;6jrM@yhoZ?A=ZpE`hrj?X38TFqaeH0sV{Q;8#P+-@L&&72CB1rEtj=aPL*# z&&QRaQ>X}-$^|-r!eC<`0E%T#wZCA633}W=vX}i8>_tUG>;1gqM}UGdZ4Jv`@C0IV z5~9m)88F9!gPHC%Tsj>`u0&A>$W203`HB{O{!zayw;&}FdezLnVQ?9WZt77%gj^nkH4I`&zBrh1%!9+(syIygi@$^I}8RjO4~#jtcf{o1s9v` z@gbx>4LqQyOz$%0a8@(yPQ9cxRVat>6H&kX$i`__W?giX2)6;e#=KE!*(b&Zgfk`& z(|q@1^N3esA5+{883vMXg*Ydnrtvkd$H;(L^?{Oy0>;Spne-R==v#OMdOod8o&QLIz!Bbuh znn{eID%wk|p4WEgrl7r8ml!H8ZNB}2O!BR=!>F_wWSE73KoK^WsRL7lM$8G#bBqng8$FPp1%Ih*ug=xedUG#Ee=Vjq|Y?!i;InAGEAD$un5@U05M73%A=d{UVtT1#?6Pe`JiRbO6vpzz#REFdahH-2*-r5LXT>FV;HTkqCSuEZ?6(Mrog6 zd;)N%Xck8Tv(sS0oR%ix}uXba<3k~)-USOfwvn4>=YD$49UCI!*`RFXIHsJ^#N6G%;QY`92N z4Jw2DHoX?P7d;e^`$Q>CZ7{U-fM&E5@d{s4z0tYLKgXeh-H-c>ABk~8q2$0}L$K}% zzpORu(s~qbNNqo0b?H%ghlZ>*Gt8GR2jvrKFDnl(c_JvArz)t(R%9VB+u^Hk+!MpGf z;EM~WBLg4bm6_x^%vO~dt1JSQ2h)`FQ@M_*;dzT*{&=q_l#hU&m73gDV1JPEaNaD` z^#?rsztl4r zbQY6}f{}j=Z|B_fee2&fxCh78YS3V|u49*H$*0(xaYCElv4vVi0 zND3QG{5>69r6T1hbqf6MwxurHf$y{!|5rTto(`-~Q6>!sUXrM^23s3-iu%%-Z3pCQ z?%77oxE;}TUNRSUwM^?aKM0D8zBW8LtbpRTWr5jrj)-z!Qv+trINQ$Cz{>HLCn+i!QLCV$s&o{cSA1k4v~leT+X zSM|D3B3)EhB(-0Ug!bO?4YBU)ZP)im$*yPKgn}RD&ZMH3LrQ(1g-=T`AQHGLHK?G( z2H0ib={S5z1(s43DvW^HXhe04IhcigKt)oKwf$x3X`yL%CqJTk=tbJ{M45t(1i438 zrAH58K?D=EQz{Yk{c5;R5IGS~U-MO1#NG#sfiHnOREmQPSUKey<#7xY;iD3j;`wj5RV0M)Ae5A8NdS;(0&y_Hf45S?fnzfjrgxD#V@QBXA-m{VirtpYJrxE`?x2+d+TE6f(0^uU{zSGwrMb0 zN)#-7GE68iA>E+A1NyFH!n!v--0z2;VtpW2RxF1`#9mZfV*$-8QRoSUF)Y9!&Yw;o z0b@6r_l!q(FELb}d0db%pf*McFc`)=u)HPi*I1uv@jf57xKrKFuZMeP8gsdL6v*6M zFIsdMW`ZwUri`CXtkEyN6^4kO^%W4WxM4T^T8}5}@3B6%+t}3m1d_Q_3|c6dKl8L8 zAI{n)D+2q)EhPsQqb9=^IB>W%w@R|d!lYys?S$h 
z4+!>ueb{d-owDj(doF5zXuhnq_m32+({^#W%Qu2^8+hjTW9ngX>?8FPyh&#u?i8P- zP$rw}H~J16h@CN2EA0uQC-qPOn_EeL=evd(Dp>o3>%#{PZ7@%P4FZc!Me*h`W^I!W+gWE-0oyv_7n`&;|2!4%BUHhJXhf>5j zZ7gS*+mcG4@xel)227kB4)D_qq)6X7c9riSr4QLeXi%4{q_Sk|Vry+5bgC_T&~PVH zudFDwVjSiIKxLoTL>8sUNQ@g(V>m2foSozDMZjt3YJX3Chh{%kPBn|!M5A-YF_Wsm>dci=9IAk{E~2Bcup|ObHsaO0dI%e#)_nNabc)3H z?yneG!n?5!Sb!}^#FE6pDo?lw?;gS)sDde8an`5BzklD`!0#ScpS2{%ea$NKEO0ZR zJQ;*nO^w+X+fzCdqEL)n( zt5MJhSXLl4o$F?#1f zoasz5`PBJQN%erGX(UyiquBP2a`eL!{6xL36f3>34lwo@)ywxLFoiHpMFhXa2{xnK zXsRWxUe`z(0j#$f|Ax2(6m7ESEJcV7Ir8x&{^L&+!KrpRdX4C^dm4nbH z!QZ|eyI*Pul3j2F{Q$?Lk?pk2rE*wlf}Bv{P#+M$uqha5|}=fsK}ZiS`=BNq(pK!+6c0tSbHf)O1z z5S;ph3T>P9hxR$A|1)6mI$eI8K)N`ezRoBsG&LrlXZL3LuNc&=gp^sj{_E789V>9> zP;gF5o;r>MA3-Z>m`E5P{X!1Qg3TUru&Hhdj}=!0Q_trWe1r6QP<6UKKA$Dg*iw%R*0bbn4&mnO*J@D!*mE>rgG`KA@IVxy^A*C{59*%TaF;jrF!&3-Rp{zcL6>=pH1oj@EG*mk8Ah!}C%e!Y8O>C!w{ z-u4=<{ei@j9-CT+y1f$`>WYkpYDx}~w@^S3fZ!K2C zGDKM-SxjnjIh4S;wK+0Y{p$_GN-@ z@3&Ll7F*vBM%`B14SONv{WvQb?;rE;;x3xmT|QfT-1zfq_@Xb#3&f0``LeOfSTzih zX(FPegOWdv^_^ZjeG$VUMF=_}N-V?h;~OneS_~e}V*bu6YWwbumf|n-g?iPg)7Zk6 zm5#Hc;gE}Hn3{7)U`v3PVtJ@(y$Vwalj)mcc0Q$VdV6#}&n)SnlRNs3g1_-fQ>>TD zKZx2@*YYM~P<#tb>U|xun;;cqJEB@4g=xZTg#0VU1 zSi2U0&|G=0Q?rpDbeb=~Gichtrd5-K*XjKI^T5N#oy!p(A@DKXVy3Lw|1%q9V#{Z`@LI|@c8zV}4 zY;L9~fGYQFPy7f8WUa=;6^im@hmoQwOve8gTM1|TE!2d&CSKb0!PFIN6@jVB{Fhwk8}fptv&KlX^v1 zXdkkCmq=G_59hB7w>kA-a6{pJx^%HIIrASd9jRZKWczm5E?|bu^p&XVBozB$>?{<&QrShR>HftlZUa<4iMCQRd2wbV zdufJ}!g`!x(@tj(x@dJH3NHe>jesixJ8m|m4XH%bWtb~?5bR=M4iOjp2ReXDm$Mu? 
zTUta}n@8=x(rI(TZwQH5WVhNWn;!|fx*Y{_AKon z$Xq@~9K2r@>Q1~>^V*H(^L;0(2iQjF%3hFQEJ#w@V&BQwnNaz+oxfS^qK+1y`+nU* zq0_bazD*VW{fH5j%HuGp({Vq1Q_OA7BgMhZ3k5!OF~K9kN~S~EqN=(qU4{{oPb zPU?;Vs|q<`{o&TzhE{nqpp%M1#w~4?J{-||_Q6yJ6vuPqnQf9XOS-@Sg?6TOgLK7%q z`?Uhe)*2ZNw^5{#N*9f?x0Dto!{u(KOiU--V-W?TFmV`uB=Okzaek>u2z})v(DiUC zHEk0Ek1QWpWtjkj06}=&srEogM=rdtL9WQjl@$c`CSn>S=uUr-)j;%pU^xb2DlEce zx4faU-HH0c3JtBhPO2j5{$j(LF86imol2E$nma8XMtYX4&pEz4vvtVb#15{|a>Jy;DWjTff zfi~T(>7su2bp?krriyjyto%O!%s?~0!)!`t&8>|#+eg3ur8zNj3^_0X$JrSh*Kb>V z))ELUCoz^*LP}j)X)>u%F%X2w-q?`bvSHWS$N#eBwe9t_4Mx($W*fTh)tC=+fgU<9 zK(!KBE$&w%R__}FySE-%+R{me6tGYOUuo>bRZIVP;CYYj2X1`Ya`KhLQFdZW{ZJ@` zBbUl-8aw%N9bG9Ue^ur4Mq9)M6V}!y94bWgRVssJimh7Som=-Ydr79jXI>sYUpYzx zB?%`z;ye1{a*7)9`p^qRE(zF3&{p@@Z-a)m_}CU{ZAR?C z{+^|*lOPZ)icEI$>XqxB-Z<2aRb1#>;!`NK7mwW{!=VtirPXjuuXI&MK=M}=Ay?Sc z=~KKu-Hy6u@e61|>D>WEQAc-tENpJ&eQo{bSm;8rb~=j~N70D;AuC?lVJKZbUm=7q z=uf}<%;Ud#$r{l;W?Rzoni!#HUdz;IBY&O?CPbhL(YE_wDo`4Hc&D)$vuBZBBx=oV zDQ3*74`XXqR_8LAFOI_SSzzQ#R1Si1u8NiL`mmE)>Gdu9M{pDfEbhZQ3IM5O6&2Rn z)~tFk7=s`G(KJ*-wsg><<#}!0ej3Ss%eA5dyV(Q3c}z>xo5MH+#b?A-6MLwEH)#L; z)FmE;H7)oB$m%{t^(s@l8maS`W^D|C)ppm~I1QMJ4!Gr>%CuPhSXU9JNhv$spw^l0xLN<$pV(WU0{Rx)^ zAm~hl*s2jp49#6Uv#FiP3v>p>EY)YnPe=>4dxqkcX0M7DGLgZ+u zYitKzGBMg8NSyw*m9Uu*L<7wW7deA|u>9jc7|~gcwFCqZOEHdEVGgd+EKU9KN0&cF|Sm#O(;|Q?Yau<60P{ z7X$TKM8;JuZYiN#HLdK7%VuKm3j#)c#ckS4x~vc>%uu&)-c?qaQdpq~jQVg3O(f$@ z76Ff!H8nNRk*Z~O*h^3MsQxlbJ?!Gp`%~G)*BpQ0)vM#)0ktNn`f!FDPKm=Zcv=Ua zQ$YMIU<3Z-weP=zrY&>dD{Oce-R!bCZp^Q0N_jc4Y{h%a3zGa*@sHTeRWQMCZfT&= zXY_{9IH`eUs|b@T5R{Vs{?tNZbWOztQOMn0fXNA`R9t_|bDzA#iBh5`h!b<6fz`5W zK6KJ&zIC(SmWU~X28&@JjQA_3VmMg9&=ZSKn0xx$XHys56@k3G3c>#JE>~1>CQCRJ z$%C>h_lc@1-%k<&$zPKMZj#Bcgs7zv=TVn;%P5S$XtwXb-rlTHwepU!O|r@*B*UgJ!pg6j)3KXMWO43%NAd_ovxD-?c8~d#N zR>hG0DE*6OflCDmd%f;^V?Wgrgyp_OND0-KNhd@=^4EkAl{JFOrgwF!I$%#V8yW>G z{f8RX_|Excrl>K!nceWo z(?9!PA2|K8WAw%$+cRXd5!H%oEa}Cuah{l^g+KU->!&T?X<(+3-g{Pz_+ueaJcNOK z$0W?jcLE9n@i|vowI$-iftR9Oq5d!wMfw*E)_>-0Ixnv|xYn5TP??dzE5`}{XPy6p66Gz{oE6Hb6Y7Q+hLvL&2| 
zLRl-{Y9E|`$+U3a2#YdhgbnOryIzUB_~Z^!2f5M4xRa$v6xM&0g^~96)&1CDf)6^yHub;P?wCb$`XEeb2%@pVUy5@viF;Uy zI%=heoj9#4V<)n|k#!!YUb%E7vsY4}Fh>Djz)k|^J7F*#wdDy@nwNOA*tU(kRU7Li z1th?R+Y-|f2-2jo<|=u}1UC$1dUSoLFZQiZKYiffUK~fH`>cj8avtYDmzV^bpX z6O`cSeqG@&-_Z?jcJ28Wxw#D;4>7n*IzhP{7SLe+@&0?))v~r%pWnW3OY-+Wf1Zw( z;F}wkD;LNCFDe$)J$!Qx)0YP@B4_;af~v43G%hUP!Scg;rr^X}Jc@F0^+!}ZHPUaf zts8fFSip4<5vMJjsFIH*3j_@_3CWnfbHkybH<=QyS4Qgk_m9va>lovN_!1-+Sb(EB z+F|+%9<`9G%-_HQwf=P57eD(Y!I7TFWV-6<-q-81Yj=h6U?tQ|CLIp}$zS84Q?7UP8_bn25~#AK z<_7#amunYRh@|;9Y`?&?$WRQ61fP544E8oBHm8Z9$3yDo^P)%~UyQAR=U2Kc3U$-|IW0o;7 z@DyK&N@q3BUof~%cC|XxIt6>YK|eLsf$|z@jFbJ*{VMalJi2kHO6^6XQdoO?a~aof zi=so1z?gL+VW_sp7}D?m5Vdj<`-6BvJomxA4LED$U{%=@K{p_RbZh7AqS)40Du-M={KwMJ+ zCumC)8~MQCUDp-tnjPH6{^!5`x1)VVBNk=zMULpQQDkoMkSU1G%pvyb3tM#>-p_Rw zz`-vHPDA08gdFI?6mgo-QXWRf52NP{%`?u_sHH_reaO_qrWUbiS-vPBL+Qd{Y+s#T zbeWr>3@H-=lE2CXs61&)COfo`edc4g4+gc>=bvr( zXfHx1j_|;?)lgUx1+dxA?pRx6w_>MO)J8Z=J>mi;T+d3g6{Vny^OS+jZE>kq5|;u) zGaI&bkz}%eBz7oaV;96jT4Qj|ae=emzSPz-tLMS>!C=r;3F)mA0@8k>Qpo0!i-rV8 zyMQ%eA(55ft&9|gGX09csEjlKWCMXvm!O8IHJ;`_{O<8JmmTxKFP@mv+I7JtOHFM4 z&I?k+jjJ|THh`nwuvs8_OH&qUpZnsiU%2(_$*{KZh21w_|Nq`|(^cnQwuq(z#a;~D zdZS$G7?5(DAg=@4*=@J}M>6ECYiPaj(vz@hw1^}6YrH#{+T)(a>XYox4?gRQw-Bv6 zf<}A5Vxr(=m8zNCcHcJ5Bo?b;7g%VYhMn+u)fuxpXUtyMHESw7U9+1Trg8fen?3Q! 
zE4m%v!m&e{O`GCllecP*4vwzUQSc=Bt7;!I+85_3m34Hsq9w$JVdGf7+*2e-S@6k1 zh^B=%W)j=PvwMEDG1=??;aBcmwrFX?EM{tkU)OYu*A(e=RWnSqXHH>D*-f8%&u@S7 z=+4*oYvI71U;Xt%zihewrfZg;(?Y#8&@!(vl$?|k%pvj|i$&6L#a|Ii5EGf4n9K)` ziL=l?_OBoQjxto^)3vkTc4}QCizb-IPdP$7_(uh_NCrDzjqQ1j{o&rnb)%Nq__jm$ z0=IL)T<;PYQK2ZDOzI6ghGQZX&9t?6O=Hc>In(AJGplR%)LC< zbfU43A=#3F=b(MrHa5ji5$fm~$*1xnAo;7jc#bO#Uvzx+71rFQ>0Y!iJ`g@$hr4zUhOJ(D%P_@8@p6+Jem`3NtCE!S+kBF%qWfY}wgU7A&~#%b)q_ zp&bLBWW(UT#GT)~XZ9mguDt%@<_>?;6f)5gEQYLzIEr_IyudeL3=hE6HhK+pbu-S8_(pKjHG zwPkszzxx;;18KpQ&yb`qltH#+UFTXpy52>J_84)WOn96?e z>yIn7oZ8Y=NLNh$EeQc|4)){49!lG{Z;#CFF+dKFrvl^ZswOxMu)dgZxIGnX!%y#AFg4pA5g z0YrjslfsTatK&-xZ~5L=e!JoM?W*Bdle%sRJlF#Wue*_=`J#zia(LpWUQe0rUWA>kgK%(4Rz~5hHOljA&b4MX&W~ z0qwLikDY58nIC!X@*LTYk(br!Jqqi0>5v$%IW>Il?2> zDjsC(o_k~MQ!nk>c5t{q;Zy2W-HTx?gE-4#o`m8X)@$s!3uY`>GH>?0uBP_JXD$?2@;DgB!)a`rY)}Y?hoNW4BE6k)21ho zko}q$ThJt`AS!_x!H?~pufE~rvsa${gQ)TSw2O^X&S2!)1x-n!^;;?RMfty^{`V@7hsFrrXqY8rhlWIRhANPt#BUGhqk3 zjn)VT%+#UiX{&+uSxwC7-d-=R=DP3TCgEVj`M-TDwb&)rV>Y=(O~fe z8(Mwp2_N|Qd9cZ#EwagsiyTp_qF{KBKV@#3P}?#AW$YSm zF%?T7X(S(%_jI5(fT2uKCf94RryESm z3|;qQv=Q1Oej~aM_VEIhxeQXA8v)5*ZZxXrM*|Pk)eQDy<_t|hOy*PWq?AyDsFV}J zkxM@o=uI{s&=PdVw%V>(1m7P_LSU&ls$nMW1crgIX3&5gfv#bVC-f#%C}AdHBaN(g z@7}A~9tGzAwigFh(!3zdwm>;BB;yYJ#tW4$nPw?^av<4PH_iKz&wga)aXw!Y>8#l5 zi{|+_c_Cdz5gYYMfL;~$@NZup>PZZHV}hlz?*ve&uLqo+64XF!&ep5^BO!V}>`@*b-=HZfI<2SiE#zeN)}E866#+%^h8&q(FHY39~02-LmN~ zJ2t$uB^t8)tk$d6V&iMrMDRSY{yfzs#3RHeS=84azxvAa-hRchKoePM#CaDD6CkOj z>KwE*a$LrwD#cS4aKpl&J?xLadD2dLBhdk4glRZW7j4L34J|Bg#56k0jdyJoCD_wX zzYu9EEk$P8 zRdMnv8m~;ri>U6XsANTFv9*uyFo%5xCL(HUhr-xOm#7(?S-qaRu~0VA;l)RZ->2}v z5RHFHS4jjUe^nA)mu#V6pz%BU!Ggu}Haxc%qenW1h0rQ-=ZXZ#1LO;_mby&~lw<+^i9eb5|Y%;=zhwVrt0@^}Byk2j8eSHHC?-9o9n1IA| z1Lu7gC=e`4#Ovr*z+BLcsunl39TigW162xPej zWy)=o7&QGQOO|A)bkxhj9*gH%mlwdbaXo63K z0+d5689K`=6w{Z`y!_l@b83&r0~WwU9-rSxr+{9=l{ga4FCw_nE*-U;f2omxr(*0k z|9ih`VJ#AhMn5^ju<#NE;f2A$L*Q{zHaWS%lqA?YSk7`tSvd$u{VNBO>I5jPv9$sF zw~)PN+KE>4lO@hHdADIhag>;Ii#$qw9njjW5 
zEusPVM6Cr2Vb};BQ7mCH#?>|v!0%k&aXnYVOLSziSDri6zd!7?nnjiiD=wt%?1C;tGz2qL7&G)73Z}>Q#}o{{;;YDsxjn1S#mgqU2?S02ntHQv_=$B0v=)^u z2t4ID=N-!C1WvAYj;Qi`I61C+X5XIe2Q|xwj-&XDK;DiG9Zjbw%U>@V6Z=K_ zf{0p+3s=4#QxIDKrAQph`h7m5|M0Zlu-yMGvey~aU zBDTquAO;R|!}EqffH}+_eduW~tFx0j&LI*MhL2!)a9F6En5@8$0qwSdWuAO}0pegl zFW4I_yGoe1kVkEx9yRR3SZCu*HLhde(6HEu(5)zqo2ncc;3Zt# zM#2e0%sejhx)7g$801AzizBoZlTVcL%-=SJ?R~V5K|#>+y$Y|lx~?yG^U2GdG|##7V}*LOPqN|6UITd zoh{Ix#G)T`1%rJN1oFo1&Oh6aHjbvA>v-)s6J)0-?pX{eV%r@g^Lv$j8_l2p&;7UF z{G~tKvq_E72}aPx3UVlm#kQ^sKl|v*b>3#IKzEQ&6&0|;f+Ahy+FDqCIigh{P76G_ z5GPJS`N~KIBOv*!V5rI-QDrk`&jfj)10^(&tQVe=6W{spMOr#r~lr*YTtH6sS`_p|- zCx>9J>casjq+_AJLfvB6;NLTZ1z_p)&CTMcNNn}N#n}3RZ9+XQtRga2kqD$dsUlO! z=#xZX6x?(Wq@RxX;%<@>CrpODYm|809z1^K+%J6XMv6{jI1?r-C<0a~f_#Z2c<+h! zcov%E%rCw|EBx-#|)2mPK_~JkP z&*Q(}KHNLLn&lzUJv1gpzQX(2U!HwEpw(m8p3Y_iS@F`-9Xs~edU$~NOALW| z>amvFg)Bz$(Tl{KY?|Qm7g~mvfPf3`5~Rqa6C&WW6Clnl7if;mUqjY^ArikCH(Dath~`hBth4U(7$c-?*#!|Ji#FIJ=7T?tf0Xb^ESXEA2|F z3P=b60t^BJ24irqi4z<<#ZGL;{-ij0oy2*azY~|(em}8O9NTe;?HHHX7#j?j-bE20 zL=hk%puu+p`~N;O_nv#VYNcI)1b1|Fcg{IepPBicXP$ZH8S1OkD}R3!et^sL zX)K@)9#B8~?#)q^#NGx%H#wq6)eaw2|MQh!f9rEM_U;yydd`o;nVR7-5N5!OQ?+lu z;fYLtfF@Ug<&v<{fNWOfmo)qOw2?x2E)`ECiY9G!0Ayqx2RbEnorV0VxLKuuY3o@{ zvXa=eC?F#xj(y}wCXeN5nDNZ7@sz)s!jBjz8+bW2WX01T{P@*NPPfI%!wkhu+BB-;eYh5C}62l*SniVfcJCcu3uS^SUYwUVCx`$`~iBnS|*Z zMC64|uyO1>*B*uy5xAT^yZ0OX(%4uGHzuvh?Xgmzf?vlEvWcir%qs84Q^2xW4=Zk(E7PJFACf1GL->;k zUzI%+Fu_i9U}bWVfxr3tKfT~hjRH@Y;yPl~vQ(z!WeQxKV4t#UMZksbt^rZOcLl%5 zaY8^g!Y_joW3pi8cY||nehCeQ%ZsAl z;Pq!8zjw>-9s93%$7`>6+v)(rJ~1&^Mg@ist2=IbIO%Y9gk!2)xt$>eX1zzxuspB$=GiCY& zV`G?%3s1vz%RNvAlmQz`*VshD`$9aUoC;vdg|ILliG!gJUVm`=#tz2=N}-e^$An0& z1-KBeamjf`oHv_uW4ZeNUBN9sc=&6dzNvGw;($qbF;ckpnkUo!n8`_?nyYMVWMfxC zRBg>i0E1`fq_`?^_&~4V9fhIHmOACM<(#KmwA3bLJ#^@>hI4|Lu~y(s!+KOaGogT? 
z=}c0n#GL^J_)=F{SrjYj{MwNgUwd4bX!snDc^kf($iohyTo4&@>QbNn=Z`io6Sj*Z zRfS_X9?nv87tDc=0dC>Dm}>PR!6&a{>Q*zHJ_^x%J7THyA@!qgUYqX0(hbNhps&{i z1ig&oAxxO?vOz=?BhraXkGpllzR!H(UvB@&_B>09L+asspHx9ybhxyD65;_~#w13O zn5;*}`nm@N%Jc->7E7~PEly)m0JM~ah@n0X+LXJBJ029M%pZ>jRi~Av`5>=cb=r0L zJ7RWCVSCFoqRK!mAM0=!W^0U$8P9h|7N6btk6-zKV(LR`dgREIQgaC=dEliu2+f1i z?}U@S((5rkOSLX&5mj?J6&q7PZdYfj<+S1&+?c?}`4*!ZF(<*2;tkh4nH^xYQ8cyy ztn!b+L`498V7O#4(aF{R>U($Hd-G#wo_V&P4t!CX5O9)j3ydKmFfdOiA;~h^^_@O; zXln{fgE*?TF5$8s>l2Q!0^@x>{n88oX{nheDn;elF;Sp`U&lnY*^3mbvaP6ms+pKD z1({~Tj7b?&MaM0J#W+&ll$l^^@em7I+Mdw>#@s0W0p7Gv;`%*qldCUmra*}~c8 zIuYh$Af#)tW2T!ENfHdzm)NlhJnF`%?V(NT-rqdx_?&<%eOrJ8WCC`kDw4djW!8=X zFV(qo_@Tp3aCV4+au`ef8hXREfl8r}P7ZMsc+wge!elvW;iCCi$M!OwjcPtjSw257 zFv3!RdU+uIC2oY5A-kzs^<^l|D?Lj(d3Z@(E!DnKb@z91JR;)*Hw6KVEX)KjGOx10 z2)(=@IEEaDejT6n=7Fs@~2YhhDaQb*&p*utrjHF2ZD3S=BF zi&utflNSvY2jP{5)sMe>b8Y|ul=(0t5~TJpP{fouqsF4i`aI7qV2i^-(nIiLQIx#c zTrZ(vy&&kg6bJz0SyD-{RLURe?mG-~ra58}$$@VDouzUYo1*lj;+qKtCTq)?q*;l3 zG!$?!WZ1|mJ>TjhLfB;KGK0&nNNE%jByfTZKv)8(#=wh zbLu1Uf_k!mm#}MCVKsUerW_j7j-p7Fpi@zxGJl+kI>-GTV}xDY^P3KK4>RHbu&KV! 
z1tHf2h4P%#-+bw#t1gK%T7}_4AVxxY)v>qp&KsWJzo(d2zzvf@?6w2YMx`vuAJHu#2UM^c8r;ixr zwVg0=oyVw9I#}5gnRE-^8mNQMrXRoi*@zXF$)W)-NFWk0iDP~XWj?|m^HDEkBJs5l zjZ~PW>UpE5xzYyf$THQ#S*K7hE38+Fd}s#U93 zo@oazBB52wwsq|9*|~k!wk=x+`-iaF>_psTG8U<_&w9=2mt1kdiVKhx8oZoF%&C=H3+o687)5WIe{yd=m&CrT2-<-iN37Copf7~y4-h0bQ?4&K znp`voieq?zLD>tglah&~SjCa#1s^TPb)vg>>^t*yr%MPNR-_YAZxGErZX_Xg6mhiW zs84}}tGL;rKn1^MhmJE!#rD{l$a1|S!WTVyCQl9x%5*v#k3~6VaM>wy|N0-_5}B`; z=K&YQErp0$-4#Df4R{z1{o-qe)7m#JS$oxoJ6}*YUwhwOH$9Yy)+Ft!ZBOq1)SrFv zyw|V(lfQarZHvSOQ%Hb##xu5Dq^#$^!imV3wf$Vs+pX$a8Q^2XhWET017W<9^g#-c zkIaaxQx+63OQ>b3nfLs%w-ahaDBrO9=2w1_8FV6k+!1B3sd5jCF?~-6f-+eh9SyAP zjim2vQ>Vu!T z>}$XH^zyaMz3D@Y#<*Yq)IB@C^jAOJzlmLh5|*J?awdcDmlvV}HW%G1_%H7AS-?w?Dh?g$MH}k+QIWtwF;EGW^4fB5`o0;3e+@_2?bl z2e$O1j0)Z0yJW2pT4{0>CVgRk#pk6OP0t5%aTS~vQyNwd)E?{|E@6|UVhLKu@N7ak z*tE`0jP+F9aiKtk|2i&YJ-S5UI$U%fJ=cy;9T8gZjXuM!4oN*6+kgkq%U)JT{5*>`Rl>hK5mZMpTj`)cMy&OCp` zxfiZleCqtAE2_iI=Mq+PSS}1F2%A>l{r8_lbG00c&BmZn$xc~-X}+W$CFB|%2NGhX zdO-7Eynz>g0na3MqU}j9zhf_mNMQ8=Y|FZn_kx;Ma61b-cn^YQ@<}FUUB6mwd2` z&a5_SCZhO=zkYwJXYs10PkiCGqD&h_PAxJi&6qHBiU3vZ`YnHEK9aTk{ zRBBEYU6AX4{A^~B0%{=7!>(!Jo5{MIdgG_8rR*p56>3nPPKno&SP*{z6JT2PL_%5W zybI6W|LpyG{#c0J(a|BxyVIw}q{XSc7)yZ)evPHbOL|YA=Az#_9=ec7<{iZ5<>)ZF zG`{0Gwb?)Z>))+uD}h%g<42=nMSpbIqqqaYFw_qE!O(r?tAFvizy0P=N47RmpU?T% zeEpYM|IKfD?>*N2T&wqH@Vw+NUKnoS>H5E5vUu!w`tS*9r-i| zO2Uqz#2&|#YOc?&bLOL)(fMNP)@={n^2>Xwnxe~3Z$D@4nOD4J1*GfWtFHT>n-f-Z z-UG<>R-t1j!!*lU3P#QkOuzdug*$CjQMhbA4(h3SoS@~IE(%YF@5B`lpejHT0~BvJ-YsJ7$_#{=55X}&v$`DyA zQW#*8>Ad3%4h%4jWdp0^uIKVvK32$>kc8>$%eFMbgH_xtQ()XDb(YhwBsV1m%y?t# zV3dZyG?gz)|DzovSxkPpf#;??cj&73tn|^T=ZTK%Xdo-hjTph&jZWYw?huMu3AOUv zy5IW!H>xN*m~(S#TB=p!e|-KsTc0|Nq)8@cV+$zh*Ecno@r_7b4zeZ8M8V370>3EB zt<)^9uKBNDq={wn zsH-A`H%?MK4sKkt6!IX`!Tv!{FLankEiEk+*Z6r$eZchIUW{7c3eU~k%J0l6P$B5d zX*dfhG&eWNIJVeLS8xUa%&!asJ7d>p{{Bm!K+Y3Ea6Q_EvEazK3h<#l0uvKm=romI zg)odTjJQ{+H~-ctE6$leJkq0bz8g4+e8bPbaf{DBK6Ew9(wDq&mTGICC+ABFj=BdA zOnm6Hut>#%15tv6^gdrbx32$*d$y}AtB0_YVz$8`5xWmj57m1Rk@K+`h!e@2oM&aS 
zc`nblGdUJsnd1WdBqL^=wHY!|!zf%#Z;O{nbNasyY*Tod5}8j1g$^%W`$#0#iH&%ugy^WY z*mHzDiZdrM^W|6=+(KuT6Lhs>a}Vw)H#`=TKlZ-@XT9B-0+cINTU(lDQ)u?I=e_)3 z|A>&T(4@znStaHe3RLiG3_)I=+m@DQRKRcyT0OLj#lZRtMvwV|yZOuCycXt5<5ywD zQ34fFt{7oPe`UM@P?o5oH7`L&b#^NcXVzIb1ciqR`c5$J%%Zp z{g4S|nL2M`PsX=T;dzVtI>+ZX~Az4p-#Qs@VQTp>-sI9$j^8I%{hg6PFX zB3#DA;OLZQ^Ujp2@CyPnJq)a#o}Tcd(%2}|-9nbaJ=%e1^|MAv%{6($y3A&=Uae^r z3H@rV{|QlVOU;?v6p1=wE`iYi1{8*_l#a2$AV}6nUw7p~WpPUtfQiZB<k-lTO1HA(U{DN>UTZl?}$U@1ya|oFrKEZwB@n_r@;8;pj zi6hjkxR;v(Z2FCt53;@HRy4UQE_}6_=S$>*%vjpEpPVh zbR_1Y=rC|NbN_9R3VxLhK#|I$c+uKuI7i2dbau!=YLhvcUFFmdzJAl@jXR^PDq=!x zatA>SO~5JUh6bc~VzBI`=efq9B-49@aecj5oh>?BRn-(uQInF3*=c=>ZKgW_l?1@AxlIUmM4G|`Fwe$5_6h0M;e+pZ3m3OwBxYhK%ZsJKF$Hi;9DS0f zq$v9QmimK_ythAlFz>Va)mE^I=RKeItRZICyEtm^WBju=dd|ZrS)J6OEJ(YGm>GGx#nBVh=Xrq z4WsNMP|By`I|>R^@arguHS0n3d_^~Cz;GXhWr^V8z9Ra0|4jFk)K`F0Ohz$S96 zO~`SVDNe?|)9Z61Hd`y{fo89Dc?AlQsi~PBav$Op9-hOCMYM}*XZ#594Z4jX5x1d3 z#mzX<#50PM)Y(co^s)(1ScZUJVCMoS6S!&L8P2(bxk!ID)|-j+=95ERL;D5K0d!FT z;Y`dJa!giZ(b1)Lv}r7w92qP$3aF>^sr<|W1*Q@`%tG>&B&I_F`ZFT@mWD0)L-GMH zY^^QY)x@Z$>o#Yz_#6zYVYsqMWQ7x6-+x`sSL3=Gu7+TNf`|u=vyk^OrQuX;V2iLBLRH zs~F2yF5A3uHw!ULcfm9Lir?01%wx4e3>%(rM z%aMq|u=CJedoF)pJ5yS6k>kU&>hx1izd=2~dK{3H&1eZfH8AxlW)+L7CvF*d_>O0S zv>Uf$dJszpjU2^5qyL(cGh-BZ(Z6^?|;j*S8p{5m$e&0;LXG-69( zVf%c$Pww5(x#aw2?s=@o-XtS{fuzICO8E+T@CQp_ZB2a# zv14T^&$Y7lx;r;q{@d-Ks_ay@RkmC0d7&%f#F5!WoJcGZ-@P@xXtkK4H%_X87^qkM z{D(KBdVN9`#bfYaM-~H>16EiyBhD|MFzUJsC_y52A`#Ny6o@8P!Gx3InfS~rdF+bH zS}Hs2TiHyIvRtpGF^sf(^mWf~n|tb_*2PQPmYlYzeMw`(TorGU6;=6&7@Za2VfAF0XH<$rN!VPv0eON( z2w|iW?5MY^X(^s?x|%c=3|gRidkZJsnj#fyUA(FMJvItd@ax#<)b+EG9^S(J>|M02oNUdl~aHBE_m3!7J-b=oPXEooa)!+y_baUnrcq#yuR(QdWD zUjyQTMh2uPHLa(wpkMX&sDt~4w{P9Od&mCXj-k%I{dP9yMxq{#D2@Zlvt5U^6+_Lg zN-p9IJ7@fr-(#Ub1;37kYO@!I5n#rxVn$U}Q_RSgA{+9QpR#`Vje9@-PnR>< zVk0w-VZpF1Kv)?Ll*gSlKU1!Hg$2QVn+7e;_TZ?FylbbUZ0*j$kfmb44n-n6b|{(= z+l>`4Z>el}qKu4oPmD|?RfCOz3vp6w%^%^Bh!Z-%D;A59U`}&lhMzK^W^j!s;3aQ~ zEg*yK%lhrDh4N-kv^*UK((^-6yPe<>9m`P$K8Fqf`RUE(Mq%GB!Af 
zLRHr!GhKQxi%do%0RtZyilY)v?fTm`z2>~L*Id;gr`+pt24WN-Opm%!u}E-s9b?T= z=UlXU*VFg8RyFHtfGCiXNF;LEEcjDIqQdxhE%)HQPT>_b5D?({pFFZ}$Ki-o7r=b! z(rUabf|t}*#`D;EXe=}!MRs_AErVb9HKsuxfpYXGqKP<5ggH*-ji%ccwVtwS*}^6B zPFdOBx=2ge4NZ!aAtTVm@UQ|(A%vn&ufbJLMMDzFJ6|2#r4H`t-M{P5!95+%KlMUt zD3=>yf=p#ejuwbw6C{)3;3IaHGe=cb@dTP290^WQ5rVNM>-p@d(x-Un0%&ZU-4=S4 zrdGkP6Qkkj))wnlv2G`&=Z$(vODi}sI2miM9gXMe|NRU9{gX@nCWs|u28MiziKMVQ zQ&xeII(2|4V;5eMf~%ITZ0B%kYea-kJZDliFeqlNKoFS-GI~X-#o~iN3|ojqeTvr< zfZ#BVIkYcgAP~!Y5)5rIk?%3vB!ncRe@j2S6Vo#yv?3C7K+#8KGiKmqJ>{{h+|TFi zxWfTu?0#RgWN};j!p7$6^Dl|!Sryg$Hxa?H=_pC0hzD4nL-N zvZ||7^>mJG-Mr(_{;pkH_jIuJCns7X0Lwr$za$j35)MZkc}XH-{z8tiMYIO5Clm>y zu^6Jite*weg(jLSlmI}p=a?Dd{~OW@d6tSZ6^UdGFyG~l|?%%DFqn2VvqYTJF?0~Wggi&)PY>q?`bSd3 z97hnNWLnp>RvRHB1+hWK)KvCA$%L463>-?KVGO-ojyb(+N0`t;0oDgVdKNgPeFCP~ zVl`I^quYYaNv2p`U1$9>vxX|A^D6OG@atu7Xx(E)%3{x%(-0#Y~{V1bvi~t zJ{yf%&3E6hareIMpZ$+NR)~^>^C7Appm@d)h*_#138Por&{R7(LQ%=qvtqs%&7OV! z8Ogdtb4ycIyzYgKyEi=ebT$RI=%$7TWyg%MnJlXlbpQ)fDzRoJ178ZN=U-yfOLp4` z>jr1WTR1Xbjvy15@yH0xlD35l7cXxYg@&co)f{C_#p$^k5slWBqy{B)^`(58w-WIn zgb|?Ok^UfHrEFO3+mqe5yJP>(g9r9@JofNYfQu7g#5ESR6C6}-2ek?LhDhpgx7sI3 z%7UICb{hfFV z<3ZRIVK7Wm3>iH%Bj}o;+TOmb zwRwKyn)6yj)-L2IBn1m6a(4h-5M8Ha&|CjP1{OgjEU(~^GOA;b+P`OT_qM&ecO2Z0 zxcyLfkVAGR>!cAo9(S503E5@M8mYt)hD(73pAs|8Ef^TZ!fj8Eidqyax#jw5|+)zsit{zF%Z( zqya@&O>uT9u4#;>bHlb5_j7?2 za}FQsZCl+SWwBN3DbcjmYui~X?#yp*S+Ho)qUB5p7cN>_Q`0E={U!FQfE&eRK>{-1 zp{(}_!v#19+r`E6A+>LNe$S2r`*t7NvvWVNl}_a{nOr6`ASLwN)#vuKq090jEDn1d+(oC?dx)NQgkMtg9QvTzYB5L6nHD|%WLr%xyC7x(i zblI|n7?D2XoaH$;r8NIlmP5rmBMMaTYewXorN{*cOD0vaDwajT3N!AEIR?ojj?siE zh z;VNv4mZ|^6r$0KsX+d+VidM@hM^ajuw+REJH%&{z1g6#^`_ho5VWKQU=Bj;a>(=fU zpWnrNaMzA~16@OQKBjWEm$qQ(ECE7@nTQBR6&&&Li7}!g?_*GjhedcYSu@wuP^_^` z1aJT(A~Xs`NN%MxFkK>~L+6a;SAi^+AYrVWd|gxR+_{bQO|^4c8tWVDn&&muHP$rG zb8A~oP4qAFd%d5utV$K{j3_X9cbrkOmB`bf03!{j#|(dEeNk0c$9sDx$1NSf{K%*Z z*k4@z{G*3%y>|0EKfV&Zg;Hu|A!o}AtI^vvz)%KSHK$jA8B-0QCn{E0W5a`jSSYx5 z9_oc7EI_EEUUL;QLvg&8SryW5W^dZQ1Kbf)H$oclDg+{rygIy1ZQr_o@AiW`x9&c) 
zx2y9IyDT~IGQ#dcL|{R{Y9|YPU=gOoM4bVv527ZabLCdDqh#b&D2Jpyxhvooe_%7w z)u5tsuCT+ZBS+g&nOy6_d2^cU0I9hx^>bR9n&&mnnH#T1hGxFpvm(h5x|Lop`2 zSXI^5tBXc_fn#y5b=!Mz6n%|c4Tp@@Tf0x0-p3Y)Bn4qMogf#He4f2C6@ zR;>{lK8pz;yp|3oBbFmP%qpUwh1R`Fse=dAp6v&BzqohLj(vv@bPo5Y(Y->=OJ&(v z#X&xH(rdPSp^a!WNLsoClDnXk>_|Xl4$yKdPl}Ew+G%gFbQcy(AbtIj}TxR{GV;0cG;ESIU2 z9EfAsl!N;^qCq1{S~=ylFK#mf*TAFvT59h6#=P3@+RjMIJFusdoi*TadPLBMIH_T^ zeaqop+xP9?bzskq{apw9`#J}zOV6dO%y`d%u=ev9!r4=evPHb%X#mpOW$(dMVFn|x}ZiR z(Pn5FcnW{Vdy1S$spMw-m+oi52c~R$Za6*cvH23H@I8OQl2-7`WF?#stW91a9zp?? z6~IIy>3Qkr?p3?lG`P3pz`o8sJNEYVV0IOLNH!Sp>#Dkzss;fbIY`k6)}*KcSw*Lr zEWi&0keUl%i8WM@P1n&lVm2q9j18p*bL`rwiOp}HQ`?xR!+=*^bzNiiX=j|$(iX9k zOtLjYN@pjH5b^Qg%r728SOOM3`t|Jmgqh4va+Oqy6sX`=krFTaUp=(uE~$ZG5nV3E zu%7RPiMm%t!iJw$=4c#LeMujrGEup zB`^8K9S?*uDOY}9VhU97>m?@ml-V)Bv@pkrc7!1*tn-y(@ro&wj8x34mw1M<(OC*R zdp@|IVfEyL&oadWo&bV)+!b9g%~uF4XdxT8(AHkZNo!H;CTD||=J|a+J&8m;$_7a4 z<8k!bkw>9@AVwpQZ*f`>F)_28td6F?C$vKlB0e7S}>HEj!{g02QHbSdD!YUZe9ZLD`s z4~$eoRROVJ%}91+Br_0=MzYy7PZ9}s8|mYem^8XpZ9`0Os%>dY>)e+4bDHPX%SvCh zXpM4_m2d;GwA%WP|k5RjZII0Uw2s z@Il3O&PAuRrTX^8lZk=hfgq6?Py@B?)sZ+L)o8o&3?kBT-`_{zJJngSL4IuV->;r29Lh+l*+_X~b$d5@q#5z8iI z>n6wukTByP50|h0_rGuK+B=x=o5Zf7$_3HPITx&g`736kQOrcK)!fB3Ie*S+D_6{& z+l)9L(fpFtVnSB7VdDtKMFo-lYt1Qvq)A*c$w^*U9>3}-P{FSgtP2S9G{uM(>6b3C z{GwMt@QKuqKpD{}5S~aUW_Xz*j*Y8wS(w5*{e-xgezozA{p+rO-py5GWS*VLk)#u; zi(GVN8%hmghgC;lbyDXh3aP*UiOWj!T3t*5EVg7DXNs@=cx9*aDFR%whAmY_Jw$FK+N7%Dt5f#AkUAyA@VNe|sB zNiVNPh6eWT>{z{W6&VDwuUYG`m!aI4Y{OfSOTa6q*8OV#kH7J&d@7zE_7l-4_6wsi ztF|U_@f%Msm=7EcF0!3zcAaMHjy}n8B%LaW3Or6lofG_BrSVSi1{z(w(O_2&UM8He zU}gMToNxcZ`~LF&+n$ejD&nDas`ntX4H^Bi@=F62#%0){Y_kI=r?x&m{9m8FC zb`V?3k-!ywP^N@zdaSC8H7;}B@h7WLwJ&4)PBb!9%JV9sfc4^wFD_lWH2jk5kG6<8 zph4&l2S(@#DHJ{lJ>jd0XL=MkT7F?d?2)Ww@~AC{;%9FlezD${R{#8oYqmbII~l7U z&h;-?GH=D1%iEW@`G*;M{%C0d@qtkJVP(Te}Xp#a^Lu8t|X zZQHh0@atu7ml=&W6xl-&{sew4#>)1pFZ}g)_iyWACTQn590Xm4@lL}cr%!NF4oan@ zijfJdOt4qm%H-Ukg{vDr^OZl2&jHRv_Z>CV@orjBQ^BwCgsVI`8VZ!-S7rWqG?Y7$ z!qSQq2TmD};g&o 
z5*E-ZtvQCs6>?rY3H|Wc0|NsM4UHo0a?}Sub>(@NuKMnO{;>C8I&RlS@->(gm32mO zdYdJGWp9vjqPEL9k1=o9?|b{zmtXbnOPZI7k{NJ^%wBd~604kREW+7zSgT@Koo(f( zLV=l4pn_jBqu)y$TNtep0vNW7B9wdJyuv~uBOro6p34zHYW3?||K+T|f9&2Zk3I18 zrVU#yJ6^D~r6ZVhF*=ITu{CF`IQ7is=d4|n#H_xS_lMXtssxgEm|MkDp}a$r2ja zGO{o{#_5s^6p8RkY^dr(EI_T0rP3bR0Hgscge_C$F}EiR9V+E~WmBMnU$2Dr1**WK z@|rM_gau>T$aDM#!U$qQnd6K~F+vDqfpHvo>2=QXv@8A!1x^$SRPgIWX^CSf2e1{! zGX9X~0<#Qk85A>qgJMJ}6Y+-$6g{OR3r`Hpm1R{7`KtJ9)>%tSBCm={pFEYU;Md91 zRuh#Jz`}*Vgr89QSpkO(a`D7S#KMq8ao4CsOB~s#$0yefSW~XtadH*DQre@VKn1^! zj$X4Lx}-!pt+dPazor!dpeoaGC5pkj3F8X-Y1}&sAS>YHgy~K6aMUz+q5!k-Y=-QT zz~AU_3$iN6%pnvgQ13p!zt!o!!Xh*K46hVLr)HA4cPaM3|4%48U^uyE#Kc&pz| z(w+5qFtbHcItm~QQ?Ga`6nLp9P{FU4iqgk9pD}Vxq-NW+&BGWGcnxc|}(4rLC0db;r`@j)+RdM9HGqx20~f%SZmq zW<3kv^GVVqM_fr|U*~M1m|qc@nbJ;@!fclHNNJT9p-93~oT=XyPIW@qML6?YimH`Q zNYO4GVq#JoU-b#{EqhevEv8OFCN1O1t00RtN_p#I1((H7F^H=#9)%v`*Ihz_^2Q`$ zZeF>+uS)!)Y3Y%Hdj82hRxs8OUt#zan;iWqHV^V^T@@HaDG-D*fZ4I-RHW|NYS}b%Z}MjEbqiPIy(|! zY)^j(1?q*T_FMXtk+3914||=8-r}%iITiXO5{Y;`8jGnIwPzD2!OW!_r94(C{vs#CBH8r~qX9Ez^WJ*Ly zWx9^wxB{W%OaYZo$(nC&MD5u#xM$~{f&O8vND_ehl65+r=C`J%s%3u5f<@4i94*u>3-|~(RT||IFDE-HzhL>YL*57{M+h4tAuqWNv(45JnOj0>FQ{Ni<=1)JF zvsIuME+@-mf~KzvBpm!8SwGMP*w5nFQF zqH`}?y=u+!MJsHpTJcq)z?zDM+)V3_&)THRTd3-16=t#bZBY;2x$%}?-t8DX4a6v# zp*aNuT);0pk*JgPGPR9W%U3Qr<2B2#c*jzpgw`WLa~h_}t32Owf`s|%^{SFYXiC4d6^z_1GT=wPtog;u7p^|{v^5v6Xj`cptO#L~ z!FPEX##7^@d-5v6FWQDqQ)oXrfL{~MHpTpU4Dx|r9KKV6t%=5bdDEg0y5q?n#@p>T zt!r$ne&_FAb?$3hll3xYe0{csL9_BG6Z(TdhI-UhkKVgEo}2IZQI4b~BwZ83U5-`V zxg)dd8MXKvIlg*~V<4FVySe&!wppwTiZj1bOEW@@rRNC%%OHV{6aH8#gQ1;huBI@B z311kGKBfqKdF0M#)IeQrevayo#Vs+>L;ALt9zHy><#BJtg>F`xuPD-bOez!I2FlIV zMl;Q@V9>;ZD3o8@^C{%4S(K2Wqv9GwlnXaRgrWSdr+aojf5$JPH^%CMkAC70&$_6^ z#%vk1oX&X7%Q3oqMK(P?z}Y1mAKu(jwJ@d&L+*lEf?ow73IY!f2;SEN#Z;_s&|kl2 z^WzUZef`hf-~ZTeU+{*7(xx0701!^YQ6nc%HBsxq-|W5i`!{y(AB@{IwjYZH&2h)h zXUfq}VHcN1H6;<6%U9Uny{UWC6E|Le{jcA0_3M7?Lu;pz=PN46s|&wMO=pH;>3*8N zBKi^Ck3_AiOi$)u&%pO~+;HQFP)%#(Nn^nkrb<_XgbCuD|!x^RBcKdMYzP4#kvb_LofMi=!Pt 
zCQ$6#0eng>GT1--U!VK={AG*f~^iU9x%28fyXm9r4|Nei@zH!CxefZtY%Up|7gLU-6 z_r$bcF*!2jIry9%RQq?TuYK-&U3-UOcCtE{lV``hijc?LLVD{iHL$;M7L!mz%0b9s)W_x zrfYk5=f3)nKe+qXyA0DX5jaegJzsEJ-F3@DD$Aq`2C&c^0t(2s@@`eEaowHk`Ofl{ zrZ}1}DWn5L3Qxdu4B=Q_V0$nu;*nnt-4KM9QVRIU+?WJmGj+T@NBr7MKUqa+3(BBT9)R`(JICeQISRvN%1Ho%xmMH-5>7V3 zEdGVZ4}9@c|FQe|ekUihNU4u*rZKBZ8+fPDTZ65i^ZQP8^o^t;e7z$ z6sR$#7UtG)aiAe3lqXE;;jvWCvofBY;S3}$Jf%xfa?kg@Ajes8c)(_n^aH3IJd*AA zGd=F7Kl;@}&#BCi270QX6CRbjuljbYZ+zjpzCGD0Ri`{@0wilvFB;JkxJSs9vxfc1 zh`DmkNG3m$@kTr^M_>YYd>24hIo;f~(vftx^Z!2eUk5e=vxTh2tN;B;F~JOJLkO5KIUeTK19$97^#&Z5;U-)^XT)5>E=iJ% zB3PdwvhKDG?|jeNM61ZxLMIbdZf&*n>{e?eDJBUGs1!Od0VQt7-oacC$p%{BZ95Ss zlNnjIa#5nr;ea$V$Vzmua<<(#cX7SQu}OUNfG5~v_aDvt6^4FSD6I9gWk`HB#yU>_z}(f8|*#JSRiTjA7>h3oy9*a8FNXe|9+Q+Ode8 z@UnRieAD)3DU&~xiR7#Q`BVS-h41`%yh;=u#yZOGt82b_$BP^H({?d9N?T|S*7Ie` zs9`TiC#oX#jfwML`8T>{@hHqxK5IlV_E2jhYud=>*^os?PsN0*tz0*@z`pmCY)Sd42lfEf4(q>15!>+-eUd#uq^d!7Sg+_Bz*o|A9aG zn@iIf6>;SO?}K+9 zy!74kIVQ3|#xa#Im2Uzc;4WILF6KYQ7V}NX4E6M?|$gc4M}S*(k>SBXkw3P zylweTX2|b++tsgs*ZVK3YgTB47&;V+5wxlN`RM_5^L0<%b;E=Cfp~U6cv@s|q!A%> zHaM`O`}!Y0c=bmlU!h}20X%s%+@Zep| z%nRzI%%&?8Nq}eqbud^{AO0_=9=?0?vrle$`iV_x)juX=F~%$TZ?Q>V$#pz(-54`` zbB9L;s_N~0Ql+CR6B&D@U3n5D)j5|g{m3WZ@|ExW{n9h%^$j0%V~9YQbTEDyu#S_8 z-FxGMeFs#Mjw>AfSV#Xu$;;O)#m0yB@88*3n{0Gl*4+SteAZ82_NEIn)&N2%o(Lkq z?BrtC{rD!<*CTwM$Mj}aJYD5S^~F#heKC0A)e-Pc@L4k^IWN;c)Rk;<{fJc4)HEAW zxrp)*yhK%!l}x_mHtUueyuesEJ4DJ#ZG5<=>%f4@!evOMeAG_t?CaiqVGtjIMKW0+ z#jj7yyXhCV!`cYM>X2heFy%dD7#>O?iyzANiDC-Nh!b7;1VVGgdAes`+W3jjzUgm2 z|Ka6lH`=vABhXf>(IL!>j`VqV{CeH+Vd0WQNk&VfgwoJIude&iEse>Rs7f#i#K}A~ z$oTGXF53U?pZvpz{^p|UHsx0dWjyG^@`bs$Md_?b{q`T7{q<}9Z26jIr#j`v^<0v4 zs5pm12ZYO`ssIXh3T$KG*`uIXct-;@Iz|hxkK%zH~i~&{_-#X z>CG$8ucJ|Dnk-_mvWvpdMV%TIZ&X!t)m6W<@~=Pt?tlN@XV$D;5d!p-1b#)mtl(Fn zIc0d;jvREMr(t~Z3zvwwOIO5ETxd%!I8*(fFTZoinRAA+hy5TWry??T^Hww$bB3Jj zzIPknM#R|Co3$L5Y#23MJO~JiFRO08_C91E2v{7Rg(6R*v`|F2GIU0Ih?8N<^# zc^1;61BZtnzrPd7nh`clw+{-~kyF6|u`*1Hr~*?^;0rV+nyw%Zxh=yTiGcIfoi{(6 
z8Rmxi+pw$1oNDLSeBjSlp7xqH-%Z2R2)BitITakg|7sY=6ldr$@-KXZPLBV)nA3Y2oCER4HCs=pK z3)D+b$EA=`)4bYvXV*|i&QAlC^7(=OD5pa9sMsiKSKMv-X=&BH6~7(%6( za_eCd-+9G*SEu|wzGHlD(CB~>i{umDVDz5fJOf}Ah-|79sKLGJk$awoRS)Y(Bu}OD z@ziB+Kj$C+>v!vy3jZ4+YbBozbD4Z8ITz2ll}6(~zNl(gsFtp-=T@38Y}PP?&?9j| zM^jGc3d5g>9%;&og90*F(vIebDD=(=uaH(~X(r~VPk!zLb@L;`xqgsAE>S3T?3}Y_ z%U(4g6FtEx?TUY86lqRPb!}HKY}}2A%JgqKaV8jk<9jaWpvJfU&K2ok1n$d-mLhfp z>BmENJOL(_OYzGS| zo)?)t|DyRIn)%RQye~3Gz)Pf2MAL#k4bB451SVt2+x!7#YV+^=@a0YO>V*Rb=%})M zauNxr;htL`5q3ub*PcmI+$KI94{>S2e%?W3Lr47WgEsqI;mOJ6Xy4 zv`8A|;-$Gvf^uWvO^P+S8};_+8)ou^U&c37z2bxd3>)c?x~oAH@tdpAhl{tGS~9PL zx3X1TySn;M->#w_{Fg&V=QAzhoA`m<-Mxn-C;@d0?Kj^$&5n`TEx&psH$X=(e21An zRJBCj@UG>2)z@Cu+T7acL>)HgkOB%0LDt&(^xod>GMH!X!k%1ee)EViV^SxVFe5yH zdgQKWhq~Ysql{2_X=SThBA31GoP123``Wg)B`uVZv1QPQIq!}w2X{X!X@z+mW3h`x z1Y!UPlL-EThebQ)YMNh*M5D`AHGk;Ce`G;3kcMSffW!ins&{Ye=-e;E97kA+l<8H^ zJhnM*qpBYkkR!)4mt1x3noH_w6G5p-TzQ*H5ZW@T{_v0gNU+vsa_t~1BcM*+jjC#wFCC$@W-J__mGJ)ZGr#LLX=IW_kJ3@v$qRgxX`M{Li@oDy zD)a1@m8LHdA{@7qoUQRm(P$aiNqpFe^~wImya{+ zEFnuN;bZZcD$CH)Q>2OeKNB}k4Rxt?cRv=%SBZ#E2COUq&RbHy>}-{@RJ=x=dESal zei*e9Qx23c(*wZ+cRj)s@)%gK2`d9nB}h?psAd^28z3Q`fiNnv_8qlIz4D@3Dw<=U ziEGFGwA#J>pl&b?C4t(q=}=E+uY;1Rt`SMeB+gE~_XC%ZZfW{j%x=tIX|J*`6YS4< zm&f4UgbxCZSkyq4uM$=eV>6H>XF3&0d+OOIx5n+lMlli*#Rn&I=^NJ8wE%kM9dS&a zmD>p@P#U%qynK~CGBOS-&5#b|9R&~ls`yX>lvBo1XRJA`cerz4c+i(GFMZffI91!8 z-$}29O)Kx``01i3H{N;UrlFn@glRmX6N3oD>He$UaT(ynm!EI*6>nSW)@QSMCPyN) zO0bPPAHC;jrumANuyI&yCfg6X(J2z^t)=XXPSKtbf)But|IhcBYv1-P8 zr|Tkm9~mCbrcnr1m%e=sdjS+B$)k(_!Qs}>Bk81IG>6PPeYI`#jwD(YBn2k&A4GP7 z_T}>$TJ>CP^0JJ26Bdj%s8uV^Z)43;|A2Bb%RvXD&8Vpmsbr6y)FvMN%dR z2=eC90O?}UhG}Do7aWz%ls7b1cug6w5Nzr&C)?W!eodprM!{7fycy%d1K8@U^H$q2 zg(jszBH6i^cMl)zVw^-G^weS$?Li!i#Ae-{kNPQ*W}r691IzUnp4xWFm2Ir|g9yN2 z^*nXq>(^MZAm?W-*U4tQ%t$UhkbUBjXGykP3RXz{*#2nFO9~*-8B%&Mj7e%l-SO-D zU<5<(%KDjw%Ujo6)FPWb^;D~Bj#~SMb28r0$jC588Z-r*!S2kuyEY4)6)7+#;c49( z;u|7D-y$_-?{dg?A;OM{a}XXy)RNO%R3vMmE02(0hsRFF6OTV5pA!(DE}vKXcOBwm 
zBj8nX!Mue{a6pD3n=X`?@{mk~dGsnPimtqXwc-AvZmj1}4-_akTOHVa0N&MnaZK5q zh}F8y<*d zo9wRV_rqn0U|ee8q26)Hm1|MLXUKp6l-a6c!h3O5eL~o;fQ>vT8nFG?T6gE;GQf1g z$6i9sB1v@NlLQ+~Qm|d+5XP+&V|R~goLB8RL!LcibWqTg63IHH1WC;x&#T_9Uf5(9 zTS-tm=8sEPED|`@Fvo8QJ0`%SzotC4TDZI=YYm{RFGP}kSS;zdhYxiMxKLzGOcZW0 z`y=vG0bgh(8hI@Xo5c){b|#?IQ1^t=Q>}Qxmrsqz_fMhCiiZmiw;9m|8&zVHP$B*+ z_D?~0{EDsL%qv#t-cuzsZy~)cpkG=Uw!IWa8C^3N5s3zJ8#5UAb?KG9LYXV!)Tvy7 z_^h4V(e!IBStK83ogHI}?z&{9yJThi3y<%MT2UjhhV9z7wQK8>J!`IPEXuN{D%jNT zfT`l=Yto@Q(xYy_;lBFh9MtYj;0$VTwfQUGu>xgU9F?M|C>bnS>C9i+*y*K)`!ZBr zi|rzv`vy0y8(94Yt&9(@%eb7G1M&3-LBwHt<_G}r#M}n5bHZv_(BjC39OW{V1D3U< zOmK>Kd?8rFO`DI3@Lxblu|WBGZ7o0zKm(G)Z61Zwq-bhRTINH+UHonqfV)wjncAoabl! zRIlj@?5wrR<}MXM5>ZeTzzHK3QEz+iRbC`LI5IerVFu|V z-|>6hJAd^69XgbPNoYO!pfF=D^XgcB$(;+W%9MKknQdA0No7dNdKt6ZOWts9ZLj( zXx0S_JSMKF1zT!vD|5suJBW*(ot~dlYIseT0c6xz=0lPAjVl2#+R*X zt*%QXwPW8UjN9f7TlZ{2fwQ!L7C&8qgy79WlQcQ^gHhaSPP(sg% z2CutntrZhN5}^PO(}#RpwXd|A+mlr_vi%zhfWeL*dtuYgy)Oh}kwWnB=mq(jfF{8Z zvzRJMa>Z(cXk0F`Mh8_nl^uaH2=7IHFTwN*sFcSzo(WSduJEMz2MzqN35w6A^;Zf) z+r_A$AXs^vaDG~RSgcR+p1&csnrA{KLo3;g>A)~P`RJ~QSL5bt9FMDtE0U{1ug&qQ z^;P5eNt>NkQ@9DTog(x}^J;7po-_r2X$m@Vs~0*bFXcUbc2HK_#&q1;+FJcu7&N&w zHVqw8Pd@S-yLv=dpLqt>4J>c*sSDO#){Y(ph(LQ3$C(~|33b_<*D@V^Otmoo6%4ODYx&aCVn6^GXL^qz5)32T zmEUpo8&kOT`_5m8+^ ziwgCMq2zBoF6yIWoa2CiW1daxRz5j28>%hrwUV|Ll8i$xlT@)jWeN9n_mtfgyi7is zWs;EBukOBa-FQRi@sJ*)x$xN3J zbd~4{|7GxtuzZpk{mAzpxo2ztp<$R9m@l4-SwNQ!kFRB_2!DW0l}JX#%#~L5kg`(h zic1!=OU{Tq7{kCqRlF9HI|?zeNNZ-@$=G8wqBFI9b8q*){s=b8g!uwr*Fy$kWtaBis8wMUdY;W+iR=pFyzm?O%O*+h9&O5<&l9y$BMRLbASgT&nnFv z3v8_h3krlFOwPH;tC7L*6Hp=!mikLK7IQr;gp6|F4*T%UXT~Hs>h+X2(!Q2Dbcj;u zmM(pR%A!q9*m<@w$hBI<81%lRRG(Z}f6e4p3dy}=3MnAu6j z#VCweu!*HFxNNyFFP?h%mi=G-+*bmPiDrtFj5g0?InZg}Jwth-oZLql*G8#4X5!8I2(fwoL7AGzi5 z*S_>E+3hWAmxa2D5kWrzHCQlZ?n69R#fE!_*4_HzTR*&9R1=Q2NaQTE6PoHDkqNPC zI|{rc2*?IQ>#&lred{g&SE7`;#>U1{b12oiU_s~B9mT?!z}8bwJ+<_l*NlxxBI8n< zuo{DdgC!nZm{}nUCvKFLLPcWbW^M{uAuW}4hw^YjaB>xFy7}fiV+l8v>9uV!p@ZZ! 
z8dT8-%)EG1q*vcf1;0$2(@{#S(-hqRgp!U^*4MBfw|Vnc*dpD_Nknvx5N^$%KR--| z;Oszt;-SO)b{>k^^&==nNGOz9y>niBdPBP?e9;p3-}Gp8t_4$8Di_7J6jl_2Y?P01 zfT=FY04GdM;JLN&reFX3wl}|jB?e=dbxtt8!n}{c16=ru-9OK4*c@}KL=Aw+U*78* z?78e+7baRHMGEuiT~Ec*&6eO-fpn$$8d_{QQTw2@nCW4=%>B2ozwEb{W8C8C9E8)P z2AYhVp6KE+h{jD3+C=^(#gvlo--W6@MFJ8^?bN<_5lKNPc_vGK%bvbxhcvo8DimsH zXb8q8Ge+hF_Nja9{%HnIiM)yAp70#h~_szG$PH*?$d&scojkD}B^s2s(X(M5tI>h9m% z-`z0~K`~eBNtx2V?u{2}u0kE$t~Nit-DUS<*>DBc4YD(gG3H^+F|wc#INksS{dAtz(;>%+aw$3BzW5}Mve;H=v6+zvUrP>*EkR6KRPmIaLrn1b`wp)KmgXZJ)^+{=k6 zR4>oopcF!*73b7QQfYPHZR?p@VgNNYoc6seR=Z*@qZWyb>7a<97^qS>w*JCt)WjPe zzI&r6E>a9L4vM)==Pz8F8CQB(g)uaMd1|0r-F5qe4x4dgE3myftX?Zfvx zS@s<&l!-7{2TJGJye|hNaHUZX7V|tT<*+Nm@^VzVn9i&6Z`gXT%G#G#I>Ht~4s$KP9@8Dy4(3J*#G(oubm8C3&|&X(B;$ina;&BxZgknRVL zM3%$@jOg1nnLqmY`{>gQX>jT8U)>k+*%YJC^75H71_=52%%h|vJ(k|hv{gGwVq;u6 zYjy4&cxHW{OdlvPg)!r}?3d@$aQdtdt+OeCiQ=pK?szUe;5o7%hYB&h&E*p#Z+!Q~ zVo;C=X?6c?4`GlCJ}~Ck;UzYoUAbTwxft!tllc`m%g48GI`GgP2Z4J@5m)$QZc4ut z-=LCt1VQC9p}8+j%g`?#^V}@@vHcQrUqAZqC%oZEkY!#aJL$3E!8&MlV@!@mWOfO) z>GG~xaJrh)T0NNQ#h92dm=>%~ES_n({n~ALmJtc2v#0RkWy)i2386n3greyr>_c~V z3><{HPQWn>WV5`CU1&SKbX6cM=NgaAY#gYh1D?(u0szCsLta^0F{s;E6a1 zRV1m?81oDSF@?(M4|~;besOPIycw&(dwizHty-@-8*D;sGUr@0-yEhZUp9>x=UjNY=cHKRmE!RsvZK*_tv3+)@M0w}KPeQ}$f4{l$u3tYA$up~r=CX!4%5Q#ZXYbBS8%~ul zjA0oh({s9Ng)v}a2&Qo5)TRxG_wDG2*$tR&fOA0d#VKQFUbv!mA$!zid-K^Bwg2$u zFEdU{1s;>Br+)C2^>^R+7%QhlH_9KJF3!e-Ezgatd=2)Givx5_M15O6CMgjt&ty(m zSrDDw2&C7q{@)&$m#>Zap_F};J$1e79KAr;tTw8i6~Vp*+Qd6T`5B>Q_H};@L;G##K`^86!2A zc=oi`H7A>wEvJdu$wU=Qjmh_{3zyj6@%#ZchypDnb6@}f84gKAK~%czC6KvhU2@J{ z*FAE|vgNP2s==J&&H69SdXK4S9_j6T%Vkav_*3bY#v7IDTvp)XJ0g*&MAG2bUgLTpWV!%YkFcN zV-fKO7QQ&30VDK!T*=e`tdQ9m5+@d#(BzKG<#A!zf+w}=+}E8R#L{_}jWUdoAmYZ* zZXfE&-gEoY!h(f8T`F{BHBCmit;G&>ss6TCKRfGkkkHl+4$-(L_t%PDOv(Tl06 z6=zlpuP0q{s`->@C_!_ixEY64)#!6iZtXrO^rQgdjyM`AYVrGFJTj_jBd~*Z8BxI@b=x<0 z|Jl{w*!fhildXxUDBFO@Qv~S$e|y&gB*k%_XJ50s?B0{RPd|)Gi2$G_dgd}7M zBoROsvP?-9F>xwRRIZAv;!0G(aqKErxl-jww#((Dl*AP~?25F=f1O%j8x}+o| 
zW$EsfrCU(Cq+wY=dT9_A5SK0q7nYWg?#J)_6YqWa&Br_E%$b>UKin@f_a@uE&ab`v z=CH2g@j?7yzS|l90?dijlBCB=rSok}JET+BHLiqi3-Q)X8|ZF8w9+8376n?1ICG9V z9xk%zx4N7P3tQ))o!kijLr|0K?1vwdFr>$%;j3FHJo?K1?(de>?KQAKF!4uUgv1fp z?fE4>77TFu{GioE3n(JLhx`W4IieJ0W4;H^^qTOj+j)1*Bk=j0VLDI=rmL+w*O1M( z%?AN5YOjcuGF4cX#R%MQxqAwGsfh;_YtYlk1$Y1P;cZj?rPx7qjFXu2ky+q(_ub2^ zlm0C{{8dTVA(@dsKJia``dwW98F~FlwKt}|MHjCd=UhM4n!GS|I-E`V5|$7WSD*33 z1bYIeWg@iyyTzBW*(UP@NtLnI-p0pHT!R;~5!>|k0aak*iSPxHjaL72dWAyt_k;dQ!azqtvjJtx#^(qe<>le3Ykm<3r-1C)I)pxiWJU1&QMLFv^-5E56Ki-(LrFPY&o9c5~-~ ze`PP-;I((OO9cW|C1KNyQkk#(M9ww^&&!7zJD=w@sOT;>t*SA(L(C?~p>>B@V^(@I zm&r!NS~#yM`Kr2Znt-C?UoC@p%)z3%MVm1mII~nHLeqY*7qPE6r68$T-b89Rhu^IM zb!6HAqTAzgSvCz_zMtFstU`U-Qb-29a@yk9^?_XI%KS&aixQ;~T76wwNjSyebZZa2 z1_+ej;y#{LP zEaRi?NS~gEE3+PXzHtDdk_a81IGC6Gh=-#e24tXcXjt&0sZ1nyt~$o=ZKtPeTJMENuWub5c=ci;Yn{C?er3kDiAWIB zNCu=+WoOn*x|jAzkbBz5Je$nFE9v&fc_(kvhYjZdTXEEmknJe2erH;Qd#^@-Rs8Zs zt=tQlie-vZQ@}eKn$6$XL5FG<63vIDpa&9ow1?0N;i%nIQZA7C2TjJj8%;1%-$vs` zA%oarIOQRKU6|b+DrbFd8?ntz&7TWf=`M0PM5fK%+X{X46MyJEXR`?%wEXj@j7_^~ z@yksM_L4i+tCx=#_vtIrV1{J9gdf<5iBw^+GMxduMbTL;H@Z1-vTej9(%_u}7a_55 zTShVsb27(2+kom6Zaa~BI|}1ZmIEaFk~>*P@-AtdCHKP$K%)$Jt_!nv$R_dIrtYD> zG4=$;HR99zxfTFK!~fm8*#MN=d|G5GmSu$7j_ymrjB~{s{YXR4>MK4%8*-i5_ORtR z(B01qYZ0{pigjD|=0sDTz3nba5xF$*N>6OVm<{Sf>1a%)iFDk@rd@;dFZ{oH!qI2U zfBR%65IQYm1sO%6uf7^t)H}b@CY&MyQb5&$;|>V3DF(I85oVnS*3|1$5Zn z{Pf^*E1i;K3Ki36_2y1&p^syAb>X?OR?mTQ*HT9Q#9-!iDwC+Ro2lREuuC+TNdAJh zn~!b={`h1~CiMdBV{Pc{tcH5$!v9tMpUhXA&y1x=&B1X1Wm3A}HzK1d*W{K9+=9^a z$^(1P`6{N@gf*sn_=50+5Tdj9VJ0WI@AN#I@3y0vQkqE`lf+z=NJXIJmP!wlZ_(JC zM>1qog4(M$ytCf~bnD?#z8Pe5lxx@;DA^~Dk@FoOXT+B|x= zi%OwzpJPq9+%rc>wnzf(=8&tXImt9Hm_@NU;hMp*xqi^)N>j^wMqDC}287SmEHh%@ zM)!D4F!vdKgHg@jnC1wa6MvfF`dJBE;lDTyXvT*o1vb_xb)v!GCyQiMmZluxKnnK+ zeK#ZHU^kI$%>fiJ3>nl3ur!XbK8=~NH1VPKjU+7JQ@+Upx$e-yJe|2z$=JEcml>!q zwst4dItdo1KZgACN`8J{+pNl_Pn3X*Wr`86zb;49%1ZTPl!9Adh$)W)^1@Fcq-Q&{ zZ~gtaQlcGnaIcbttcr-qhyL01?vDE$t7{pfGgCpJC+c_B?77<0-n 
z&rpW7gurz&?IKJGUnb*wXLmTDG(DIqhL)H}xR=SaA$KS&tY0B##W-Od$GXTuSH~jz z)UDviQT3Q;w>&=pH5j(gdDdn;-u-%Ib}@=N(1Z<4C*N5EK&av&!#5TRHNnM)F_ll! z8A`38rJ|=R_QbO?fvY{Bm$8_bwuUv(o}=i5vFYP)X)Tmv{5OmVPF zY8)VwoV#MYaPt9?Amo%C7ub~rVjmwh;YhpdqOMLDeZF_ix$1d$m;dHA$}B2TL5t z*8^nho;6=aoFd6KwZj|m)jCe%&|!-nuF5*A8`;L|O61!DvKwLfUD;Z{1EbV(2boSuG zWyV%~kt4wfn?MArp8vJs{bB9W%kfU&&}tkXh8%teBcz!2BjV{RRiaD{l=wZDwY`%ogWs zoA!dQA4Zu&K|#*n_RYCB8)$QskFvc@c!{o`Tx$^SJ%^deQV?&AIOWbQ%WzGIc6%+C zRA;V=7t8DmB|;mSpfiL3$ir%J6yS(5qPiOuT?UlLleg3J^F808UTl5>*LEz8tAlw}yXB6G$fI-|LlA zjl)9p4SL_-jx@#Fb?GjBgBKgR3xY=_zh9iDg3FT_r9DFZPp2!V z77Si~u$?igFDdWrEb-9>4gJ19xK|1}0dnMQRTIsOMX97pr|WPcj9un=!A@OdtC>UC zvn3&@PUZpd#zyaC?`svvigfTP65-Vk^jt6y$id~rz!5y*qKsW=l)8S5o13M;v~G{L zlw;tp!zszq_zs!GB&4LIBqaWf)PM0XwhQu^nGWNC`s;QC|5#IKLDu1zr3?Vn?^aFK zUvX9GPse5se0(C5sfkys>UYoemX)bPCqcq+vXW0vusi=y);Boul|^SES?qS$B9-#I zm}d-A485X+A~vzEN=TYiNrBa(%H&5AdOO}M&)8VXniJRPi*{ueUXVrYx}w0MaU%VIeAfZB-6w!M@)e5SKf&}p z$%Rjnq+lM=?RAMrhExyaOD)fy7_<#!Q2pWL?&cy0aO47|IwMGcu)Q>WVbYlMJb2B3 zIAExNBYL;?@vhoOZ!r z*W+U!-1mYJcNJP8ncdEtP$nH1B)Ggsq1HmFrDAC*gO{vY{a!EiS;+HN~mAnm(K{l;Pf|P>WuD z;+-F28q!)7GN^q2H~dqr9i_RNHAb^Es^PH)PB z=KhM{-bWvf$|ghAH+}-8z^_j;=K?fS$KJP3Px)+`Y3kqvr&J2Wo1r$Z%ZI<6ukf(> z4^F>Vr6I6}$BRlB^iEe@IQ!rPF^}{~*7W!MvCWT^wI6X7W2ubkDm88&)+1NKP5VYr zj%r=i=M?eK6^{9BR9zmR!}6hYWLnG6za6)gMC|u|S=^3#0_{xEOz6djDQ4U%p1w1x zmyMUB)vuOikEc%j-x$w;o1>CquQYs|*+?|Doy;<_9ovl+Fi0wUCSZ5HmAF)8nqnVW zA!b^*>xDd?A@7S_X4-cx%MrwXM8KxyQ`a7O;z2C|(|=;OERIpgDLI@E3}J?g1=7VY z4lJkOY%@CPW*m*XpU;}&N~sqkFUT&WDV8aJb+mBWhn#r^ivQE6jEo^p&)k!KiQOS>Uq z`&D~bS^IKFSy62>h5s8~p0hm>ib?hNXPu{3Z~oJb(>`3lU?~I20{fylf@Z1@xA<6I z6FNLF^Fd%{FFclWrW*IjwKu8pTY(4aF~8zszT|2|*6~GCLJtYLPT6cJ+56C<}wkkY*i zNfM{PWJ6j%vX~T#@!{?NDUQnX?*y$6$kn$AHEkCmSQ*oTpvgAVPxxM zeuG$hRWorvFLL}D9k^ipC=)#0Wx7&ng8qy68Nyud=X${Dp%K#pDP>~M&H lmo"}, ) ) + + def test_stream(self, anthropic, ddtrace_global_config, mock_llmobs_writer, mock_tracer, request_vcr): + """Ensure llmobs records are emitted for completion endpoints when configured 
and there is an stream input. + + Also ensure the llmobs records have the correct tagging including trace/span ID for trace correlation. + """ + llm = anthropic.Anthropic() + with request_vcr.use_cassette("anthropic_completion_stream.yaml"): + stream = llm.messages.create( + model="claude-3-opus-20240229", + max_tokens=15, + temperature=0.8, + messages=[ + { + "role": "user", + "content": [ + { + "type": "text", + "text": "Can you explain what Descartes meant by 'I think, therefore I am'?", + } + ], + }, + ], + stream=True, + ) + for _ in stream: + pass + + span = mock_tracer.pop_traces()[0][0] + assert mock_llmobs_writer.enqueue.call_count == 1 + mock_llmobs_writer.enqueue.assert_called_with( + _expected_llmobs_llm_span_event( + span, + model_name="claude-3-opus-20240229", + model_provider="anthropic", + input_messages=[ + { + "content": "Can you explain what Descartes meant by 'I think, therefore I am'?", + "role": "user", + }, + ], + output_messages=[ + {"content": 'The phrase "I think, therefore I am" (originally in Latin as', "role": "assistant"} + ], + metadata={"temperature": 0.8, "max_tokens": 15.0}, + token_metrics={"prompt_tokens": 27, "completion_tokens": 15, "total_tokens": 42}, + tags={"ml_app": ""}, + ) + ) + + def test_image(self, anthropic, ddtrace_global_config, mock_llmobs_writer, mock_tracer, request_vcr): + """Ensure llmobs records are emitted for completion endpoints when configured and there is an image input. + + Also ensure the llmobs records have the correct tagging including trace/span ID for trace correlation. 
+ """ + llm = anthropic.Anthropic() + with request_vcr.use_cassette("anthropic_create_image.yaml"): + llm.messages.create( + model="claude-3-opus-20240229", + max_tokens=15, + temperature=0.8, + messages=[ + { + "role": "user", + "content": [ + { + "type": "text", + "text": "Hello, what do you see in the following image?", + }, + { + "type": "image", + "source": { + "type": "base64", + "media_type": "image/png", + "data": Path(__file__).parent.joinpath("images/bits.png"), + }, + }, + ], + }, + ], + ) + + span = mock_tracer.pop_traces()[0][0] + assert mock_llmobs_writer.enqueue.call_count == 1 + mock_llmobs_writer.enqueue.assert_called_with( + _expected_llmobs_llm_span_event( + span, + model_name="claude-3-opus-20240229", + model_provider="anthropic", + input_messages=[ + {"content": "Hello, what do you see in the following image?", "role": "user"}, + {"content": "([IMAGE DETECTED])", "role": "user"}, + ], + output_messages=[ + { + "content": 'The image shows the logo for a company or product called "Datadog', + "role": "assistant", + } + ], + metadata={"temperature": 0.8, "max_tokens": 15.0}, + token_metrics={"prompt_tokens": 246, "completion_tokens": 15, "total_tokens": 261}, + tags={"ml_app": ""}, + ) + ) diff --git a/tests/snapshots/tests.contrib.anthropic.test_anthropic.test_anthropic_llm_create_image.json b/tests/snapshots/tests.contrib.anthropic.test_anthropic.test_anthropic_llm_create_image.json new file mode 100644 index 00000000000..f519f68b9a6 --- /dev/null +++ b/tests/snapshots/tests.contrib.anthropic.test_anthropic.test_anthropic_llm_create_image.json @@ -0,0 +1,41 @@ +[[ + { + "name": "anthropic.request", + "service": "", + "resource": "Messages.create", + "trace_id": 0, + "span_id": 1, + "parent_id": 0, + "type": "", + "error": 0, + "meta": { + "_dd.p.dm": "-0", + "_dd.p.tid": "666879b400000000", + "anthropic.request.api_key": "sk-...key>", + "anthropic.request.messages.0.content.0.text": "Hello, what do you see in the following image?", + 
"anthropic.request.messages.0.content.0.type": "text", + "anthropic.request.messages.0.content.1.text": "([IMAGE DETECTED])", + "anthropic.request.messages.0.content.1.type": "image", + "anthropic.request.messages.0.role": "user", + "anthropic.request.model": "claude-3-opus-20240229", + "anthropic.request.parameters": "{\"max_tokens\": 15}", + "anthropic.response.completions.content.0.text": "The image shows the logo for a company or product called \"Datadog", + "anthropic.response.completions.content.0.type": "text", + "anthropic.response.completions.finish_reason": "max_tokens", + "anthropic.response.completions.role": "assistant", + "language": "python", + "runtime-id": "b14e66142e7c4d7587b2d57c9a2102f4" + }, + "metrics": { + "_dd.measured": 1, + "_dd.top_level": 1, + "_dd.tracer_kr": 1.0, + "_sampling_priority_v1": 1, + "anthropic.response.usage.input_tokens": 246, + "anthropic.response.usage.output_tokens": 15, + "anthropic.response.usage.total_tokens": 261, + "process_id": 65263 + }, + "duration": 2900904000, + "start": 1718122932613982000 + }]] diff --git a/tests/snapshots/tests.contrib.anthropic.test_anthropic.test_anthropic_llm_stream_image.json b/tests/snapshots/tests.contrib.anthropic.test_anthropic.test_anthropic_llm_stream_image.json new file mode 100644 index 00000000000..47fb207abdf --- /dev/null +++ b/tests/snapshots/tests.contrib.anthropic.test_anthropic.test_anthropic_llm_stream_image.json @@ -0,0 +1,41 @@ +[[ + { + "name": "anthropic.request", + "service": "", + "resource": "Messages.create", + "trace_id": 0, + "span_id": 1, + "parent_id": 0, + "type": "", + "error": 0, + "meta": { + "_dd.p.dm": "-0", + "_dd.p.tid": "66687a7500000000", + "anthropic.request.api_key": "sk-...key>", + "anthropic.request.messages.0.content.0.text": "Hello, what do you see in the following image?", + "anthropic.request.messages.0.content.0.type": "text", + "anthropic.request.messages.0.content.1.text": "([IMAGE DETECTED])", + 
"anthropic.request.messages.0.content.1.type": "image", + "anthropic.request.messages.0.role": "user", + "anthropic.request.model": "claude-3-opus-20240229", + "anthropic.request.parameters": "{\"max_tokens\": 15, \"stream\": true}", + "anthropic.response.completions.content.0.text": "The image shows the logo for a company or service called \"Datadog", + "anthropic.response.completions.content.0.type": "text", + "anthropic.response.completions.finish_reason": "max_tokens", + "anthropic.response.completions.role": "assistant", + "language": "python", + "runtime-id": "83436fa5572c4621bd5960baa80ddd25" + }, + "metrics": { + "_dd.measured": 1, + "_dd.top_level": 1, + "_dd.tracer_kr": 1.0, + "_sampling_priority_v1": 1, + "anthropic.response.usage.input_tokens": 246, + "anthropic.response.usage.output_tokens": 15, + "anthropic.response.usage.total_tokens": 261, + "process_id": 66648 + }, + "duration": 37333448000, + "start": 1718123125393200000 + }]] From 841015cd31c9d93b3f70edfd00c8e55de5803a9a Mon Sep 17 00:00:00 2001 From: William Conti Date: Tue, 11 Jun 2024 14:08:22 -0400 Subject: [PATCH 33/33] add more tests --- .../anthropic/test_anthropic_llmobs.py | 57 +++++++++++++++++++ 1 file changed, 57 insertions(+) diff --git a/tests/contrib/anthropic/test_anthropic_llmobs.py b/tests/contrib/anthropic/test_anthropic_llmobs.py index 77f9eab564c..7dc7c14cccf 100644 --- a/tests/contrib/anthropic/test_anthropic_llmobs.py +++ b/tests/contrib/anthropic/test_anthropic_llmobs.py @@ -144,6 +144,63 @@ def test_stream(self, anthropic, ddtrace_global_config, mock_llmobs_writer, mock ) ) + def test_stream_helper(self, anthropic, ddtrace_global_config, mock_llmobs_writer, mock_tracer, request_vcr): + """Ensure llmobs records are emitted for completion endpoints when configured and there is an stream input. + + Also ensure the llmobs records have the correct tagging including trace/span ID for trace correlation. 
+ """ + llm = anthropic.Anthropic() + with request_vcr.use_cassette("anthropic_completion_stream_helper.yaml"): + with llm.messages.stream( + model="claude-3-opus-20240229", + max_tokens=15, + temperature=0.8, + messages=[ + { + "role": "user", + "content": [ + { + "type": "text", + "text": "Can you explain what Descartes meant by 'I think, therefore I am'?", + } + ], + }, + ], + ) as stream: + for _ in stream.text_stream: + pass + + message = stream.get_final_message() + assert message is not None + + message = stream.get_final_text() + assert message is not None + + span = mock_tracer.pop_traces()[0][0] + assert mock_llmobs_writer.enqueue.call_count == 1 + mock_llmobs_writer.enqueue.assert_called_with( + _expected_llmobs_llm_span_event( + span, + model_name="claude-3-opus-20240229", + model_provider="anthropic", + input_messages=[ + { + "content": "Can you explain what Descartes meant by 'I think, therefore I am'?", + "role": "user", + }, + ], + output_messages=[ + { + "content": 'The famous philosophical statement "I think, therefore I am" (originally in', + "role": "assistant", + } + ], + metadata={"temperature": 0.8, "max_tokens": 15.0}, + token_metrics={"prompt_tokens": 27, "completion_tokens": 15, "total_tokens": 42}, + tags={"ml_app": ""}, + ) + ) + def test_image(self, anthropic, ddtrace_global_config, mock_llmobs_writer, mock_tracer, request_vcr): """Ensure llmobs records are emitted for completion endpoints when configured and there is an image input.