diff --git a/.circleci/config.templ.yml b/.circleci/config.templ.yml index 0dee5155002..73994eab222 100644 --- a/.circleci/config.templ.yml +++ b/.circleci/config.templ.yml @@ -16,7 +16,7 @@ mongo_image: &mongo_image mongo:3.6@sha256:19c11a8f1064fd2bb713ef1270f79a742a184 httpbin_image: &httpbin_image kennethreitz/httpbin@sha256:2c7abc4803080c22928265744410173b6fea3b898872c01c5fd0f0f9df4a59fb vertica_image: &vertica_image vertica/vertica-ce:latest rabbitmq_image: &rabbitmq_image rabbitmq:3.7-alpine -testagent_image: &testagent_image ghcr.io/datadog/dd-apm-test-agent/ddapm-test-agent:v1.17.0 +testagent_image: &testagent_image ghcr.io/datadog/dd-apm-test-agent/ddapm-test-agent:v1.20.0 parameters: coverage: diff --git a/.github/workflows/build_deploy.yml b/.github/workflows/build_deploy.yml index 77d52c757f5..bc6a8b0b3d2 100644 --- a/.github/workflows/build_deploy.yml +++ b/.github/workflows/build_deploy.yml @@ -25,7 +25,7 @@ jobs: build_wheels: uses: ./.github/workflows/build_python_3.yml with: - cibw_build: 'cp37* cp38* cp39* cp310* cp311* cp312*' + cibw_build: 'cp37* cp38* cp39* cp310* cp311* cp312* cp313*' build_sdist: name: Build source distribution @@ -40,7 +40,7 @@ jobs: - uses: actions/setup-python@v5 name: Install Python with: - python-version: '3.7' + python-version: '3.12' - name: Build sdist run: | pip install "setuptools_scm[toml]>=4" "cython" "cmake>=3.24.2,<3.28" "setuptools-rust" diff --git a/.github/workflows/build_python_3.yml b/.github/workflows/build_python_3.yml index 02832a008b9..fac67e45f82 100644 --- a/.github/workflows/build_python_3.yml +++ b/.github/workflows/build_python_3.yml @@ -25,7 +25,7 @@ jobs: - uses: actions/setup-python@v5 with: python-version: '3.8' - - run: pip install cibuildwheel==2.16.5 + - run: pip install cibuildwheel==2.22.0 - id: set-matrix env: CIBW_BUILD: ${{ inputs.cibw_build }} @@ -34,7 +34,7 @@ jobs: { cibuildwheel --print-build-identifiers --platform linux --arch x86_64,i686 | jq -cR '{only: ., os: "ubuntu-latest"}' \ 
&& cibuildwheel --print-build-identifiers --platform linux --arch aarch64 | jq -cR '{only: ., os: "arm-4core-linux"}' \ - && cibuildwheel --print-build-identifiers --platform windows --arch AMD64,x86 | jq -cR '{only: ., os: "windows-latest"}' \ + && cibuildwheel --print-build-identifiers --platform windows --arch AMD64,x86 | grep -v 313 | jq -cR '{only: ., os: "windows-latest"}' \ && cibuildwheel --print-build-identifiers --platform macos --arch x86_64,universal2 | jq -cR '{only: ., os: "macos-13"}' } | jq -sc ) @@ -83,7 +83,7 @@ jobs: - name: Build wheels arm64 if: always() && matrix.os == 'arm-4core-linux' - run: /home/runner/.local/bin/pipx run cibuildwheel==2.16.5 --only ${{ matrix.only }} + run: /home/runner/.local/bin/pipx run cibuildwheel==2.22.0 --only ${{ matrix.only }} env: CIBW_SKIP: ${{ inputs.cibw_skip }} CIBW_PRERELEASE_PYTHONS: ${{ inputs.cibw_prerelease_pythons }} @@ -107,7 +107,7 @@ jobs: rm -rf ./tempwheelhouse CIBW_REPAIR_WHEEL_COMMAND_MACOS: | zip -d {wheel} \*.c \*.cpp \*.cc \*.h \*.hpp \*.pyx && - delocate-wheel --require-archs {delocate_archs} -w {dest_dir} -v {wheel} + MACOSX_DEPLOYMENT_TARGET=12.7 delocate-wheel --require-archs {delocate_archs} -w {dest_dir} -v {wheel} CIBW_REPAIR_WHEEL_COMMAND_WINDOWS: choco install -y 7zip && 7z d -r "{wheel}" *.c *.cpp *.cc *.h *.hpp *.pyx && @@ -117,7 +117,7 @@ jobs: - name: Build wheels if: always() && matrix.os != 'arm-4core-linux' - uses: pypa/cibuildwheel@v2.16.5 + uses: pypa/cibuildwheel@v2.22.0 with: only: ${{ matrix.only }} env: @@ -143,7 +143,7 @@ jobs: rm -rf ./tempwheelhouse CIBW_REPAIR_WHEEL_COMMAND_MACOS: | zip -d {wheel} \*.c \*.cpp \*.cc \*.h \*.hpp \*.pyx && - delocate-wheel --require-archs {delocate_archs} -w {dest_dir} -v {wheel} + MACOSX_DEPLOYMENT_TARGET=12.7 delocate-wheel --require-archs {delocate_archs} -w {dest_dir} -v {wheel} CIBW_REPAIR_WHEEL_COMMAND_WINDOWS: choco install -y 7zip && 7z d -r "{wheel}" *.c *.cpp *.cc *.h *.hpp *.pyx && diff --git 
a/.github/workflows/generate-package-versions.yml b/.github/workflows/generate-package-versions.yml index 4db524c3d04..b8729e882c9 100644 --- a/.github/workflows/generate-package-versions.yml +++ b/.github/workflows/generate-package-versions.yml @@ -8,7 +8,7 @@ on: jobs: generate-package-versions: name: Generate package versions - runs-on: ubuntu-latest + runs-on: ubuntu-22.04 permissions: actions: read contents: write @@ -49,6 +49,11 @@ jobs: with: python-version: "3.12" + - name: Setup Python 3.13 + uses: actions/setup-python@v5 + with: + python-version: "3.13" + - name: Set up QEMU uses: docker/setup-qemu-action@v2 diff --git a/.github/workflows/generate-supported-versions.yml b/.github/workflows/generate-supported-versions.yml new file mode 100644 index 00000000000..c802e91bcf3 --- /dev/null +++ b/.github/workflows/generate-supported-versions.yml @@ -0,0 +1,121 @@ +name: Generate Supported Integration Versions + +on: + workflow_dispatch: # can be triggered manually + +jobs: + generate-supported-versions: + name: Generate supported integration versions + runs-on: ubuntu-22.04 + permissions: + actions: read + contents: write + pull-requests: write + + steps: + - uses: actions/checkout@v4 + with: + persist-credentials: false + + - name: Setup Python 3.7 + uses: actions/setup-python@v5 + with: + python-version: "3.7" + + - name: Setup Python 3.8 + uses: actions/setup-python@v5 + with: + python-version: "3.8" + + - name: Setup Python 3.9 + uses: actions/setup-python@v5 + with: + python-version: "3.9" + + - name: Setup Python 3.10 + uses: actions/setup-python@v5 + with: + python-version: "3.10" + + - name: Setup Python 3.11 + uses: actions/setup-python@v5 + with: + python-version: "3.11" + + - name: Setup Python 3.12 + uses: actions/setup-python@v5 + with: + python-version: "3.12" + + - name: Setup Python 3.13 + uses: actions/setup-python@v5 + with: + python-version: "3.13" + + - name: Set up QEMU + uses: docker/setup-qemu-action@v2 + + - name: Set up Docker Buildx + 
uses: docker/setup-buildx-action@v3 + + - name: Install system dependencies + run: | + sudo apt-get update + sudo apt-get install -y libmariadb-dev + + - name: Install Python dependencies + run: | + python -m pip install --upgrade pip + pip install packaging + pip install requests + pip install riot==0.20.1 + pip install wrapt==1.16.0 + + - name: Install ddtrace + run: | + pip install -e . + + - run: python scripts/freshvenvs.py generate + + - name: Generate table + run: python scripts/generate_table.py + + - run: git diff + + - name: Create Pull Request + id: pr + uses: peter-evans/create-pull-request@v6 + with: + token: ${{ secrets.GITHUB_TOKEN }} + branch: "update-supported-versions" + commit-message: "Update supported versions table" + delete-branch: true + base: main + title: "chore: update supported versions" + labels: changelog/no-changelog + body: | + Generates / updates the supported versions table for integrations. + This should be tied to releases, or triggered manually. + Workflow runs: [Generate Supported Integration Versions](https://github.com/DataDog/dd-trace-py/actions/workflows/generate-supported-versions.yml) + + ## Checklist + - [x] PR author has checked that all the criteria below are met + - The PR description includes an overview of the change + - The PR description articulates the motivation for the change + - The change includes tests OR the PR description describes a testing strategy + - The PR description notes risks associated with the change, if any + - Newly-added code is easy to change + - The change follows the [library release note guidelines](https://ddtrace.readthedocs.io/en/stable/releasenotes.html) + - The change includes or references documentation updates if necessary + - Backport labels are set (if [applicable](https://ddtrace.readthedocs.io/en/latest/contributing.html#backporting)) + + ## Reviewer Checklist + - [ ] Reviewer has checked that all the criteria below are met + - Title is accurate + - All changes are related to 
the pull request's stated goal + - Avoids breaking [API](https://ddtrace.readthedocs.io/en/stable/versioning.html#interfaces) changes + - Testing strategy adequately addresses listed risks + - Newly-added code is easy to change + - Release note makes sense to a user of the library + - If necessary, author has acknowledged and discussed the performance implications of this PR as reported in the benchmarks PR comment + - Backport labels are set in a manner that is consistent with the [release branch maintenance policy](https://ddtrace.readthedocs.io/en/latest/contributing.html#backporting) diff --git a/.github/workflows/profiling-native.yml b/.github/workflows/profiling-native.yml index 98722552dbd..280d586d36e 100644 --- a/.github/workflows/profiling-native.yml +++ b/.github/workflows/profiling-native.yml @@ -20,7 +20,7 @@ jobs: matrix: os: [ubuntu-24.04] python-version: ["3.8", "3.9", "3.10", "3.11", "3.12"] - sanitizer: ["safety", "thread"] + sanitizer: ["safety", "thread", "valgrind"] steps: - uses: actions/checkout@v4 @@ -40,6 +40,10 @@ jobs: chmod +x llvm.sh sudo ./llvm.sh 19 + - name: Install Valgrind + run: | + sudo apt-get install -y valgrind + - name: Run tests with sanitizers run: | # DEV: We currently have tests in dd_wrapper and stack_v2, setting diff --git a/.github/workflows/pytorch_gpu_tests.yml b/.github/workflows/pytorch_gpu_tests.yml new file mode 100644 index 00000000000..1db504ae61d --- /dev/null +++ b/.github/workflows/pytorch_gpu_tests.yml @@ -0,0 +1,43 @@ +name: Pytorch Unit Tests (with GPU) + +on: + push: + branches: + - 'main' + - 'mq-working-branch**' + paths: + - 'ddtrace/profiling/collector/pytorch.py' + pull_request: + paths: + - 'ddtrace/profiling/collector/pytorch.py' + workflow_dispatch: + +jobs: + unit-tests: + runs-on: APM-4-CORE-GPU-LINUX + steps: + - uses: actions/checkout@v4 + # Include all history and tags + with: + persist-credentials: false + fetch-depth: 0 + + - uses: actions/setup-python@v5 + name: Install Python + with: + 
python-version: '3.12' + + - uses: actions-rust-lang/setup-rust-toolchain@v1 + - name: Install latest stable toolchain and rustfmt + run: rustup update stable && rustup default stable && rustup component add rustfmt clippy + + - name: Install hatch + uses: pypa/hatch@install + with: + version: "1.12.0" + + - name: Install PyTorch + run: pip install torch + + - name: Run tests + run: hatch run profiling_pytorch:test diff --git a/.github/workflows/requirements-locks.yml b/.github/workflows/requirements-locks.yml index 69400d35dbd..23a1c05a517 100644 --- a/.github/workflows/requirements-locks.yml +++ b/.github/workflows/requirements-locks.yml @@ -11,7 +11,7 @@ jobs: validate: name: Check requirements lockfiles runs-on: ubuntu-latest - container: ghcr.io/datadog/dd-trace-py/testrunner:47c7b5287da25643e46652e6d222a40a52f2382a@sha256:3a02dafeff9cd72966978816d1b39b54f5517af4049396923b95c8452f604269 + container: ghcr.io/datadog/dd-trace-py/testrunner:0a50e839f4b1600f02157518b8d016451b346578@sha256:5dae9bc7872f69b31b612690f0748c7ad71ab90ef28a754b2ae93d0ba505837b steps: - uses: actions/checkout@v4 with: @@ -23,7 +23,7 @@ jobs: run: git config --global --add safe.directory "$GITHUB_WORKSPACE" - name: Set python interpreters - run: pyenv global 3.10 3.7 3.8 3.9 3.11 3.12 + run: pyenv global 3.10 3.7 3.8 3.9 3.11 3.12 3.13 - name: Install Dependencies run: pip install --upgrade pip && pip install riot==0.20.1 diff --git a/.github/workflows/system-tests.yml b/.github/workflows/system-tests.yml index 697b0f77c48..ccf6c6501d9 100644 --- a/.github/workflows/system-tests.yml +++ b/.github/workflows/system-tests.yml @@ -54,7 +54,7 @@ jobs: # system-tests requires an API_KEY, but it does not have to be a valid key, as long as we don't run a scenario # that make assertion on backend data. Using a fake key allow to run system tests on PR originating from forks. 
# If ever it's needed, a valid key exists in the repo, using ${{ secrets.DD_API_KEY }} - DD_API_KEY: 1234567890abcdef1234567890abcdef + DD_API_KEY: ${{ secrets.FAKE_DD_API_KEY }} CMAKE_BUILD_PARALLEL_LEVEL: 12 SYSTEM_TESTS_AWS_ACCESS_KEY_ID: ${{ secrets.IDM_AWS_ACCESS_KEY_ID }} SYSTEM_TESTS_AWS_SECRET_ACCESS_KEY: ${{ secrets.IDM_AWS_SECRET_ACCESS_KEY }} @@ -106,7 +106,7 @@ jobs: # system-tests requires an API_KEY, but it does not have to be a valid key, as long as we don't run a scenario # that make assertion on backend data. Using a fake key allow to run system tests on PR originating from forks. # If ever it's needed, a valid key exists in the repo, using ${{ secrets.DD_API_KEY }} - DD_API_KEY: 1234567890abcdef1234567890abcdef + DD_API_KEY: ${{ secrets.FAKE_DD_API_KEY }} CMAKE_BUILD_PARALLEL_LEVEL: 12 SYSTEM_TESTS_AWS_ACCESS_KEY_ID: ${{ secrets.IDM_AWS_ACCESS_KEY_ID }} SYSTEM_TESTS_AWS_SECRET_ACCESS_KEY: ${{ secrets.IDM_AWS_SECRET_ACCESS_KEY }} @@ -153,6 +153,14 @@ jobs: if: always() && steps.docker_load.outcome == 'success' && matrix.scenario == 'other' run: ./run.sh CROSSED_TRACING_LIBRARIES + - name: Run PROFILING + if: always() && steps.docker_load.outcome == 'success' && matrix.scenario == 'other' + run: | + cat /proc/sys/kernel/perf_event_paranoid + sudo sysctl kernel.perf_event_paranoid=1 + sudo sysctl -p + ./run.sh PROFILING + - name: Run REMOTE_CONFIG_MOCKED_BACKEND_ASM_FEATURES if: always() && steps.docker_load.outcome == 'success' && matrix.scenario == 'remote-config' run: ./run.sh REMOTE_CONFIG_MOCKED_BACKEND_ASM_FEATURES @@ -205,6 +213,10 @@ jobs: if: always() && steps.docker_load.outcome == 'success' && matrix.scenario == 'appsec-1' run: ./run.sh IAST_STANDALONE + - name: Run SCA_STANDALONE + if: always() && steps.docker_load.outcome == 'success' && matrix.scenario == 'appsec-1' + run: ./run.sh SCA_STANDALONE + - name: Run APPSEC_RUNTIME_ACTIVATION if: always() && steps.docker_load.outcome == 'success' && matrix.scenario == 'appsec-1' run: ./run.sh 
APPSEC_RUNTIME_ACTIVATION diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index b05126541d2..748942af278 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -2,6 +2,7 @@ stages: - package - tests-gen - tests-trigger + - quality-gate - shared-pipeline - benchmarks - macrobenchmarks @@ -87,3 +88,16 @@ deploy_to_di_backend:manual: UPSTREAM_COMMIT_AUTHOR: $CI_COMMIT_AUTHOR UPSTREAM_TAG: $CI_COMMIT_TAG UPSTREAM_PACKAGE_JOB: build + +check_new_flaky_tests: + stage: quality-gate + extends: .testrunner + script: + - export DD_SITE=datadoghq.com + - export DD_API_KEY=$(aws ssm get-parameter --region us-east-1 --name ci.${CI_PROJECT_NAME}.dd-api-key-qualitygate --with-decryption --query "Parameter.Value" --out text) + - export DD_APP_KEY=$(aws ssm get-parameter --region us-east-1 --name ci.${CI_PROJECT_NAME}.dd-app-key-qualitygate --with-decryption --query "Parameter.Value" --out text) + - datadog-ci gate evaluate + except: + - main + - '[0-9].[0-9]*' + - 'mq-working-branch**' diff --git a/.gitlab/download-dependency-wheels.sh b/.gitlab/download-dependency-wheels.sh index 431e662e4c7..c80c60af07b 100755 --- a/.gitlab/download-dependency-wheels.sh +++ b/.gitlab/download-dependency-wheels.sh @@ -20,7 +20,7 @@ export PYTHONUNBUFFERED=TRUE --local-ddtrace \ --arch x86_64 \ --arch aarch64 \ - --platform musllinux_1_1 \ + --platform musllinux_1_2 \ --platform manylinux2014 \ --output-dir ../pywheels-dep \ --verbose diff --git a/.gitlab/package.yml b/.gitlab/package.yml index 74d76bc0ae4..973e2d55d3f 100644 --- a/.gitlab/package.yml +++ b/.gitlab/package.yml @@ -31,6 +31,8 @@ download_dependency_wheels: PYTHON_VERSION: "3.11" - PYTHON_IMAGE_TAG: "3.12.0" PYTHON_VERSION: "3.12" + - PYTHON_IMAGE_TAG: "3.13.0" + PYTHON_VERSION: "3.13" script: - .gitlab/download-dependency-wheels.sh artifacts: diff --git a/.gitlab/services.yml b/.gitlab/services.yml index 3adcb973e89..51e28c38cc5 100644 --- a/.gitlab/services.yml +++ b/.gitlab/services.yml @@ -12,7 +12,7 @@ 
DD_REMOTE_CONFIGURATION_REFRESH_INTERVAL: 5s DD_DOGSTATSD_NON_LOCAL_TRAFFIC: true testagent: - name: registry.ddbuild.io/images/mirror/dd-apm-test-agent/ddapm-test-agent:v1.17.0 + name: registry.ddbuild.io/images/mirror/dd-apm-test-agent/ddapm-test-agent:v1.20.0 alias: testagent variables: LOG_LEVEL: INFO diff --git a/.gitlab/testrunner.yml b/.gitlab/testrunner.yml index f1fd4806506..fe9fb34bec6 100644 --- a/.gitlab/testrunner.yml +++ b/.gitlab/testrunner.yml @@ -1,9 +1,9 @@ .testrunner: - image: registry.ddbuild.io/images/mirror/dd-trace-py/testrunner:47c7b5287da25643e46652e6d222a40a52f2382a@sha256:3a02dafeff9cd72966978816d1b39b54f5517af4049396923b95c8452f604269 + image: registry.ddbuild.io/images/mirror/dd-trace-py/testrunner:0a50e839f4b1600f02157518b8d016451b346578@sha256:5dae9bc7872f69b31b612690f0748c7ad71ab90ef28a754b2ae93d0ba505837b # DEV: we have a larger pool of amd64 runners, prefer that over arm64 tags: [ "arch:amd64" ] timeout: 20m before_script: - ulimit -c unlimited - - pyenv global 3.12 3.7 3.8 3.9 3.10 3.11 3.13-dev + - pyenv global 3.12 3.7 3.8 3.9 3.10 3.11 3.13 - export _CI_DD_AGENT_URL=http://${HOST_IP}:8126/ diff --git a/.gitlab/tests.yml b/.gitlab/tests.yml index 83a5d4231b8..b8c9a3d9897 100644 --- a/.gitlab/tests.yml +++ b/.gitlab/tests.yml @@ -1,5 +1,7 @@ stages: - - tests + - precheck + - riot + - hatch variables: RIOT_RUN_CMD: riot -P -v run --exitfirst --pass-env -s @@ -8,26 +10,21 @@ variables: PYTEST_ADDOPTS: "-s" # CI_DEBUG_SERVICES: "true" -.testrunner: - image: registry.ddbuild.io/images/mirror/dd-trace-py/testrunner:47c7b5287da25643e46652e6d222a40a52f2382a@sha256:3a02dafeff9cd72966978816d1b39b54f5517af4049396923b95c8452f604269 - # DEV: we have a larger pool of amd64 runners, prefer that over arm64 - tags: [ "arch:amd64" ] - timeout: 20m - before_script: - - pyenv global 3.12 3.7 3.8 3.9 3.10 3.11 3.13-dev - - export _CI_DD_AGENT_URL=http://${HOST_IP}:8126/ - - -{{services.yml}} +include: + - local: ".gitlab/services.yml" + - local: 
".gitlab/testrunner.yml" .test_base_hatch: extends: .testrunner - stage: tests + stage: hatch # Hatch doesn't use pre-built wheels or venvs so we can start them right away needs: [] parallel: 4 # DEV: This is the max retries that GitLab currently allows for retry: 2 + before_script: + - !reference [.testrunner, before_script] + - pip install riot==0.20.1 script: - export PYTEST_ADDOPTS="${PYTEST_ADDOPTS} --ddtrace" - export _DD_CIVISIBILITY_USE_CI_CONTEXT_PROVIDER=true @@ -49,7 +46,7 @@ variables: services: - !reference [.services, testagent] before_script: - - !reference [.testrunner, before_script] + - !reference [.test_base_hatch, before_script] # DEV: All job variables get shared with services, setting `DD_TRACE_AGENT_URL` on the testagent will tell it to forward all requests to the # agent at that host. Therefore setting this as a variable will cause recursive requests to the testagent - export DD_TRACE_AGENT_URL="http://testagent:9126" @@ -57,10 +54,10 @@ variables: build_base_venvs: extends: .testrunner - stage: tests + stage: riot parallel: matrix: - - PYTHON_VERSION: ["3.7", "3.8", "3.9", "3.10", "3.11", "3.12"] + - PYTHON_VERSION: ["3.7", "3.8", "3.9", "3.10", "3.11", "3.12", "3.13"] variables: CMAKE_BUILD_PARALLEL_LEVEL: 12 PIP_VERBOSE: 1 @@ -76,21 +73,24 @@ build_base_venvs: - ddtrace/internal/datadog/profiling/crashtracker/crashtracker_exe* - ddtrace/internal/datadog/profiling/test/test_* +# Do not define a `needs:` in order to depend on the whole `precheck` stage .test_base_riot: extends: .testrunner - stage: tests + stage: riot needs: [ build_base_venvs ] parallel: 4 services: - !reference [.services, ddagent] # DEV: This is the max retries that GitLab currently allows for retry: 2 - script: + before_script: + - !reference [.testrunner, before_script] - pip install riot==0.20.1 - unset DD_SERVICE - unset DD_ENV - unset DD_TAGS - unset DD_TRACE_REMOVE_INTEGRATION_SERVICE_NAMES_ENABLED + script: - | hashes=( $(riot list --hash-only "${SUITE_NAME}" | 
sort | ./.gitlab/ci-split-input.sh) ) if [[ ${#hashes[@]} -eq 0 ]]; then @@ -113,7 +113,7 @@ build_base_venvs: - !reference [.test_base_riot, services] - !reference [.services, testagent] before_script: - - !reference [.testrunner, before_script] + - !reference [.test_base_riot, before_script] # DEV: All job variables get shared with services, setting `DD_TRACE_AGENT_URL` on the testagent will tell it to forward all requests to the # agent at that host. Therefore setting this as a variable will cause recursive requests to the testagent - export DD_TRACE_AGENT_URL="http://testagent:9126" diff --git a/.riot/requirements/102dfdd.txt b/.riot/requirements/102dfdd.txt new file mode 100644 index 00000000000..40bf3c75049 --- /dev/null +++ b/.riot/requirements/102dfdd.txt @@ -0,0 +1,20 @@ +# +# This file is autogenerated by pip-compile with Python 3.13 +# by the following command: +# +# pip-compile --allow-unsafe --no-annotate .riot/requirements/102dfdd.in +# +attrs==24.2.0 +coverage[toml]==7.6.1 +hypothesis==6.45.0 +iniconfig==2.0.0 +mock==5.1.0 +opentracing==2.4.0 +packaging==24.1 +pluggy==1.5.0 +pytest==8.3.3 +pytest-cov==5.0.0 +pytest-mock==3.14.0 +pytest-randomly==3.15.0 +sortedcontainers==2.4.0 +structlog==20.2.0 diff --git a/.riot/requirements/104daf8.txt b/.riot/requirements/104daf8.txt new file mode 100644 index 00000000000..e25e2cb84d2 --- /dev/null +++ b/.riot/requirements/104daf8.txt @@ -0,0 +1,25 @@ +# +# This file is autogenerated by pip-compile with Python 3.13 +# by the following command: +# +# pip-compile --allow-unsafe --no-annotate .riot/requirements/104daf8.in +# +attrs==24.2.0 +certifi==2024.8.30 +charset-normalizer==3.3.2 +coverage[toml]==7.6.1 +hypothesis==6.45.0 +idna==3.10 +iniconfig==2.0.0 +mock==5.1.0 +opensearch-py[requests]==1.1.0 +opentracing==2.4.0 +packaging==24.1 +pluggy==1.5.0 +pytest==8.3.3 +pytest-cov==5.0.0 +pytest-mock==3.14.0 +pytest-randomly==3.15.0 +requests==2.32.3 +sortedcontainers==2.4.0 +urllib3==1.26.20 diff --git 
a/.riot/requirements/104f450.txt b/.riot/requirements/104f450.txt new file mode 100644 index 00000000000..a9bf25ae538 --- /dev/null +++ b/.riot/requirements/104f450.txt @@ -0,0 +1,20 @@ +# +# This file is autogenerated by pip-compile with Python 3.13 +# by the following command: +# +# pip-compile --allow-unsafe --no-annotate .riot/requirements/104f450.in +# +attrs==24.2.0 +coverage[toml]==7.6.1 +hypothesis==6.45.0 +iniconfig==2.0.0 +logbook==1.0.0 +mock==5.1.0 +opentracing==2.4.0 +packaging==24.1 +pluggy==1.5.0 +pytest==8.3.3 +pytest-cov==5.0.0 +pytest-mock==3.14.0 +pytest-randomly==3.15.0 +sortedcontainers==2.4.0 diff --git a/.riot/requirements/1050efa.txt b/.riot/requirements/1050efa.txt index d69750cdf3f..df4832ccbd1 100644 --- a/.riot/requirements/1050efa.txt +++ b/.riot/requirements/1050efa.txt @@ -5,16 +5,16 @@ # pip-compile --allow-unsafe --no-annotate .riot/requirements/1050efa.in # attrs==24.2.0 -coverage[toml]==7.6.1 +coverage[toml]==7.6.9 hypothesis==6.45.0 iniconfig==2.0.0 -mariadb==1.1.10 +mariadb==1.1.11 mock==5.1.0 opentracing==2.4.0 -packaging==24.1 +packaging==24.2 pluggy==1.5.0 -pytest==8.3.2 -pytest-cov==5.0.0 +pytest==8.3.4 +pytest-cov==6.0.0 pytest-mock==3.14.0 -pytest-randomly==3.15.0 +pytest-randomly==3.16.0 sortedcontainers==2.4.0 diff --git a/.riot/requirements/1053dce.txt b/.riot/requirements/1053dce.txt new file mode 100644 index 00000000000..5b1c1d31dbe --- /dev/null +++ b/.riot/requirements/1053dce.txt @@ -0,0 +1,26 @@ +# +# This file is autogenerated by pip-compile with Python 3.13 +# by the following command: +# +# pip-compile --allow-unsafe --no-annotate .riot/requirements/1053dce.in +# +attrs==24.2.0 +coverage[toml]==7.6.1 +gevent==24.2.1 +greenlet==3.1.1 +hypothesis==6.45.0 +iniconfig==2.0.0 +mock==5.1.0 +opentracing==2.4.0 +packaging==24.1 +pluggy==1.5.0 +pytest==8.3.3 +pytest-cov==5.0.0 +pytest-mock==3.14.0 +pytest-randomly==3.15.0 +sortedcontainers==2.4.0 +zope-event==5.0 +zope-interface==7.0.3 + +# The following packages are 
considered to be unsafe in a requirements file: +setuptools==75.1.0 diff --git a/.riot/requirements/11063bf.txt b/.riot/requirements/11063bf.txt index 672870755a7..5f06cd4e04a 100644 --- a/.riot/requirements/11063bf.txt +++ b/.riot/requirements/11063bf.txt @@ -4,38 +4,38 @@ # # pip-compile --allow-unsafe --no-annotate .riot/requirements/11063bf.in # -ai21==2.14.1 +ai21==3.0.1 ai21-tokenizer==0.12.0 -aiohappyeyeballs==2.4.0 -aiohttp==3.10.5 +aiohappyeyeballs==2.4.4 +aiohttp==3.11.10 aiosignal==1.3.1 annotated-types==0.7.0 anthropic==0.26.0 -anyio==4.4.0 +anyio==4.7.0 attrs==24.2.0 boto3==1.34.51 botocore==1.34.51 certifi==2024.8.30 -charset-normalizer==3.3.2 +charset-normalizer==3.4.0 cohere==5.4.0 -coverage[toml]==7.6.1 +coverage[toml]==7.6.9 dataclasses-json==0.6.7 defusedxml==0.7.1 distro==1.9.0 exceptiongroup==1.2.2 faiss-cpu==1.8.0 fastavro==1.9.7 -filelock==3.16.0 -frozenlist==1.4.1 -fsspec==2024.9.0 +filelock==3.16.1 +frozenlist==1.5.0 +fsspec==2024.10.0 greenlet==3.0.3 h11==0.14.0 -httpcore==1.0.5 +httpcore==1.0.7 httpx==0.27.2 httpx-sse==0.4.0 -huggingface-hub==0.24.6 +huggingface-hub==0.26.5 hypothesis==6.45.0 -idna==3.8 +idna==3.10 iniconfig==2.0.0 jmespath==1.0.1 jsonpatch==1.33 @@ -49,8 +49,8 @@ langchain-core==0.1.52 langchain-openai==0.1.6 langchain-pinecone==0.1.0 langchain-text-splitters==0.0.2 -langsmith==0.1.117 -marshmallow==3.22.0 +langsmith==0.1.147 +marshmallow==3.23.1 mock==5.1.0 multidict==6.1.0 mypy-extensions==1.0.0 @@ -58,36 +58,38 @@ numexpr==2.8.5 numpy==1.26.4 openai==1.30.3 opentracing==2.4.0 -orjson==3.10.7 +orjson==3.10.12 packaging==23.2 pinecone-client==3.2.2 pluggy==1.5.0 -psutil==6.0.0 -pydantic==2.9.1 -pydantic-core==2.23.3 -pytest==8.3.3 +propcache==0.2.1 +psutil==6.1.0 +pydantic==2.10.3 +pydantic-core==2.27.1 +pytest==8.3.4 pytest-asyncio==0.23.7 -pytest-cov==5.0.0 +pytest-cov==6.0.0 pytest-mock==3.14.0 pytest-randomly==3.10.1 python-dateutil==2.9.0.post0 pyyaml==6.0.2 -regex==2024.7.24 +regex==2024.11.6 requests==2.32.3 
-s3transfer==0.10.2 +requests-toolbelt==1.0.0 +s3transfer==0.10.4 sentencepiece==0.2.0 -six==1.16.0 +six==1.17.0 sniffio==1.3.1 sortedcontainers==2.4.0 -sqlalchemy==2.0.34 +sqlalchemy==2.0.36 tenacity==8.5.0 -tiktoken==0.7.0 +tiktoken==0.8.0 tokenizers==0.19.1 -tqdm==4.66.5 -types-requests==2.32.0.20240907 +tqdm==4.67.1 +types-requests==2.32.0.20241016 typing-extensions==4.12.2 typing-inspect==0.9.0 urllib3==2.0.7 vcrpy==5.1.0 -wrapt==1.16.0 -yarl==1.11.1 +wrapt==1.17.0 +yarl==1.18.3 diff --git a/.riot/requirements/114bad8.txt b/.riot/requirements/114bad8.txt new file mode 100644 index 00000000000..27a7f4e24f7 --- /dev/null +++ b/.riot/requirements/114bad8.txt @@ -0,0 +1,29 @@ +# +# This file is autogenerated by pip-compile with Python 3.13 +# by the following command: +# +# pip-compile --allow-unsafe --no-annotate .riot/requirements/114bad8.in +# +attrs==24.2.0 +blinker==1.8.2 +click==8.1.7 +coverage[toml]==7.6.1 +flask==3.0.3 +flask-caching==1.10.1 +hypothesis==6.45.0 +iniconfig==2.0.0 +itsdangerous==2.2.0 +jinja2==3.1.4 +markupsafe==2.1.5 +mock==5.1.0 +opentracing==2.4.0 +packaging==24.1 +pluggy==1.5.0 +pytest==8.3.3 +pytest-cov==5.0.0 +pytest-mock==3.14.0 +pytest-randomly==3.15.0 +python-memcached==1.62 +redis==5.1.1 +sortedcontainers==2.4.0 +werkzeug==3.0.4 diff --git a/.riot/requirements/11f2bd0.txt b/.riot/requirements/11f2bd0.txt new file mode 100644 index 00000000000..fdab5d63d33 --- /dev/null +++ b/.riot/requirements/11f2bd0.txt @@ -0,0 +1,38 @@ +# +# This file is autogenerated by pip-compile with Python 3.13 +# by the following command: +# +# pip-compile --allow-unsafe --no-annotate .riot/requirements/11f2bd0.in +# +annotated-types==0.7.0 +attrs==24.2.0 +blinker==1.8.2 +certifi==2024.8.30 +charset-normalizer==3.3.2 +click==8.1.7 +coverage[toml]==7.6.1 +flask==2.3.3 +flask-openapi3==4.0.1 +hypothesis==6.45.0 +idna==3.10 +importlib-metadata==8.5.0 +iniconfig==2.0.0 +itsdangerous==2.2.0 +jinja2==3.1.4 +markupsafe==2.1.5 +mock==5.1.0 +opentracing==2.4.0 
+packaging==24.1 +pluggy==1.5.0 +pydantic==2.9.2 +pydantic-core==2.23.4 +pytest==8.3.3 +pytest-cov==5.0.0 +pytest-mock==3.14.0 +pytest-randomly==3.15.0 +requests==2.32.3 +sortedcontainers==2.4.0 +typing-extensions==4.12.2 +urllib3==1.26.20 +werkzeug==2.3.8 +zipp==3.20.2 diff --git a/.riot/requirements/196755b.txt b/.riot/requirements/11fd02a.txt similarity index 59% rename from .riot/requirements/196755b.txt rename to .riot/requirements/11fd02a.txt index 250298e3848..c00ae722bbb 100644 --- a/.riot/requirements/196755b.txt +++ b/.riot/requirements/11fd02a.txt @@ -1,8 +1,8 @@ # -# This file is autogenerated by pip-compile with Python 3.11 +# This file is autogenerated by pip-compile with Python 3.13 # by the following command: # -# pip-compile --no-annotate .riot/requirements/196755b.in +# pip-compile --allow-unsafe --no-annotate .riot/requirements/11fd02a.in # attrs==24.2.0 coverage[toml]==7.6.1 @@ -15,6 +15,5 @@ pluggy==1.5.0 pytest==8.3.3 pytest-cov==5.0.0 pytest-mock==3.14.0 -ruamel-yaml==0.18.6 -ruamel-yaml-clib==0.2.8 +pytest-randomly==3.15.0 sortedcontainers==2.4.0 diff --git a/.riot/requirements/12594bd.txt b/.riot/requirements/12594bd.txt index e8e2f1234f5..9311ce66a7f 100644 --- a/.riot/requirements/12594bd.txt +++ b/.riot/requirements/12594bd.txt @@ -2,25 +2,25 @@ # This file is autogenerated by pip-compile with Python 3.9 # by the following command: # -# pip-compile --no-annotate .riot/requirements/12594bd.in +# pip-compile --allow-unsafe --no-annotate .riot/requirements/12594bd.in # -async-timeout==4.0.3 -asyncpg==0.29.0 -attrs==23.2.0 -coverage[toml]==7.4.0 -exceptiongroup==1.2.0 +async-timeout==5.0.1 +asyncpg==0.30.0 +attrs==24.2.0 +coverage[toml]==7.6.9 +exceptiongroup==1.2.2 hypothesis==6.45.0 -importlib-metadata==7.0.1 +importlib-metadata==8.5.0 iniconfig==2.0.0 mock==5.1.0 opentracing==2.4.0 -packaging==23.2 -pluggy==1.3.0 -pytest==7.4.4 -pytest-asyncio==0.21.1 -pytest-cov==4.1.0 -pytest-mock==3.12.0 -pytest-randomly==3.15.0 +packaging==24.2 
+pluggy==1.5.0 +pytest==8.3.4 +pytest-asyncio==0.21.2 +pytest-cov==6.0.0 +pytest-mock==3.14.0 +pytest-randomly==3.16.0 sortedcontainers==2.4.0 -tomli==2.0.1 -zipp==3.17.0 +tomli==2.2.1 +zipp==3.21.0 diff --git a/.riot/requirements/1261ed3.txt b/.riot/requirements/1261ed3.txt new file mode 100644 index 00000000000..cf97c1bc502 --- /dev/null +++ b/.riot/requirements/1261ed3.txt @@ -0,0 +1,31 @@ +# +# This file is autogenerated by pip-compile with Python 3.13 +# by the following command: +# +# pip-compile --allow-unsafe --no-annotate .riot/requirements/1261ed3.in +# +aiohappyeyeballs==2.4.3 +aiohttp==3.10.9 +aiohttp-jinja2==1.5.1 +aiosignal==1.3.1 +attrs==24.2.0 +coverage[toml]==7.6.1 +frozenlist==1.4.1 +hypothesis==6.45.0 +idna==3.10 +iniconfig==2.0.0 +jinja2==3.1.4 +markupsafe==2.1.5 +mock==5.1.0 +multidict==6.1.0 +opentracing==2.4.0 +packaging==24.1 +pluggy==1.5.0 +pytest==8.3.3 +pytest-aiohttp==1.0.5 +pytest-asyncio==0.23.7 +pytest-cov==5.0.0 +pytest-mock==3.14.0 +pytest-randomly==3.15.0 +sortedcontainers==2.4.0 +yarl==1.13.1 diff --git a/.riot/requirements/12c10e8.txt b/.riot/requirements/12c10e8.txt index abe4c79bdd0..75ea709c67a 100644 --- a/.riot/requirements/12c10e8.txt +++ b/.riot/requirements/12c10e8.txt @@ -5,20 +5,20 @@ # pip-compile --allow-unsafe --no-annotate .riot/requirements/12c10e8.in # attrs==24.2.0 -coverage[toml]==7.6.1 +coverage[toml]==7.6.9 exceptiongroup==1.2.2 hypothesis==6.45.0 -importlib-metadata==8.4.0 +importlib-metadata==8.5.0 iniconfig==2.0.0 -mariadb==1.1.10 +mariadb==1.1.11 mock==5.1.0 opentracing==2.4.0 -packaging==24.1 +packaging==24.2 pluggy==1.5.0 -pytest==8.3.2 -pytest-cov==5.0.0 +pytest==8.3.4 +pytest-cov==6.0.0 pytest-mock==3.14.0 -pytest-randomly==3.15.0 +pytest-randomly==3.16.0 sortedcontainers==2.4.0 -tomli==2.0.1 -zipp==3.20.0 +tomli==2.2.1 +zipp==3.21.0 diff --git a/.riot/requirements/12cb0e7.txt b/.riot/requirements/12cb0e7.txt index 95dd85db63b..303f7985e32 100644 --- a/.riot/requirements/12cb0e7.txt +++ 
b/.riot/requirements/12cb0e7.txt @@ -5,16 +5,16 @@ # pip-compile --allow-unsafe --no-annotate .riot/requirements/12cb0e7.in # attrs==24.2.0 -coverage[toml]==7.6.1 +coverage[toml]==7.6.9 hypothesis==6.45.0 iniconfig==2.0.0 -mariadb==1.1.10 +mariadb==1.1.11 mock==5.1.0 opentracing==2.4.0 -packaging==24.1 +packaging==24.2 pluggy==1.5.0 -pytest==8.3.2 -pytest-cov==5.0.0 +pytest==8.3.4 +pytest-cov==6.0.0 pytest-mock==3.14.0 -pytest-randomly==3.15.0 +pytest-randomly==3.16.0 sortedcontainers==2.4.0 diff --git a/.riot/requirements/1304e20.txt b/.riot/requirements/1304e20.txt new file mode 100644 index 00000000000..54f718e4122 --- /dev/null +++ b/.riot/requirements/1304e20.txt @@ -0,0 +1,26 @@ +# +# This file is autogenerated by pip-compile with Python 3.13 +# by the following command: +# +# pip-compile --allow-unsafe --no-annotate .riot/requirements/1304e20.in +# +asgiref==3.8.1 +attrs==24.2.0 +coverage[toml]==7.6.1 +django==4.2.16 +django-configurations==2.5.1 +djangorestframework==3.15.2 +hypothesis==6.45.0 +iniconfig==2.0.0 +mock==5.1.0 +opentracing==2.4.0 +packaging==24.1 +pluggy==1.5.0 +pytest==8.3.3 +pytest-cov==5.0.0 +pytest-django[testing]==3.10.0 +pytest-mock==3.14.0 +pytest-randomly==3.15.0 +six==1.16.0 +sortedcontainers==2.4.0 +sqlparse==0.5.1 diff --git a/.riot/requirements/1332b9d.txt b/.riot/requirements/1332b9d.txt new file mode 100644 index 00000000000..49dced5d336 --- /dev/null +++ b/.riot/requirements/1332b9d.txt @@ -0,0 +1,38 @@ +# +# This file is autogenerated by pip-compile with Python 3.13 +# by the following command: +# +# pip-compile --allow-unsafe --no-annotate .riot/requirements/1332b9d.in +# +asn1crypto==1.5.1 +attrs==24.2.0 +certifi==2024.8.30 +cffi==1.17.1 +charset-normalizer==3.3.2 +coverage[toml]==7.6.1 +cryptography==38.0.4 +filelock==3.16.1 +hypothesis==6.45.0 +idna==3.10 +iniconfig==2.0.0 +mock==5.1.0 +opentracing==2.4.0 +packaging==24.1 +platformdirs==4.3.6 +pluggy==1.5.0 +pycparser==2.22 +pyjwt==2.9.0 +pyopenssl==23.2.0 +pytest==8.3.3 
+pytest-cov==5.0.0 +pytest-mock==3.14.0 +pytest-randomly==3.15.0 +pytz==2024.2 +requests==2.32.3 +responses==0.16.0 +six==1.16.0 +snowflake-connector-python==3.12.2 +sortedcontainers==2.4.0 +tomlkit==0.13.2 +typing-extensions==4.12.2 +urllib3==2.2.3 diff --git a/.riot/requirements/1337ee3.txt b/.riot/requirements/1337ee3.txt new file mode 100644 index 00000000000..1b296ead110 --- /dev/null +++ b/.riot/requirements/1337ee3.txt @@ -0,0 +1,26 @@ +# +# This file is autogenerated by pip-compile with Python 3.8 +# by the following command: +# +# pip-compile --allow-unsafe --no-annotate .riot/requirements/1337ee3.in +# +attrs==24.2.0 +azure-functions==1.21.3 +certifi==2024.8.30 +charset-normalizer==3.4.0 +coverage[toml]==7.6.1 +exceptiongroup==1.2.2 +hypothesis==6.45.0 +idna==3.10 +iniconfig==2.0.0 +mock==5.1.0 +opentracing==2.4.0 +packaging==24.2 +pluggy==1.5.0 +pytest==8.3.3 +pytest-cov==5.0.0 +pytest-mock==3.14.0 +requests==2.32.3 +sortedcontainers==2.4.0 +tomli==2.1.0 +urllib3==2.2.3 diff --git a/.riot/requirements/13658ae.txt b/.riot/requirements/13658ae.txt new file mode 100644 index 00000000000..e4ac641af5c --- /dev/null +++ b/.riot/requirements/13658ae.txt @@ -0,0 +1,24 @@ +# +# This file is autogenerated by pip-compile with Python 3.13 +# by the following command: +# +# pip-compile --allow-unsafe --no-annotate .riot/requirements/13658ae.in +# +attrs==24.2.0 +certifi==2024.8.30 +coverage[toml]==7.6.1 +elastic-transport==8.15.0 +elasticsearch==8.15.1 +elasticsearch7==7.17.12 +hypothesis==6.45.0 +iniconfig==2.0.0 +mock==5.1.0 +opentracing==2.4.0 +packaging==24.1 +pluggy==1.5.0 +pytest==8.3.3 +pytest-cov==5.0.0 +pytest-mock==3.14.0 +pytest-randomly==3.15.0 +sortedcontainers==2.4.0 +urllib3==1.26.20 diff --git a/.riot/requirements/136fddd.txt b/.riot/requirements/136fddd.txt new file mode 100644 index 00000000000..848b88850d0 --- /dev/null +++ b/.riot/requirements/136fddd.txt @@ -0,0 +1,21 @@ +# +# This file is autogenerated by pip-compile with Python 3.13 +# by the 
following command: +# +# pip-compile --allow-unsafe --no-annotate .riot/requirements/136fddd.in +# +attrs==24.2.0 +coverage[toml]==7.6.1 +elasticsearch5==5.5.6 +hypothesis==6.45.0 +iniconfig==2.0.0 +mock==5.1.0 +opentracing==2.4.0 +packaging==24.1 +pluggy==1.5.0 +pytest==8.3.3 +pytest-cov==5.0.0 +pytest-mock==3.14.0 +pytest-randomly==3.15.0 +sortedcontainers==2.4.0 +urllib3==2.2.3 diff --git a/.riot/requirements/1374394.txt b/.riot/requirements/1374394.txt new file mode 100644 index 00000000000..9e287a285b0 --- /dev/null +++ b/.riot/requirements/1374394.txt @@ -0,0 +1,34 @@ +# +# This file is autogenerated by pip-compile with Python 3.13 +# by the following command: +# +# pip-compile --allow-unsafe --no-annotate .riot/requirements/1374394.in +# +astunparse==1.6.3 +attrs==24.2.0 +blinker==1.8.2 +certifi==2024.8.30 +charset-normalizer==3.3.2 +click==8.1.7 +coverage[toml]==7.6.1 +flask==3.0.3 +hypothesis==6.45.0 +idna==3.10 +iniconfig==2.0.0 +itsdangerous==2.2.0 +jinja2==3.1.4 +markupsafe==2.1.5 +mock==5.1.0 +opentracing==2.4.0 +packaging==24.1 +pluggy==1.5.0 +pytest==8.3.3 +pytest-cov==5.0.0 +pytest-mock==3.14.0 +requests==2.32.3 +six==1.16.0 +sortedcontainers==2.4.0 +urllib3==2.2.3 +virtualenv-clone==0.5.7 +werkzeug==3.0.4 +wheel==0.44.0 diff --git a/.riot/requirements/1381214.txt b/.riot/requirements/1381214.txt new file mode 100644 index 00000000000..583f505bac4 --- /dev/null +++ b/.riot/requirements/1381214.txt @@ -0,0 +1,21 @@ +# +# This file is autogenerated by pip-compile with Python 3.13 +# by the following command: +# +# pip-compile --allow-unsafe --no-annotate .riot/requirements/1381214.in +# +attrs==24.2.0 +coverage[toml]==7.6.1 +dramatiq==1.17.0 +hypothesis==6.45.0 +iniconfig==2.0.0 +mock==5.1.0 +opentracing==2.4.0 +packaging==24.1 +pluggy==1.5.0 +prometheus-client==0.21.0 +pytest==8.3.3 +pytest-cov==5.0.0 +pytest-mock==3.14.0 +redis==5.1.1 +sortedcontainers==2.4.0 diff --git a/.riot/requirements/13ae267.txt b/.riot/requirements/13ae267.txt new file mode 
100644 index 00000000000..72f91d44446 --- /dev/null +++ b/.riot/requirements/13ae267.txt @@ -0,0 +1,20 @@ +# +# This file is autogenerated by pip-compile with Python 3.13 +# by the following command: +# +# pip-compile --allow-unsafe --no-annotate .riot/requirements/13ae267.in +# +attrs==24.2.0 +coverage[toml]==7.6.1 +hypothesis==6.45.0 +iniconfig==2.0.0 +loguru==0.7.2 +mock==5.1.0 +opentracing==2.4.0 +packaging==24.1 +pluggy==1.5.0 +pytest==8.3.3 +pytest-cov==5.0.0 +pytest-mock==3.14.0 +pytest-randomly==3.15.0 +sortedcontainers==2.4.0 diff --git a/.riot/requirements/141bfd1.txt b/.riot/requirements/141bfd1.txt new file mode 100644 index 00000000000..ca6a38880e2 --- /dev/null +++ b/.riot/requirements/141bfd1.txt @@ -0,0 +1,32 @@ +# +# This file is autogenerated by pip-compile with Python 3.13 +# by the following command: +# +# pip-compile --allow-unsafe --no-annotate .riot/requirements/141bfd1.in +# +attrs==24.2.0 +certifi==2024.8.30 +charset-normalizer==3.3.2 +click==7.1.2 +coverage[toml]==7.6.1 +flask==1.1.4 +gunicorn==23.0.0 +httpretty==1.0.5 +hypothesis==6.45.0 +idna==3.10 +iniconfig==2.0.0 +itsdangerous==1.1.0 +jinja2==2.11.3 +markupsafe==1.1.1 +mock==5.1.0 +opentracing==2.4.0 +packaging==24.1 +pluggy==1.5.0 +pytest==8.3.3 +pytest-cov==5.0.0 +pytest-mock==3.14.0 +pytest-randomly==3.15.0 +requests==2.32.3 +sortedcontainers==2.4.0 +urllib3==2.2.3 +werkzeug==1.0.1 diff --git a/.riot/requirements/141f7eb.txt b/.riot/requirements/141f7eb.txt new file mode 100644 index 00000000000..d8494646e5d --- /dev/null +++ b/.riot/requirements/141f7eb.txt @@ -0,0 +1,24 @@ +# +# This file is autogenerated by pip-compile with Python 3.13 +# by the following command: +# +# pip-compile --allow-unsafe --no-annotate .riot/requirements/141f7eb.in +# +attrs==24.2.0 +cattrs==22.2.0 +coverage[toml]==7.6.1 +hypothesis==6.45.0 +iniconfig==2.0.0 +mock==5.1.0 +molten==1.0.2 +mypy-extensions==1.0.0 +opentracing==2.4.0 +packaging==24.1 +pluggy==1.5.0 +pytest==8.3.3 +pytest-cov==5.0.0 
+pytest-mock==3.14.0 +pytest-randomly==3.15.0 +sortedcontainers==2.4.0 +typing-extensions==3.10.0.2 +typing-inspect==0.6.0 diff --git a/.riot/requirements/1463930.txt b/.riot/requirements/1463930.txt new file mode 100644 index 00000000000..313484f83ce --- /dev/null +++ b/.riot/requirements/1463930.txt @@ -0,0 +1,20 @@ +# +# This file is autogenerated by pip-compile with Python 3.13 +# by the following command: +# +# pip-compile --allow-unsafe --no-annotate .riot/requirements/1463930.in +# +attrs==24.2.0 +coverage[toml]==7.6.1 +hypothesis==6.45.0 +iniconfig==2.0.0 +mock==5.1.0 +msgpack==1.0.8 +opentracing==2.4.0 +packaging==24.1 +pluggy==1.5.0 +pytest==8.3.3 +pytest-cov==5.0.0 +pytest-mock==3.14.0 +pytest-randomly==3.15.0 +sortedcontainers==2.4.0 diff --git a/.riot/requirements/147bedb.txt b/.riot/requirements/147bedb.txt index d128fe5aaa2..b03efd4dc82 100644 --- a/.riot/requirements/147bedb.txt +++ b/.riot/requirements/147bedb.txt @@ -5,20 +5,20 @@ # pip-compile --allow-unsafe --no-annotate .riot/requirements/147bedb.in # attrs==24.2.0 -coverage[toml]==7.6.1 +coverage[toml]==7.6.9 exceptiongroup==1.2.2 hypothesis==6.45.0 -importlib-metadata==8.4.0 +importlib-metadata==8.5.0 iniconfig==2.0.0 -mariadb==1.1.10 +mariadb==1.1.11 mock==5.1.0 opentracing==2.4.0 -packaging==24.1 +packaging==24.2 pluggy==1.5.0 -pytest==8.3.2 -pytest-cov==5.0.0 +pytest==8.3.4 +pytest-cov==6.0.0 pytest-mock==3.14.0 -pytest-randomly==3.15.0 +pytest-randomly==3.16.0 sortedcontainers==2.4.0 -tomli==2.0.1 -zipp==3.20.0 +tomli==2.2.1 +zipp==3.21.0 diff --git a/.riot/requirements/14b54db.txt b/.riot/requirements/14b54db.txt new file mode 100644 index 00000000000..6b103b5f841 --- /dev/null +++ b/.riot/requirements/14b54db.txt @@ -0,0 +1,24 @@ +# +# This file is autogenerated by pip-compile with Python 3.11 +# by the following command: +# +# pip-compile --allow-unsafe --no-annotate .riot/requirements/14b54db.in +# +attrs==24.2.0 +azure-functions==1.21.3 +certifi==2024.8.30 +charset-normalizer==3.4.0 
+coverage[toml]==7.6.8 +hypothesis==6.45.0 +idna==3.10 +iniconfig==2.0.0 +mock==5.1.0 +opentracing==2.4.0 +packaging==24.2 +pluggy==1.5.0 +pytest==8.3.3 +pytest-cov==6.0.0 +pytest-mock==3.14.0 +requests==2.32.3 +sortedcontainers==2.4.0 +urllib3==2.2.3 diff --git a/.riot/requirements/14be2f6.txt b/.riot/requirements/14be2f6.txt new file mode 100644 index 00000000000..0a516b36c05 --- /dev/null +++ b/.riot/requirements/14be2f6.txt @@ -0,0 +1,25 @@ +# +# This file is autogenerated by pip-compile with Python 3.13 +# by the following command: +# +# pip-compile --allow-unsafe --no-annotate .riot/requirements/14be2f6.in +# +algoliasearch==2.6.3 +attrs==24.2.0 +certifi==2024.8.30 +charset-normalizer==3.3.2 +coverage[toml]==7.6.1 +hypothesis==6.45.0 +idna==3.10 +iniconfig==2.0.0 +mock==5.1.0 +opentracing==2.4.0 +packaging==24.1 +pluggy==1.5.0 +pytest==8.3.3 +pytest-cov==5.0.0 +pytest-mock==3.14.0 +pytest-randomly==3.15.0 +requests==2.32.3 +sortedcontainers==2.4.0 +urllib3==1.26.20 diff --git a/.riot/requirements/14d7e8a.txt b/.riot/requirements/14d7e8a.txt new file mode 100644 index 00000000000..979467f1e35 --- /dev/null +++ b/.riot/requirements/14d7e8a.txt @@ -0,0 +1,31 @@ +# +# This file is autogenerated by pip-compile with Python 3.13 +# by the following command: +# +# pip-compile --allow-unsafe --no-annotate .riot/requirements/14d7e8a.in +# +aiohappyeyeballs==2.4.3 +aiohttp==3.10.9 +aiohttp-jinja2==1.6 +aiosignal==1.3.1 +attrs==24.2.0 +coverage[toml]==7.6.1 +frozenlist==1.4.1 +hypothesis==6.45.0 +idna==3.10 +iniconfig==2.0.0 +jinja2==3.1.4 +markupsafe==2.1.5 +mock==5.1.0 +multidict==6.1.0 +opentracing==2.4.0 +packaging==24.1 +pluggy==1.5.0 +pytest==8.3.3 +pytest-aiohttp==1.0.5 +pytest-asyncio==0.23.7 +pytest-cov==5.0.0 +pytest-mock==3.14.0 +pytest-randomly==3.15.0 +sortedcontainers==2.4.0 +yarl==1.13.1 diff --git a/.riot/requirements/14f1594.txt b/.riot/requirements/14f1594.txt new file mode 100644 index 00000000000..16c4e6c559a --- /dev/null +++ 
b/.riot/requirements/14f1594.txt @@ -0,0 +1,21 @@ +# +# This file is autogenerated by pip-compile with Python 3.13 +# by the following command: +# +# pip-compile --allow-unsafe --no-annotate .riot/requirements/14f1594.in +# +attrs==24.2.0 +coverage[toml]==7.6.1 +hypothesis==6.45.0 +iniconfig==2.0.0 +mock==5.1.0 +mongoengine==0.29.1 +opentracing==2.4.0 +packaging==24.1 +pluggy==1.5.0 +pymongo==3.12.3 +pytest==8.3.3 +pytest-cov==5.0.0 +pytest-mock==3.14.0 +pytest-randomly==3.15.0 +sortedcontainers==2.4.0 diff --git a/.riot/requirements/151a249.txt b/.riot/requirements/151a249.txt new file mode 100644 index 00000000000..e43376d1755 --- /dev/null +++ b/.riot/requirements/151a249.txt @@ -0,0 +1,20 @@ +# +# This file is autogenerated by pip-compile with Python 3.13 +# by the following command: +# +# pip-compile --allow-unsafe --no-annotate .riot/requirements/151a249.in +# +attrs==24.2.0 +coverage[toml]==7.6.9 +hypothesis==6.45.0 +iniconfig==2.0.0 +lxml==5.3.0 +mock==5.1.0 +opentracing==2.4.0 +packaging==24.2 +pluggy==1.5.0 +pytest==8.3.4 +pytest-cov==6.0.0 +pytest-mock==3.14.0 +ruamel-yaml==0.18.6 +sortedcontainers==2.4.0 diff --git a/.riot/requirements/151d7b0.txt b/.riot/requirements/151d7b0.txt new file mode 100644 index 00000000000..9593b418017 --- /dev/null +++ b/.riot/requirements/151d7b0.txt @@ -0,0 +1,41 @@ +# +# This file is autogenerated by pip-compile with Python 3.9 +# by the following command: +# +# pip-compile --allow-unsafe --no-annotate .riot/requirements/151d7b0.in +# +amqp==2.6.1 +attrs==24.3.0 +cassandra-driver==3.29.2 +certifi==2024.12.14 +charset-normalizer==3.4.0 +click==8.1.7 +coverage[toml]==7.6.9 +exceptiongroup==1.2.2 +future==1.0.0 +geomet==0.2.1.post1 +hypothesis==6.45.0 +idna==3.10 +importlib-metadata==8.5.0 +iniconfig==2.0.0 +kombu==4.2.2.post1 +mock==5.1.0 +mysql-connector-python==9.1.0 +opentracing==2.4.0 +packaging==24.2 +pluggy==1.5.0 +psycopg2-binary==2.9.10 +pytest==8.3.4 +pytest-cov==6.0.0 +pytest-mock==3.14.0 +pytest-randomly==3.16.0 
+python-dateutil==2.9.0.post0 +pytz==2024.2 +requests==2.32.3 +six==1.17.0 +sortedcontainers==2.4.0 +tomli==2.2.1 +urllib3==2.2.3 +vertica-python==0.6.14 +vine==1.3.0 +zipp==3.21.0 diff --git a/.riot/requirements/152e97f.txt b/.riot/requirements/152e97f.txt new file mode 100644 index 00000000000..973e252ab4f --- /dev/null +++ b/.riot/requirements/152e97f.txt @@ -0,0 +1,21 @@ +# +# This file is autogenerated by pip-compile with Python 3.13 +# by the following command: +# +# pip-compile --allow-unsafe --no-annotate .riot/requirements/152e97f.in +# +attrs==24.2.0 +coverage[toml]==7.6.1 +elasticsearch6==6.8.2 +hypothesis==6.45.0 +iniconfig==2.0.0 +mock==5.1.0 +opentracing==2.4.0 +packaging==24.1 +pluggy==1.5.0 +pytest==8.3.3 +pytest-cov==5.0.0 +pytest-mock==3.14.0 +pytest-randomly==3.15.0 +sortedcontainers==2.4.0 +urllib3==2.2.3 diff --git a/.riot/requirements/1584f8c.txt b/.riot/requirements/1584f8c.txt new file mode 100644 index 00000000000..602372e9b06 --- /dev/null +++ b/.riot/requirements/1584f8c.txt @@ -0,0 +1,29 @@ +# +# This file is autogenerated by pip-compile with Python 3.13 +# by the following command: +# +# pip-compile --allow-unsafe --no-annotate .riot/requirements/1584f8c.in +# +asgiref==3.8.1 +attrs==24.2.0 +coverage[toml]==7.6.1 +django==4.2.16 +django-configurations==2.5.1 +django-hosts==6.0 +hypothesis==6.45.0 +iniconfig==2.0.0 +mock==5.1.0 +opentracing==2.4.0 +packaging==24.1 +pluggy==1.5.0 +pytest==8.3.3 +pytest-cov==5.0.0 +pytest-django[testing]==3.10.0 +pytest-mock==3.14.0 +pytest-randomly==3.15.0 +six==1.16.0 +sortedcontainers==2.4.0 +sqlparse==0.5.1 + +# The following packages are considered to be unsafe in a requirements file: +setuptools==75.1.0 diff --git a/.riot/requirements/164c3ce.txt b/.riot/requirements/164c3ce.txt new file mode 100644 index 00000000000..5acfc83a32e --- /dev/null +++ b/.riot/requirements/164c3ce.txt @@ -0,0 +1,31 @@ +# +# This file is autogenerated by pip-compile with Python 3.13 +# by the following command: +# +# 
pip-compile --allow-unsafe --no-annotate .riot/requirements/164c3ce.in +# +aiohappyeyeballs==2.4.3 +aiohttp==3.10.9 +aiohttp-jinja2==1.5.1 +aiosignal==1.3.1 +attrs==24.2.0 +coverage[toml]==7.6.1 +frozenlist==1.4.1 +hypothesis==6.45.0 +idna==3.10 +iniconfig==2.0.0 +jinja2==3.1.4 +markupsafe==2.1.5 +mock==5.1.0 +multidict==6.1.0 +opentracing==2.4.0 +packaging==24.1 +pluggy==1.5.0 +pytest==8.3.3 +pytest-aiohttp==1.0.5 +pytest-asyncio==0.23.7 +pytest-cov==5.0.0 +pytest-mock==3.14.0 +pytest-randomly==3.15.0 +sortedcontainers==2.4.0 +yarl==1.13.1 diff --git a/.riot/requirements/167b853.txt b/.riot/requirements/167b853.txt new file mode 100644 index 00000000000..71aa1ae2587 --- /dev/null +++ b/.riot/requirements/167b853.txt @@ -0,0 +1,21 @@ +# +# This file is autogenerated by pip-compile with Python 3.13 +# by the following command: +# +# pip-compile --allow-unsafe --no-annotate .riot/requirements/167b853.in +# +attrs==24.2.0 +coverage[toml]==7.6.1 +hypothesis==6.45.0 +iniconfig==2.0.0 +jinja2==3.1.4 +markupsafe==2.1.5 +mock==5.1.0 +opentracing==2.4.0 +packaging==24.1 +pluggy==1.5.0 +pytest==8.3.3 +pytest-cov==5.0.0 +pytest-mock==3.14.0 +pytest-randomly==3.15.0 +sortedcontainers==2.4.0 diff --git a/.riot/requirements/16acf84.txt b/.riot/requirements/16acf84.txt new file mode 100644 index 00000000000..402495f9654 --- /dev/null +++ b/.riot/requirements/16acf84.txt @@ -0,0 +1,27 @@ +# +# This file is autogenerated by pip-compile with Python 3.13 +# by the following command: +# +# pip-compile --allow-unsafe --no-annotate .riot/requirements/16acf84.in +# +asgiref==3.8.1 +attrs==24.2.0 +coverage[toml]==7.6.1 +django==3.2.25 +django-configurations==2.5.1 +djangorestframework==3.11.2 +hypothesis==6.45.0 +iniconfig==2.0.0 +mock==5.1.0 +opentracing==2.4.0 +packaging==24.1 +pluggy==1.5.0 +pytest==8.3.3 +pytest-cov==5.0.0 +pytest-django[testing]==3.10.0 +pytest-mock==3.14.0 +pytest-randomly==3.15.0 +pytz==2024.2 +six==1.16.0 +sortedcontainers==2.4.0 +sqlparse==0.5.1 diff --git 
a/.riot/requirements/16b7aa5.txt b/.riot/requirements/16b7aa5.txt index e53e34a8205..f8d2399a7eb 100644 --- a/.riot/requirements/16b7aa5.txt +++ b/.riot/requirements/16b7aa5.txt @@ -8,17 +8,17 @@ attrs==24.2.0 coverage[toml]==7.6.1 exceptiongroup==1.2.2 hypothesis==6.45.0 -importlib-metadata==8.4.0 +importlib-metadata==8.5.0 iniconfig==2.0.0 mariadb==1.0.11 mock==5.1.0 opentracing==2.4.0 -packaging==24.1 +packaging==24.2 pluggy==1.5.0 -pytest==8.3.2 +pytest==8.3.4 pytest-cov==5.0.0 pytest-mock==3.14.0 pytest-randomly==3.15.0 sortedcontainers==2.4.0 -tomli==2.0.1 -zipp==3.20.0 +tomli==2.2.1 +zipp==3.20.2 diff --git a/.riot/requirements/16c3b9f.txt b/.riot/requirements/16c3b9f.txt index 1919ccd9e72..ac3ccd42fa3 100644 --- a/.riot/requirements/16c3b9f.txt +++ b/.riot/requirements/16c3b9f.txt @@ -4,33 +4,33 @@ # # pip-compile --allow-unsafe --no-annotate .riot/requirements/16c3b9f.in # -ai21==2.14.1 +ai21==3.0.1 ai21-tokenizer==0.12.0 -aiohappyeyeballs==2.4.0 -aiohttp==3.10.5 +aiohappyeyeballs==2.4.4 +aiohttp==3.11.10 aiosignal==1.3.1 -anyio==4.4.0 +anyio==4.7.0 async-timeout==4.0.3 attrs==24.2.0 backoff==2.2.1 certifi==2024.8.30 -charset-normalizer==3.3.2 +charset-normalizer==3.4.0 cohere==4.57 -coverage[toml]==7.6.1 +coverage[toml]==7.6.9 dataclasses-json==0.5.14 -dnspython==2.6.1 +dnspython==2.7.0 exceptiongroup==1.2.2 fastavro==1.9.7 -filelock==3.16.0 -frozenlist==1.4.1 -fsspec==2024.9.0 +filelock==3.16.1 +frozenlist==1.5.0 +fsspec==2024.10.0 greenlet==3.0.3 h11==0.14.0 -httpcore==1.0.5 +httpcore==1.0.7 httpx==0.27.2 -huggingface-hub==0.24.6 +huggingface-hub==0.26.5 hypothesis==6.45.0 -idna==3.8 +idna==3.10 importlib-metadata==6.11.0 iniconfig==2.0.0 jsonpatch==1.33 @@ -40,8 +40,8 @@ langchain-community==0.0.14 langchain-core==0.1.23 langchainplus-sdk==0.0.4 langsmith==0.0.87 -loguru==0.7.2 -marshmallow==3.22.0 +loguru==0.7.3 +marshmallow==3.23.1 mock==5.1.0 multidict==6.1.0 mypy-extensions==1.0.0 @@ -53,31 +53,32 @@ opentracing==2.4.0 packaging==23.2 
pinecone-client==2.2.4 pluggy==1.5.0 -psutil==6.0.0 -pydantic==1.10.18 -pytest==8.3.3 +propcache==0.2.1 +psutil==6.1.0 +pydantic==1.10.19 +pytest==8.3.4 pytest-asyncio==0.23.7 -pytest-cov==5.0.0 +pytest-cov==6.0.0 pytest-mock==3.14.0 pytest-randomly==3.10.1 python-dateutil==2.9.0.post0 pyyaml==6.0.2 -regex==2024.7.24 +regex==2024.11.6 requests==2.32.3 sentencepiece==0.2.0 -six==1.16.0 +six==1.17.0 sniffio==1.3.1 sortedcontainers==2.4.0 -sqlalchemy==2.0.34 +sqlalchemy==2.0.36 tenacity==8.5.0 -tiktoken==0.7.0 -tokenizers==0.20.0 -tomli==2.0.1 -tqdm==4.66.5 +tiktoken==0.8.0 +tokenizers==0.21.0 +tomli==2.2.1 +tqdm==4.67.1 typing-extensions==4.12.2 typing-inspect==0.9.0 -urllib3==2.2.2 +urllib3==2.2.3 vcrpy==6.0.1 -wrapt==1.16.0 -yarl==1.11.1 -zipp==3.20.1 +wrapt==1.17.0 +yarl==1.18.3 +zipp==3.21.0 diff --git a/.riot/requirements/16cc321.txt b/.riot/requirements/16cc321.txt new file mode 100644 index 00000000000..e46e05ff1bb --- /dev/null +++ b/.riot/requirements/16cc321.txt @@ -0,0 +1,20 @@ +# +# This file is autogenerated by pip-compile with Python 3.13 +# by the following command: +# +# pip-compile --allow-unsafe --no-annotate .riot/requirements/16cc321.in +# +attrs==24.2.0 +coverage[toml]==7.6.1 +hypothesis==6.45.0 +iniconfig==2.0.0 +mock==5.1.0 +msgpack==1.1.0 +opentracing==2.4.0 +packaging==24.1 +pluggy==1.5.0 +pytest==8.3.3 +pytest-cov==5.0.0 +pytest-mock==3.14.0 +pytest-randomly==3.15.0 +sortedcontainers==2.4.0 diff --git a/.riot/requirements/16d2d1f.txt b/.riot/requirements/16d2d1f.txt new file mode 100644 index 00000000000..7092a5762ac --- /dev/null +++ b/.riot/requirements/16d2d1f.txt @@ -0,0 +1,48 @@ +# +# This file is autogenerated by pip-compile with Python 3.13 +# by the following command: +# +# pip-compile --allow-unsafe --no-annotate .riot/requirements/16d2d1f.in +# +attrs==24.2.0 +certifi==2024.8.30 +charset-normalizer==2.1.1 +click==8.1.7 +coverage[toml]==7.6.1 +deprecated==1.2.14 +flask==2.1.3 +gevent==24.2.1 +greenlet==3.1.1 +hypothesis==6.45.0 
+idna==3.10 +importlib-metadata==8.5.0 +iniconfig==2.0.0 +itsdangerous==2.2.0 +jinja2==3.1.4 +markupsafe==2.0.1 +mock==5.1.0 +opentelemetry-api==1.15.0 +opentelemetry-instrumentation==0.45b0 +opentelemetry-instrumentation-flask==0.45b0 +opentelemetry-instrumentation-wsgi==0.45b0 +opentelemetry-semantic-conventions==0.45b0 +opentelemetry-util-http==0.45b0 +opentracing==2.4.0 +packaging==24.1 +pluggy==1.5.0 +pytest==8.3.3 +pytest-asyncio==0.21.1 +pytest-cov==5.0.0 +pytest-mock==3.14.0 +pytest-randomly==3.15.0 +requests==2.28.1 +sortedcontainers==2.4.0 +urllib3==1.26.20 +werkzeug==2.1.2 +wrapt==1.16.0 +zipp==3.20.2 +zope-event==5.0 +zope-interface==7.0.3 + +# The following packages are considered to be unsafe in a requirements file: +setuptools==75.1.0 diff --git a/.riot/requirements/16de9c4.txt b/.riot/requirements/16de9c4.txt new file mode 100644 index 00000000000..ed357be4e45 --- /dev/null +++ b/.riot/requirements/16de9c4.txt @@ -0,0 +1,37 @@ +# +# This file is autogenerated by pip-compile with Python 3.13 +# by the following command: +# +# pip-compile --allow-unsafe --no-annotate .riot/requirements/16de9c4.in +# +aiohappyeyeballs==2.4.3 +aiohttp==3.10.9 +aiosignal==1.3.1 +attrs==24.2.0 +certifi==2024.8.30 +charset-normalizer==3.3.2 +coverage[toml]==7.6.1 +elastic-transport==8.15.0 +elasticsearch[async]==8.15.1 +elasticsearch7[async]==7.17.12 +events==0.5 +frozenlist==1.4.1 +hypothesis==6.45.0 +idna==3.10 +iniconfig==2.0.0 +mock==5.1.0 +multidict==6.1.0 +opensearch-py[async]==2.7.1 +opentracing==2.4.0 +packaging==24.1 +pluggy==1.5.0 +pytest==8.3.3 +pytest-cov==5.0.0 +pytest-mock==3.14.0 +pytest-randomly==3.15.0 +python-dateutil==2.9.0.post0 +requests==2.32.3 +six==1.16.0 +sortedcontainers==2.4.0 +urllib3==1.26.20 +yarl==1.13.1 diff --git a/.riot/requirements/1761cfc.txt b/.riot/requirements/1761cfc.txt index 4ccba3f60cb..6eb2c9fe558 100644 --- a/.riot/requirements/1761cfc.txt +++ b/.riot/requirements/1761cfc.txt @@ -4,40 +4,40 @@ # # pip-compile --allow-unsafe 
--no-annotate .riot/requirements/1761cfc.in # -ai21==2.14.1 +ai21==3.0.1 ai21-tokenizer==0.12.0 -aiohappyeyeballs==2.4.0 -aiohttp==3.10.5 +aiohappyeyeballs==2.4.4 +aiohttp==3.11.10 aiosignal==1.3.1 annotated-types==0.7.0 anthropic==0.26.0 -anyio==4.4.0 +anyio==4.7.0 async-timeout==4.0.3 attrs==24.2.0 boto3==1.34.51 botocore==1.34.51 certifi==2024.8.30 -charset-normalizer==3.3.2 -cohere==5.9.1 -coverage[toml]==7.6.1 +charset-normalizer==3.4.0 +cohere==5.13.3 +coverage[toml]==7.6.9 dataclasses-json==0.6.7 defusedxml==0.7.1 distro==1.9.0 exceptiongroup==1.2.2 fastavro==1.9.7 -filelock==3.16.0 -frozenlist==1.4.1 -fsspec==2024.9.0 +filelock==3.16.1 +frozenlist==1.5.0 +fsspec==2024.10.0 greenlet==3.0.3 h11==0.14.0 -httpcore==1.0.5 +httpcore==1.0.7 httpx==0.27.2 httpx-sse==0.4.0 -huggingface-hub==0.24.6 +huggingface-hub==0.26.5 hypothesis==6.45.0 -idna==3.8 +idna==3.10 iniconfig==2.0.0 -jiter==0.5.0 +jiter==0.8.0 jmespath==1.0.1 jsonpatch==1.33 jsonpointer==3.0.0 @@ -49,49 +49,51 @@ langchain-core==0.2.0 langchain-openai==0.1.7 langchain-pinecone==0.1.3 langchain-text-splitters==0.2.1 -langsmith==0.1.117 -marshmallow==3.22.0 +langsmith==0.1.147 +marshmallow==3.23.1 mock==5.1.0 multidict==6.1.0 mypy-extensions==1.0.0 numexpr==2.8.5 numpy==1.26.4 -openai==1.44.1 +openai==1.57.0 opentracing==2.4.0 -orjson==3.10.7 +orjson==3.10.12 packaging==23.2 parameterized==0.9.0 pinecone-client==5.0.1 -pinecone-plugin-inference==1.0.3 +pinecone-plugin-inference==1.1.0 pinecone-plugin-interface==0.0.7 pluggy==1.5.0 -psutil==6.0.0 -pydantic==2.9.1 -pydantic-core==2.23.3 -pytest==8.3.3 +propcache==0.2.1 +psutil==6.1.0 +pydantic==2.10.3 +pydantic-core==2.27.1 +pytest==8.3.4 pytest-asyncio==0.23.7 -pytest-cov==5.0.0 +pytest-cov==6.0.0 pytest-mock==3.14.0 pytest-randomly==3.10.1 python-dateutil==2.9.0.post0 pyyaml==6.0.2 -regex==2024.7.24 +regex==2024.11.6 requests==2.32.3 -s3transfer==0.10.2 +requests-toolbelt==1.0.0 +s3transfer==0.10.4 sentencepiece==0.2.0 -six==1.16.0 +six==1.17.0 
sniffio==1.3.1 sortedcontainers==2.4.0 -sqlalchemy==2.0.34 +sqlalchemy==2.0.36 tenacity==8.5.0 -tiktoken==0.7.0 -tokenizers==0.20.0 -tomli==2.0.1 -tqdm==4.66.5 -types-requests==2.32.0.20240907 +tiktoken==0.8.0 +tokenizers==0.21.0 +tomli==2.2.1 +tqdm==4.67.1 +types-requests==2.32.0.20241016 typing-extensions==4.12.2 typing-inspect==0.9.0 urllib3==2.0.7 vcrpy==5.1.0 -wrapt==1.16.0 -yarl==1.11.1 +wrapt==1.17.0 +yarl==1.18.3 diff --git a/.riot/requirements/17879d0.txt b/.riot/requirements/17879d0.txt index 9e7c9459a2b..339e06c1336 100644 --- a/.riot/requirements/17879d0.txt +++ b/.riot/requirements/17879d0.txt @@ -5,18 +5,18 @@ # pip-compile --allow-unsafe --no-annotate .riot/requirements/17879d0.in # attrs==24.2.0 -coverage[toml]==7.6.1 +coverage[toml]==7.6.9 exceptiongroup==1.2.2 hypothesis==6.45.0 iniconfig==2.0.0 mock==5.1.0 opentracing==2.4.0 -packaging==24.1 +packaging==24.2 pluggy==1.5.0 -pyodbc==5.1.0 -pytest==8.3.2 -pytest-cov==5.0.0 +pyodbc==5.2.0 +pytest==8.3.4 +pytest-cov==6.0.0 pytest-mock==3.14.0 -pytest-randomly==3.15.0 +pytest-randomly==3.16.0 sortedcontainers==2.4.0 -tomli==2.0.1 +tomli==2.2.1 diff --git a/.riot/requirements/178f7d5.txt b/.riot/requirements/178f7d5.txt new file mode 100644 index 00000000000..4d7d3e5b6e6 --- /dev/null +++ b/.riot/requirements/178f7d5.txt @@ -0,0 +1,20 @@ +# +# This file is autogenerated by pip-compile with Python 3.13 +# by the following command: +# +# pip-compile --allow-unsafe --no-annotate .riot/requirements/178f7d5.in +# +attrs==24.2.0 +coverage[toml]==7.6.1 +hypothesis==6.45.0 +iniconfig==2.0.0 +logbook==1.7.0.post0 +mock==5.1.0 +opentracing==2.4.0 +packaging==24.1 +pluggy==1.5.0 +pytest==8.3.3 +pytest-cov==5.0.0 +pytest-mock==3.14.0 +pytest-randomly==3.15.0 +sortedcontainers==2.4.0 diff --git a/.riot/requirements/17d40ef.txt b/.riot/requirements/17d40ef.txt new file mode 100644 index 00000000000..53c94aadbe1 --- /dev/null +++ b/.riot/requirements/17d40ef.txt @@ -0,0 +1,20 @@ +# +# This file is autogenerated by 
pip-compile with Python 3.13 +# by the following command: +# +# pip-compile --allow-unsafe --no-annotate .riot/requirements/17d40ef.in +# +attrs==24.2.0 +coverage[toml]==7.6.1 +hypothesis==6.45.0 +iniconfig==2.0.0 +loguru==0.4.1 +mock==5.1.0 +opentracing==2.4.0 +packaging==24.1 +pluggy==1.5.0 +pytest==8.3.3 +pytest-cov==5.0.0 +pytest-mock==3.14.0 +pytest-randomly==3.15.0 +sortedcontainers==2.4.0 diff --git a/.riot/requirements/1805689.txt b/.riot/requirements/1805689.txt deleted file mode 100644 index e76e16e1946..00000000000 --- a/.riot/requirements/1805689.txt +++ /dev/null @@ -1,37 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.9 -# by the following command: -# -# pip-compile --no-annotate .riot/requirements/1805689.in -# -amqp==2.6.1 -attrs==23.1.0 -cassandra-driver==3.28.0 -click==8.1.7 -coverage[toml]==7.3.4 -exceptiongroup==1.2.0 -future==0.18.3 -geomet==0.2.1.post1 -hypothesis==6.45.0 -importlib-metadata==7.0.0 -iniconfig==2.0.0 -kombu==4.2.2.post1 -mock==5.1.0 -mysql-connector-python==8.2.0 -opentracing==2.4.0 -packaging==23.2 -pluggy==1.3.0 -protobuf==4.21.12 -psycopg2-binary==2.9.9 -pytest==7.4.3 -pytest-cov==4.1.0 -pytest-mock==3.12.0 -pytest-randomly==3.15.0 -python-dateutil==2.8.2 -pytz==2023.3.post1 -six==1.16.0 -sortedcontainers==2.4.0 -tomli==2.0.1 -vertica-python==0.6.14 -vine==1.3.0 -zipp==3.17.0 diff --git a/.riot/requirements/1810da7.txt b/.riot/requirements/1810da7.txt index 4c80eec5a83..020c016edce 100644 --- a/.riot/requirements/1810da7.txt +++ b/.riot/requirements/1810da7.txt @@ -8,17 +8,17 @@ attrs==24.2.0 coverage[toml]==7.6.1 exceptiongroup==1.2.2 hypothesis==6.45.0 -importlib-metadata==8.4.0 +importlib-metadata==8.5.0 iniconfig==2.0.0 mock==5.1.0 opentracing==2.4.0 -packaging==24.1 +packaging==24.2 pluggy==1.5.0 pyodbc==4.0.39 -pytest==8.3.2 +pytest==8.3.4 pytest-cov==5.0.0 pytest-mock==3.14.0 pytest-randomly==3.15.0 sortedcontainers==2.4.0 -tomli==2.0.1 -zipp==3.20.0 +tomli==2.2.1 +zipp==3.20.2 diff --git 
a/.riot/requirements/1819cb6.txt b/.riot/requirements/1819cb6.txt new file mode 100644 index 00000000000..0c9e45ced2c --- /dev/null +++ b/.riot/requirements/1819cb6.txt @@ -0,0 +1,29 @@ +# +# This file is autogenerated by pip-compile with Python 3.13 +# by the following command: +# +# pip-compile --allow-unsafe --no-annotate .riot/requirements/1819cb6.in +# +attrs==24.2.0 +blinker==1.8.2 +click==7.1.2 +coverage[toml]==7.6.1 +flask==1.1.4 +flask-caching==1.10.1 +hypothesis==6.45.0 +iniconfig==2.0.0 +itsdangerous==1.1.0 +jinja2==2.11.3 +markupsafe==1.1.1 +mock==5.1.0 +opentracing==2.4.0 +packaging==24.1 +pluggy==1.5.0 +pytest==8.3.3 +pytest-cov==5.0.0 +pytest-mock==3.14.0 +pytest-randomly==3.15.0 +python-memcached==1.62 +redis==5.1.1 +sortedcontainers==2.4.0 +werkzeug==1.0.1 diff --git a/.riot/requirements/188244e.txt b/.riot/requirements/188244e.txt new file mode 100644 index 00000000000..7a30a1a4b8e --- /dev/null +++ b/.riot/requirements/188244e.txt @@ -0,0 +1,20 @@ +# +# This file is autogenerated by pip-compile with Python 3.13 +# by the following command: +# +# pip-compile --allow-unsafe --no-annotate .riot/requirements/188244e.in +# +attrs==24.2.0 +coverage[toml]==7.6.1 +hypothesis==6.45.0 +iniconfig==2.0.0 +mock==5.1.0 +msgpack==1.1.0 +opentracing==2.4.0 +packaging==24.1 +pluggy==1.5.0 +pytest==8.3.3 +pytest-cov==5.0.0 +pytest-mock==3.14.0 +pytest-randomly==3.15.0 +sortedcontainers==2.4.0 diff --git a/.riot/requirements/188a403.txt b/.riot/requirements/188a403.txt index 62354b22cef..37541506b8d 100644 --- a/.riot/requirements/188a403.txt +++ b/.riot/requirements/188a403.txt @@ -5,20 +5,20 @@ # pip-compile --allow-unsafe --no-annotate .riot/requirements/188a403.in # attrs==24.2.0 -coverage[toml]==7.6.1 +coverage[toml]==7.6.9 exceptiongroup==1.2.2 hypothesis==6.45.0 -importlib-metadata==8.4.0 +importlib-metadata==8.5.0 iniconfig==2.0.0 mock==5.1.0 opentracing==2.4.0 -packaging==24.1 +packaging==24.2 pluggy==1.5.0 pyodbc==4.0.39 -pytest==8.3.2 -pytest-cov==5.0.0 
+pytest==8.3.4 +pytest-cov==6.0.0 pytest-mock==3.14.0 -pytest-randomly==3.15.0 +pytest-randomly==3.16.0 sortedcontainers==2.4.0 -tomli==2.0.1 -zipp==3.20.0 +tomli==2.2.1 +zipp==3.21.0 diff --git a/.riot/requirements/18bc2ac.txt b/.riot/requirements/18bc2ac.txt index e3f60b3aad2..aaf19bf9fe2 100644 --- a/.riot/requirements/18bc2ac.txt +++ b/.riot/requirements/18bc2ac.txt @@ -4,98 +4,101 @@ # # pip-compile --allow-unsafe --no-annotate .riot/requirements/18bc2ac.in # -ai21==2.14.1 +ai21==3.0.1 ai21-tokenizer==0.12.0 -aiohappyeyeballs==2.4.0 -aiohttp==3.10.5 +aiohttp==3.9.5 aiosignal==1.3.1 annotated-types==0.7.0 -anthropic==0.34.2 -anyio==4.4.0 +anthropic==0.40.0 +anyio==4.7.0 attrs==24.2.0 -boto3==1.34.162 -botocore==1.34.162 +boto3==1.35.76 +botocore==1.35.76 certifi==2024.8.30 -charset-normalizer==3.3.2 -cohere==5.9.1 -coverage[toml]==7.6.1 +charset-normalizer==3.4.0 +cohere==5.13.3 +coverage[toml]==7.6.9 dataclasses-json==0.6.7 defusedxml==0.7.1 distro==1.9.0 exceptiongroup==1.2.2 fastavro==1.9.7 -filelock==3.16.0 -frozenlist==1.4.1 -fsspec==2024.9.0 +filelock==3.16.1 +frozenlist==1.5.0 +fsspec==2024.10.0 greenlet==3.0.3 h11==0.14.0 -httpcore==1.0.5 +httpcore==1.0.7 httpx==0.27.2 httpx-sse==0.4.0 -huggingface-hub==0.24.6 +huggingface-hub==0.26.5 hypothesis==6.45.0 -idna==3.8 +idna==3.10 iniconfig==2.0.0 -jiter==0.5.0 +jiter==0.8.0 jmespath==1.0.1 jsonpatch==1.33 jsonpointer==3.0.0 -langchain==0.2.16 -langchain-anthropic==0.1.23 -langchain-aws==0.1.18 -langchain-cohere==0.2.4 -langchain-community==0.2.16 -langchain-core==0.2.39 -langchain-experimental==0.0.65 -langchain-openai==0.1.23 -langchain-pinecone==0.1.3 -langchain-text-splitters==0.2.4 -langsmith==0.1.117 -marshmallow==3.22.0 +langchain==0.3.10 +langchain-anthropic==0.3.0 +langchain-aws==0.2.9 +langchain-cohere==0.3.3 +langchain-community==0.3.10 +langchain-core==0.3.22 +langchain-experimental==0.3.3 +langchain-openai==0.2.11 +langchain-pinecone==0.2.0 +langchain-text-splitters==0.3.2 +langsmith==0.1.147 
+marshmallow==3.23.1 mock==5.1.0 multidict==6.1.0 mypy-extensions==1.0.0 numexpr==2.8.5 numpy==1.26.4 -openai==1.44.1 +openai==1.57.0 opentracing==2.4.0 -orjson==3.10.7 -packaging==24.1 -pandas==2.2.2 +orjson==3.10.12 +packaging==24.2 +pandas==2.2.3 parameterized==0.9.0 pinecone-client==5.0.1 -pinecone-plugin-inference==1.0.3 +pinecone-plugin-inference==1.1.0 pinecone-plugin-interface==0.0.7 pluggy==1.5.0 -psutil==6.0.0 -pydantic==2.9.1 -pydantic-core==2.23.3 -pytest==8.3.3 +propcache==0.2.1 +psutil==6.1.0 +pydantic==2.10.3 +pydantic-core==2.27.1 +pydantic-settings==2.6.1 +pytest==8.3.4 pytest-asyncio==0.23.7 -pytest-cov==5.0.0 +pytest-cov==6.0.0 pytest-mock==3.14.0 pytest-randomly==3.10.1 python-dateutil==2.9.0.post0 -pytz==2024.1 +python-dotenv==1.0.1 +pytz==2024.2 pyyaml==6.0.2 -regex==2024.7.24 +regex==2024.11.6 requests==2.32.3 -s3transfer==0.10.2 +requests-toolbelt==1.0.0 +s3transfer==0.10.4 sentencepiece==0.2.0 -six==1.16.0 +six==1.17.0 sniffio==1.3.1 sortedcontainers==2.4.0 -sqlalchemy==2.0.34 +sqlalchemy==2.0.36 tabulate==0.9.0 tenacity==8.5.0 -tiktoken==0.7.0 -tokenizers==0.20.0 -tqdm==4.66.5 -types-requests==2.32.0.20240907 +tiktoken==0.8.0 +tokenizers==0.21.0 +tqdm==4.67.1 +types-requests==2.32.0.20241016 typing-extensions==4.12.2 typing-inspect==0.9.0 -tzdata==2024.1 -urllib3==2.2.2 +tzdata==2024.2 +urllib3==2.2.3 vcrpy==5.1.0 -wrapt==1.16.0 -yarl==1.11.1 +wrapt==1.17.0 +yarl==1.18.3 diff --git a/.riot/requirements/18c6e70.txt b/.riot/requirements/18c6e70.txt new file mode 100644 index 00000000000..f257d8ded2b --- /dev/null +++ b/.riot/requirements/18c6e70.txt @@ -0,0 +1,19 @@ +# +# This file is autogenerated by pip-compile with Python 3.13 +# by the following command: +# +# pip-compile --allow-unsafe --no-annotate .riot/requirements/18c6e70.in +# +attrs==24.2.0 +coverage[toml]==7.6.1 +hypothesis==6.45.0 +iniconfig==2.0.0 +mock==5.1.0 +opentracing==2.4.0 +packaging==24.1 +pluggy==1.5.0 +pytest==8.3.3 +pytest-cov==5.0.0 +pytest-mock==3.14.0 
+pytest-randomly==3.15.0 +sortedcontainers==2.4.0 diff --git a/.riot/requirements/18e9526.txt b/.riot/requirements/18e9526.txt new file mode 100644 index 00000000000..ce6bddab69f --- /dev/null +++ b/.riot/requirements/18e9526.txt @@ -0,0 +1,28 @@ +# +# This file is autogenerated by pip-compile with Python 3.13 +# by the following command: +# +# pip-compile --allow-unsafe --no-annotate .riot/requirements/18e9526.in +# +attrs==24.2.0 +certifi==2024.8.30 +charset-normalizer==3.3.2 +coverage[toml]==7.6.1 +events==0.5 +hypothesis==6.45.0 +idna==3.10 +iniconfig==2.0.0 +mock==5.1.0 +opensearch-py[requests]==2.7.1 +opentracing==2.4.0 +packaging==24.1 +pluggy==1.5.0 +pytest==8.3.3 +pytest-cov==5.0.0 +pytest-mock==3.14.0 +pytest-randomly==3.15.0 +python-dateutil==2.9.0.post0 +requests==2.32.3 +six==1.16.0 +sortedcontainers==2.4.0 +urllib3==2.2.3 diff --git a/.riot/requirements/192c7c0.txt b/.riot/requirements/192c7c0.txt new file mode 100644 index 00000000000..15f53062f83 --- /dev/null +++ b/.riot/requirements/192c7c0.txt @@ -0,0 +1,22 @@ +# +# This file is autogenerated by pip-compile with Python 3.13 +# by the following command: +# +# pip-compile --allow-unsafe --no-annotate .riot/requirements/192c7c0.in +# +attrs==24.2.0 +certifi==2024.8.30 +coverage[toml]==7.6.1 +elasticsearch==7.17.12 +hypothesis==6.45.0 +iniconfig==2.0.0 +mock==5.1.0 +opentracing==2.4.0 +packaging==24.1 +pluggy==1.5.0 +pytest==8.3.3 +pytest-cov==5.0.0 +pytest-mock==3.14.0 +pytest-randomly==3.15.0 +sortedcontainers==2.4.0 +urllib3==1.26.20 diff --git a/.riot/requirements/19bbf6d.txt b/.riot/requirements/19bbf6d.txt new file mode 100644 index 00000000000..1e31a198638 --- /dev/null +++ b/.riot/requirements/19bbf6d.txt @@ -0,0 +1,22 @@ +# +# This file is autogenerated by pip-compile with Python 3.13 +# by the following command: +# +# pip-compile --allow-unsafe --no-annotate .riot/requirements/19bbf6d.in +# +attrs==24.2.0 +coverage[toml]==7.6.1 +dnspython==2.7.0 +hypothesis==6.45.0 +iniconfig==2.0.0 
+mock==5.1.0 +mongoengine==0.29.1 +opentracing==2.4.0 +packaging==24.1 +pluggy==1.5.0 +pymongo==4.10.1 +pytest==8.3.3 +pytest-cov==5.0.0 +pytest-mock==3.14.0 +pytest-randomly==3.15.0 +sortedcontainers==2.4.0 diff --git a/.riot/requirements/19f2225.txt b/.riot/requirements/19f2225.txt index dc86f7981bf..63df4f55d90 100644 --- a/.riot/requirements/19f2225.txt +++ b/.riot/requirements/19f2225.txt @@ -4,39 +4,39 @@ # # pip-compile --allow-unsafe --no-annotate .riot/requirements/19f2225.in # -ai21==2.14.1 +ai21==3.0.1 ai21-tokenizer==0.12.0 -aiohappyeyeballs==2.4.0 -aiohttp==3.10.5 +aiohappyeyeballs==2.4.4 +aiohttp==3.11.10 aiosignal==1.3.1 annotated-types==0.7.0 anthropic==0.26.0 -anyio==4.4.0 +anyio==4.7.0 async-timeout==4.0.3 attrs==24.2.0 boto3==1.34.51 botocore==1.34.51 certifi==2024.8.30 -charset-normalizer==3.3.2 +charset-normalizer==3.4.0 cohere==5.4.0 -coverage[toml]==7.6.1 +coverage[toml]==7.6.9 dataclasses-json==0.6.7 defusedxml==0.7.1 distro==1.9.0 exceptiongroup==1.2.2 faiss-cpu==1.8.0 fastavro==1.9.7 -filelock==3.16.0 -frozenlist==1.4.1 -fsspec==2024.9.0 +filelock==3.16.1 +frozenlist==1.5.0 +fsspec==2024.10.0 greenlet==3.0.3 h11==0.14.0 -httpcore==1.0.5 +httpcore==1.0.7 httpx==0.27.2 httpx-sse==0.4.0 -huggingface-hub==0.24.6 +huggingface-hub==0.26.5 hypothesis==6.45.0 -idna==3.8 +idna==3.10 iniconfig==2.0.0 jmespath==1.0.1 jsonpatch==1.33 @@ -50,8 +50,8 @@ langchain-core==0.1.52 langchain-openai==0.1.6 langchain-pinecone==0.1.0 langchain-text-splitters==0.0.2 -langsmith==0.1.117 -marshmallow==3.22.0 +langsmith==0.1.147 +marshmallow==3.23.1 mock==5.1.0 multidict==6.1.0 mypy-extensions==1.0.0 @@ -59,37 +59,39 @@ numexpr==2.8.5 numpy==1.26.4 openai==1.30.3 opentracing==2.4.0 -orjson==3.10.7 +orjson==3.10.12 packaging==23.2 pinecone-client==3.2.2 pluggy==1.5.0 -psutil==6.0.0 -pydantic==2.9.1 -pydantic-core==2.23.3 -pytest==8.3.3 +propcache==0.2.1 +psutil==6.1.0 +pydantic==2.10.3 +pydantic-core==2.27.1 +pytest==8.3.4 pytest-asyncio==0.23.7 -pytest-cov==5.0.0 
+pytest-cov==6.0.0 pytest-mock==3.14.0 pytest-randomly==3.10.1 python-dateutil==2.9.0.post0 pyyaml==6.0.2 -regex==2024.7.24 +regex==2024.11.6 requests==2.32.3 -s3transfer==0.10.2 +requests-toolbelt==1.0.0 +s3transfer==0.10.4 sentencepiece==0.2.0 -six==1.16.0 +six==1.17.0 sniffio==1.3.1 sortedcontainers==2.4.0 -sqlalchemy==2.0.34 +sqlalchemy==2.0.36 tenacity==8.5.0 -tiktoken==0.7.0 +tiktoken==0.8.0 tokenizers==0.19.1 -tomli==2.0.1 -tqdm==4.66.5 -types-requests==2.32.0.20240907 +tomli==2.2.1 +tqdm==4.67.1 +types-requests==2.32.0.20241016 typing-extensions==4.12.2 typing-inspect==0.9.0 urllib3==2.0.7 vcrpy==5.1.0 -wrapt==1.16.0 -yarl==1.11.1 +wrapt==1.17.0 +yarl==1.18.3 diff --git a/.riot/requirements/1a485c9.txt b/.riot/requirements/1a485c9.txt new file mode 100644 index 00000000000..558f2540488 --- /dev/null +++ b/.riot/requirements/1a485c9.txt @@ -0,0 +1,23 @@ +# +# This file is autogenerated by pip-compile with Python 3.13 +# by the following command: +# +# pip-compile --allow-unsafe --no-annotate .riot/requirements/1a485c9.in +# +attrs==24.2.0 +coverage[toml]==7.6.1 +decorator==5.1.1 +dogpile-cache==1.3.3 +hypothesis==6.45.0 +iniconfig==2.0.0 +mock==5.1.0 +opentracing==2.4.0 +packaging==24.1 +pbr==6.1.0 +pluggy==1.5.0 +pytest==8.3.3 +pytest-cov==5.0.0 +pytest-mock==3.14.0 +pytest-randomly==3.15.0 +sortedcontainers==2.4.0 +stevedore==5.3.0 diff --git a/.riot/requirements/1a508dc.txt b/.riot/requirements/1a508dc.txt new file mode 100644 index 00000000000..6e2dfecef5e --- /dev/null +++ b/.riot/requirements/1a508dc.txt @@ -0,0 +1,30 @@ +# +# This file is autogenerated by pip-compile with Python 3.13 +# by the following command: +# +# pip-compile --allow-unsafe --no-annotate .riot/requirements/1a508dc.in +# +asgiref==3.8.1 +attrs==24.2.0 +coverage[toml]==7.6.1 +django==3.2.25 +django-configurations==2.5.1 +django-hosts==4.0 +hypothesis==6.45.0 +iniconfig==2.0.0 +mock==5.1.0 +opentracing==2.4.0 +packaging==24.1 +pluggy==1.5.0 +pytest==8.3.3 +pytest-cov==5.0.0 
+pytest-django[testing]==3.10.0 +pytest-mock==3.14.0 +pytest-randomly==3.15.0 +pytz==2024.2 +six==1.16.0 +sortedcontainers==2.4.0 +sqlparse==0.5.1 + +# The following packages are considered to be unsafe in a requirements file: +setuptools==75.1.0 diff --git a/.riot/requirements/1acabe0.txt b/.riot/requirements/1acabe0.txt new file mode 100644 index 00000000000..0f106bcd2dc --- /dev/null +++ b/.riot/requirements/1acabe0.txt @@ -0,0 +1,20 @@ +# +# This file is autogenerated by pip-compile with Python 3.13 +# by the following command: +# +# pip-compile --allow-unsafe --no-annotate .riot/requirements/1acabe0.in +# +attrs==24.2.0 +coverage[toml]==7.6.1 +hypothesis==6.45.0 +iniconfig==2.0.0 +mock==5.1.0 +opentracing==2.4.0 +packaging==24.1 +pluggy==1.5.0 +pytest==8.3.3 +pytest-asyncio==0.21.1 +pytest-cov==5.0.0 +pytest-mock==3.14.0 +pytest-randomly==3.15.0 +sortedcontainers==2.4.0 diff --git a/.riot/requirements/1ada88e.txt b/.riot/requirements/1ada88e.txt new file mode 100644 index 00000000000..5fc0aa5664d --- /dev/null +++ b/.riot/requirements/1ada88e.txt @@ -0,0 +1,29 @@ +# +# This file is autogenerated by pip-compile with Python 3.13 +# by the following command: +# +# pip-compile --allow-unsafe --no-annotate .riot/requirements/1ada88e.in +# +asgiref==3.8.1 +attrs==24.2.0 +coverage[toml]==7.6.1 +django==4.2.16 +django-configurations==2.5.1 +django-hosts==5.2 +hypothesis==6.45.0 +iniconfig==2.0.0 +mock==5.1.0 +opentracing==2.4.0 +packaging==24.1 +pluggy==1.5.0 +pytest==8.3.3 +pytest-cov==5.0.0 +pytest-django[testing]==3.10.0 +pytest-mock==3.14.0 +pytest-randomly==3.15.0 +six==1.16.0 +sortedcontainers==2.4.0 +sqlparse==0.5.1 + +# The following packages are considered to be unsafe in a requirements file: +setuptools==75.1.0 diff --git a/.riot/requirements/1aed5dc.txt b/.riot/requirements/1aed5dc.txt new file mode 100644 index 00000000000..4d8f8858d78 --- /dev/null +++ b/.riot/requirements/1aed5dc.txt @@ -0,0 +1,30 @@ +# +# This file is autogenerated by pip-compile with 
Python 3.13 +# by the following command: +# +# pip-compile --allow-unsafe --no-annotate .riot/requirements/1aed5dc.in +# +attrs==24.2.0 +blinker==1.8.2 +cachelib==0.9.0 +click==7.1.2 +coverage[toml]==7.6.1 +flask==1.1.4 +flask-caching==2.3.0 +hypothesis==6.45.0 +iniconfig==2.0.0 +itsdangerous==1.1.0 +jinja2==2.11.3 +markupsafe==1.1.1 +mock==5.1.0 +opentracing==2.4.0 +packaging==24.1 +pluggy==1.5.0 +pytest==8.3.3 +pytest-cov==5.0.0 +pytest-mock==3.14.0 +pytest-randomly==3.15.0 +python-memcached==1.62 +redis==5.1.1 +sortedcontainers==2.4.0 +werkzeug==1.0.1 diff --git a/.riot/requirements/1af9cfa.txt b/.riot/requirements/1af9cfa.txt index 3a796251a8b..7a5efd36134 100644 --- a/.riot/requirements/1af9cfa.txt +++ b/.riot/requirements/1af9cfa.txt @@ -5,18 +5,18 @@ # pip-compile --allow-unsafe --no-annotate .riot/requirements/1af9cfa.in # attrs==24.2.0 -coverage[toml]==7.6.1 +coverage[toml]==7.6.9 exceptiongroup==1.2.2 hypothesis==6.45.0 iniconfig==2.0.0 mock==5.1.0 opentracing==2.4.0 -packaging==24.1 +packaging==24.2 pluggy==1.5.0 pyodbc==4.0.39 -pytest==8.3.2 -pytest-cov==5.0.0 +pytest==8.3.4 +pytest-cov==6.0.0 pytest-mock==3.14.0 -pytest-randomly==3.15.0 +pytest-randomly==3.16.0 sortedcontainers==2.4.0 -tomli==2.0.1 +tomli==2.2.1 diff --git a/.riot/requirements/1b86c06.txt b/.riot/requirements/1b86c06.txt new file mode 100644 index 00000000000..68de1371257 --- /dev/null +++ b/.riot/requirements/1b86c06.txt @@ -0,0 +1,27 @@ +# +# This file is autogenerated by pip-compile with Python 3.13 +# by the following command: +# +# pip-compile --allow-unsafe --no-annotate .riot/requirements/1b86c06.in +# +attrs==24.2.0 +certifi==2024.8.30 +coverage[toml]==7.6.1 +h11==0.14.0 +httpcore==0.12.3 +httpx==0.17.1 +hypothesis==6.45.0 +idna==3.10 +iniconfig==2.0.0 +mock==5.1.0 +opentracing==2.4.0 +packaging==24.1 +pluggy==1.5.0 +pytest==8.3.3 +pytest-asyncio==0.21.1 +pytest-cov==5.0.0 +pytest-mock==3.14.0 +pytest-randomly==3.15.0 +rfc3986[idna2008]==1.5.0 +sniffio==1.3.1 
+sortedcontainers==2.4.0 diff --git a/.riot/requirements/1b8d922.txt b/.riot/requirements/1b8d922.txt new file mode 100644 index 00000000000..76a225cb035 --- /dev/null +++ b/.riot/requirements/1b8d922.txt @@ -0,0 +1,21 @@ +# +# This file is autogenerated by pip-compile with Python 3.13 +# by the following command: +# +# pip-compile --allow-unsafe --no-annotate .riot/requirements/1b8d922.in +# +attrs==24.2.0 +coverage[toml]==7.6.1 +hypothesis==6.45.0 +iniconfig==2.0.0 +mako==1.1.6 +markupsafe==2.1.5 +mock==5.1.0 +opentracing==2.4.0 +packaging==24.1 +pluggy==1.5.0 +pytest==8.3.3 +pytest-cov==5.0.0 +pytest-mock==3.14.0 +pytest-randomly==3.15.0 +sortedcontainers==2.4.0 diff --git a/.riot/requirements/1ba390a.txt b/.riot/requirements/1ba390a.txt new file mode 100644 index 00000000000..71d341c1fbb --- /dev/null +++ b/.riot/requirements/1ba390a.txt @@ -0,0 +1,21 @@ +# +# This file is autogenerated by pip-compile with Python 3.13 +# by the following command: +# +# pip-compile --allow-unsafe --no-annotate .riot/requirements/1ba390a.in +# +attrs==24.2.0 +coverage[toml]==7.6.1 +decorator==5.1.1 +dogpile-cache==0.9.2 +hypothesis==6.45.0 +iniconfig==2.0.0 +mock==5.1.0 +opentracing==2.4.0 +packaging==24.1 +pluggy==1.5.0 +pytest==8.3.3 +pytest-cov==5.0.0 +pytest-mock==3.14.0 +pytest-randomly==3.15.0 +sortedcontainers==2.4.0 diff --git a/.riot/requirements/1bf3da5.txt b/.riot/requirements/1bf3da5.txt index 24b990913fb..ffd311eb163 100644 --- a/.riot/requirements/1bf3da5.txt +++ b/.riot/requirements/1bf3da5.txt @@ -8,17 +8,17 @@ attrs==24.2.0 coverage[toml]==7.6.1 exceptiongroup==1.2.2 hypothesis==6.45.0 -importlib-metadata==8.4.0 +importlib-metadata==8.5.0 iniconfig==2.0.0 -mariadb==1.1.10 +mariadb==1.1.11 mock==5.1.0 opentracing==2.4.0 -packaging==24.1 +packaging==24.2 pluggy==1.5.0 -pytest==8.3.2 +pytest==8.3.4 pytest-cov==5.0.0 pytest-mock==3.14.0 pytest-randomly==3.15.0 sortedcontainers==2.4.0 -tomli==2.0.1 -zipp==3.20.0 +tomli==2.2.1 +zipp==3.20.2 diff --git 
a/.riot/requirements/1bf4d76.txt b/.riot/requirements/1bf4d76.txt new file mode 100644 index 00000000000..be2efe8e43c --- /dev/null +++ b/.riot/requirements/1bf4d76.txt @@ -0,0 +1,23 @@ +# +# This file is autogenerated by pip-compile with Python 3.13 +# by the following command: +# +# pip-compile --allow-unsafe --no-annotate .riot/requirements/1bf4d76.in +# +attrs==24.2.0 +coverage[toml]==7.6.1 +decorator==5.1.1 +dogpile-cache==1.3.3 +hypothesis==6.45.0 +iniconfig==2.0.0 +mock==5.1.0 +opentracing==2.4.0 +packaging==24.1 +pbr==6.1.0 +pluggy==1.5.0 +pytest==8.3.3 +pytest-cov==5.0.0 +pytest-mock==3.14.0 +pytest-randomly==3.15.0 +sortedcontainers==2.4.0 +stevedore==5.3.0 diff --git a/.riot/requirements/1c22cf9.txt b/.riot/requirements/1c22cf9.txt new file mode 100644 index 00000000000..091cd98d529 --- /dev/null +++ b/.riot/requirements/1c22cf9.txt @@ -0,0 +1,20 @@ +# +# This file is autogenerated by pip-compile with Python 3.13 +# by the following command: +# +# pip-compile --allow-unsafe --no-annotate .riot/requirements/1c22cf9.in +# +attrs==24.2.0 +coverage[toml]==7.6.1 +hypothesis==6.45.0 +iniconfig==2.0.0 +mock==5.1.0 +opentracing==2.4.0 +packaging==24.1 +pluggy==1.5.0 +pylibmc==1.6.3 +pytest==8.3.3 +pytest-cov==5.0.0 +pytest-mock==3.14.0 +pytest-randomly==3.15.0 +sortedcontainers==2.4.0 diff --git a/.riot/requirements/1cb554e.txt b/.riot/requirements/1cb554e.txt new file mode 100644 index 00000000000..27f518b59cc --- /dev/null +++ b/.riot/requirements/1cb554e.txt @@ -0,0 +1,21 @@ +# +# This file is autogenerated by pip-compile with Python 3.13 +# by the following command: +# +# pip-compile --allow-unsafe --no-annotate .riot/requirements/1cb554e.in +# +attrs==24.2.0 +coverage[toml]==7.6.1 +hypothesis==6.45.0 +iniconfig==2.0.0 +mock==5.1.0 +opentracing==2.4.0 +packaging==24.1 +pluggy==1.5.0 +pymemcache==3.4.4 +pytest==8.3.3 +pytest-cov==5.0.0 +pytest-mock==3.14.0 +pytest-randomly==3.15.0 +six==1.16.0 +sortedcontainers==2.4.0 diff --git 
a/.riot/requirements/1ce0711.txt b/.riot/requirements/1ce0711.txt new file mode 100644 index 00000000000..6721b5e5b0b --- /dev/null +++ b/.riot/requirements/1ce0711.txt @@ -0,0 +1,24 @@ +# +# This file is autogenerated by pip-compile with Python 3.13 +# by the following command: +# +# pip-compile --allow-unsafe --no-annotate .riot/requirements/1ce0711.in +# +attrs==24.2.0 +beautifulsoup4==4.12.3 +coverage[toml]==7.6.1 +hypothesis==6.45.0 +iniconfig==2.0.0 +mock==5.1.0 +opentracing==2.4.0 +packaging==24.1 +pluggy==1.5.0 +pytest==8.3.3 +pytest-cov==5.0.0 +pytest-mock==3.14.0 +pytest-randomly==3.15.0 +sortedcontainers==2.4.0 +soupsieve==2.6 +waitress==3.0.0 +webob==1.8.8 +webtest==3.0.1 diff --git a/.riot/requirements/1ce93b3.txt b/.riot/requirements/1ce93b3.txt new file mode 100644 index 00000000000..a0edba9ffd0 --- /dev/null +++ b/.riot/requirements/1ce93b3.txt @@ -0,0 +1,22 @@ +# +# This file is autogenerated by pip-compile with Python 3.13 +# by the following command: +# +# pip-compile --allow-unsafe --no-annotate .riot/requirements/1ce93b3.in +# +attrs==24.2.0 +coverage[toml]==7.6.1 +dnspython==2.7.0 +hypothesis==6.45.0 +iniconfig==2.0.0 +mock==5.1.0 +mongoengine==0.29.1 +opentracing==2.4.0 +packaging==24.1 +pluggy==1.5.0 +pymongo==4.8.0 +pytest==8.3.3 +pytest-cov==5.0.0 +pytest-mock==3.14.0 +pytest-randomly==3.15.0 +sortedcontainers==2.4.0 diff --git a/.riot/requirements/1d74d67.txt b/.riot/requirements/1d74d67.txt new file mode 100644 index 00000000000..32873cff656 --- /dev/null +++ b/.riot/requirements/1d74d67.txt @@ -0,0 +1,24 @@ +# +# This file is autogenerated by pip-compile with Python 3.13 +# by the following command: +# +# pip-compile --allow-unsafe --no-annotate .riot/requirements/1d74d67.in +# +aniso8601==9.0.1 +attrs==24.2.0 +coverage[toml]==7.6.1 +graphene==3.3 +graphql-core==3.2.4 +graphql-relay==3.2.0 +hypothesis==6.45.0 +iniconfig==2.0.0 +mock==5.1.0 +opentracing==2.4.0 +packaging==24.1 +pluggy==1.5.0 +pytest==8.3.3 +pytest-asyncio==0.21.1 
+pytest-cov==5.0.0 +pytest-mock==3.14.0 +pytest-randomly==3.15.0 +sortedcontainers==2.4.0 diff --git a/.riot/requirements/1d8a93c.txt b/.riot/requirements/1d8a93c.txt new file mode 100644 index 00000000000..54f5d2a96c9 --- /dev/null +++ b/.riot/requirements/1d8a93c.txt @@ -0,0 +1,48 @@ +# +# This file is autogenerated by pip-compile with Python 3.13 +# by the following command: +# +# pip-compile --allow-unsafe --no-annotate .riot/requirements/1d8a93c.in +# +aiosqlite==0.17.0 +annotated-types==0.7.0 +attrs==24.2.0 +blinker==1.8.2 +bytecode==0.15.1 +cattrs==22.2.0 +certifi==2024.8.30 +charset-normalizer==3.3.2 +click==8.1.7 +coverage[toml]==7.6.1 +envier==0.5.2 +flask==3.0.3 +hypothesis==6.45.0 +idna==3.10 +iniconfig==2.0.0 +iso8601==1.1.0 +itsdangerous==2.2.0 +jinja2==3.1.4 +markupsafe==2.1.5 +mock==5.1.0 +opentracing==2.4.0 +packaging==24.1 +peewee==3.17.6 +pluggy==1.5.0 +pony==0.7.19 +protobuf==5.28.2 +pycryptodome==3.21.0 +pydantic==2.9.2 +pydantic-core==2.23.4 +pypika-tortoise==0.1.6 +pytest==8.3.3 +pytest-cov==5.0.0 +pytest-mock==3.14.0 +pytz==2024.2 +requests==2.32.3 +sortedcontainers==2.4.0 +sqlalchemy==2.0.35 +tortoise-orm==0.21.6 +typing-extensions==4.12.2 +urllib3==2.2.3 +werkzeug==3.0.4 +xmltodict==0.13.0 diff --git a/.riot/requirements/1dd5678.txt b/.riot/requirements/1dd5678.txt new file mode 100644 index 00000000000..c3ed6ec2447 --- /dev/null +++ b/.riot/requirements/1dd5678.txt @@ -0,0 +1,30 @@ +# +# This file is autogenerated by pip-compile with Python 3.13 +# by the following command: +# +# pip-compile --allow-unsafe --no-annotate .riot/requirements/1dd5678.in +# +attrs==24.2.0 +coverage[toml]==7.6.1 +gevent==24.2.1 +greenlet==3.1.1 +httpretty==1.1.4 +hypothesis==6.45.0 +iniconfig==2.0.0 +mock==5.1.0 +opentracing==2.4.0 +packaging==24.1 +pluggy==1.5.0 +pyfakefs==5.6.0 +pytest==8.3.3 +pytest-asyncio==0.23.8 +pytest-cov==5.0.0 +pytest-mock==3.14.0 +pytest-randomly==3.15.0 +python-json-logger==2.0.7 +sortedcontainers==2.4.0 +zope-event==5.0 
+zope-interface==7.0.3 + +# The following packages are considered to be unsafe in a requirements file: +setuptools==75.1.0 diff --git a/.riot/requirements/15e6ff4.txt b/.riot/requirements/1df4764.txt similarity index 65% rename from .riot/requirements/15e6ff4.txt rename to .riot/requirements/1df4764.txt index 205310cd885..d6cef24569d 100644 --- a/.riot/requirements/15e6ff4.txt +++ b/.riot/requirements/1df4764.txt @@ -1,21 +1,21 @@ # -# This file is autogenerated by pip-compile with Python 3.12 +# This file is autogenerated by pip-compile with Python 3.13 # by the following command: # -# pip-compile --allow-unsafe --no-annotate .riot/requirements/15e6ff4.in +# pip-compile --allow-unsafe --no-annotate .riot/requirements/1df4764.in # annotated-types==0.7.0 -anyio==4.6.2.post1 +anyio==4.7.0 attrs==24.2.0 -boto3==1.35.62 -botocore==1.35.62 +boto3==1.35.78 +botocore==1.35.78 certifi==2024.8.30 -coverage[toml]==7.6.7 -fastapi==0.115.5 +coverage[toml]==7.6.9 +fastapi==0.115.6 h11==0.14.0 httpcore==1.0.7 httpretty==1.1.4 -httpx==0.27.2 +httpx==0.28.1 hypothesis==6.45.0 idna==3.10 iniconfig==2.0.0 @@ -25,22 +25,22 @@ msgpack==1.1.0 opentracing==2.4.0 packaging==24.2 pluggy==1.5.0 -pydantic==2.9.2 -pydantic-core==2.23.4 -pytest==8.3.3 +pydantic==2.10.3 +pydantic-core==2.27.1 +pytest==8.3.4 pytest-cov==6.0.0 pytest-mock==3.14.0 pytest-randomly==3.16.0 python-dateutil==2.9.0.post0 -s3transfer==0.10.3 -six==1.16.0 +s3transfer==0.10.4 +six==1.17.0 sniffio==1.3.1 sortedcontainers==2.4.0 -starlette==0.41.2 +starlette==0.41.3 structlog==24.4.0 typing-extensions==4.12.2 urllib3==2.2.3 -wheel==0.45.0 +wheel==0.45.1 # The following packages are considered to be unsafe in a requirements file: -setuptools==75.5.0 +setuptools==75.6.0 diff --git a/.riot/requirements/1e0ec0b.txt b/.riot/requirements/1e0ec0b.txt index 26f75087ae7..5f5ddcf3598 100644 --- a/.riot/requirements/1e0ec0b.txt +++ b/.riot/requirements/1e0ec0b.txt @@ -5,18 +5,18 @@ # pip-compile --allow-unsafe --no-annotate 
.riot/requirements/1e0ec0b.in # attrs==24.2.0 -coverage[toml]==7.6.1 +coverage[toml]==7.6.9 exceptiongroup==1.2.2 hypothesis==6.45.0 iniconfig==2.0.0 -mariadb==1.1.10 +mariadb==1.1.11 mock==5.1.0 opentracing==2.4.0 -packaging==24.1 +packaging==24.2 pluggy==1.5.0 -pytest==8.3.2 -pytest-cov==5.0.0 +pytest==8.3.4 +pytest-cov==6.0.0 pytest-mock==3.14.0 -pytest-randomly==3.15.0 +pytest-randomly==3.16.0 sortedcontainers==2.4.0 -tomli==2.0.1 +tomli==2.2.1 diff --git a/.riot/requirements/1e19c17.txt b/.riot/requirements/1e19c17.txt new file mode 100644 index 00000000000..615658928e1 --- /dev/null +++ b/.riot/requirements/1e19c17.txt @@ -0,0 +1,29 @@ +# +# This file is autogenerated by pip-compile with Python 3.13 +# by the following command: +# +# pip-compile --allow-unsafe --no-annotate .riot/requirements/1e19c17.in +# +anyio==4.6.0 +asgiref==3.0.0 +async-timeout==3.0.1 +attrs==24.2.0 +certifi==2024.8.30 +coverage[toml]==7.6.1 +h11==0.14.0 +httpcore==1.0.6 +httpx==0.27.2 +hypothesis==6.45.0 +idna==3.10 +iniconfig==2.0.0 +mock==5.1.0 +opentracing==2.4.0 +packaging==24.1 +pluggy==1.5.0 +pytest==8.3.3 +pytest-asyncio==0.21.1 +pytest-cov==5.0.0 +pytest-mock==3.14.0 +pytest-randomly==3.15.0 +sniffio==1.3.1 +sortedcontainers==2.4.0 diff --git a/.riot/requirements/1e4bb51.txt b/.riot/requirements/1e4bb51.txt new file mode 100644 index 00000000000..c160a2df5e6 --- /dev/null +++ b/.riot/requirements/1e4bb51.txt @@ -0,0 +1,24 @@ +# +# This file is autogenerated by pip-compile with Python 3.13 +# by the following command: +# +# pip-compile --allow-unsafe --no-annotate .riot/requirements/1e4bb51.in +# +aniso8601==9.0.1 +attrs==24.2.0 +coverage[toml]==7.6.1 +graphene==3.0 +graphql-core==3.1.7 +graphql-relay==3.1.5 +hypothesis==6.45.0 +iniconfig==2.0.0 +mock==5.1.0 +opentracing==2.4.0 +packaging==24.1 +pluggy==1.5.0 +pytest==8.3.3 +pytest-asyncio==0.21.1 +pytest-cov==5.0.0 +pytest-mock==3.14.0 +pytest-randomly==3.15.0 +sortedcontainers==2.4.0 diff --git a/.riot/requirements/1e4dfe1.txt 
b/.riot/requirements/1e4dfe1.txt new file mode 100644 index 00000000000..11f08da5171 --- /dev/null +++ b/.riot/requirements/1e4dfe1.txt @@ -0,0 +1,28 @@ +# +# This file is autogenerated by pip-compile with Python 3.13 +# by the following command: +# +# pip-compile --allow-unsafe --no-annotate .riot/requirements/1e4dfe1.in +# +aiohappyeyeballs==2.4.3 +aiohttp==3.10.9 +aiosignal==1.3.1 +attrs==24.2.0 +coverage[toml]==7.6.1 +frozenlist==1.4.1 +hypothesis==6.45.0 +idna==3.10 +iniconfig==2.0.0 +mock==5.1.0 +multidict==6.1.0 +opentracing==2.4.0 +packaging==24.1 +pluggy==1.5.0 +pytest==8.3.3 +pytest-aiohttp==1.0.5 +pytest-asyncio==0.23.7 +pytest-cov==5.0.0 +pytest-mock==3.14.0 +pytest-randomly==3.15.0 +sortedcontainers==2.4.0 +yarl==1.13.1 diff --git a/.riot/requirements/1e62aea.txt b/.riot/requirements/1e62aea.txt new file mode 100644 index 00000000000..4a152a7b448 --- /dev/null +++ b/.riot/requirements/1e62aea.txt @@ -0,0 +1,26 @@ +# +# This file is autogenerated by pip-compile with Python 3.10 +# by the following command: +# +# pip-compile --allow-unsafe --no-annotate .riot/requirements/1e62aea.in +# +attrs==24.2.0 +azure-functions==1.21.3 +certifi==2024.8.30 +charset-normalizer==3.4.0 +coverage[toml]==7.6.8 +exceptiongroup==1.2.2 +hypothesis==6.45.0 +idna==3.10 +iniconfig==2.0.0 +mock==5.1.0 +opentracing==2.4.0 +packaging==24.2 +pluggy==1.5.0 +pytest==8.3.3 +pytest-cov==6.0.0 +pytest-mock==3.14.0 +requests==2.32.3 +sortedcontainers==2.4.0 +tomli==2.1.0 +urllib3==2.2.3 diff --git a/.riot/requirements/1e659c4.txt b/.riot/requirements/1e659c4.txt new file mode 100644 index 00000000000..ef8e4a09e09 --- /dev/null +++ b/.riot/requirements/1e659c4.txt @@ -0,0 +1,20 @@ +# +# This file is autogenerated by pip-compile with Python 3.13 +# by the following command: +# +# pip-compile --allow-unsafe --no-annotate .riot/requirements/1e659c4.in +# +attrs==24.2.0 +coverage[toml]==7.6.1 +hypothesis==6.45.0 +iniconfig==2.0.0 +mock==5.1.0 +opentracing==2.4.0 +packaging==24.1 
+pluggy==1.5.0 +pymemcache==4.0.0 +pytest==8.3.3 +pytest-cov==5.0.0 +pytest-mock==3.14.0 +pytest-randomly==3.15.0 +sortedcontainers==2.4.0 diff --git a/.riot/requirements/1e70094.txt b/.riot/requirements/1e70094.txt new file mode 100644 index 00000000000..ac90db74765 --- /dev/null +++ b/.riot/requirements/1e70094.txt @@ -0,0 +1,42 @@ +# +# This file is autogenerated by pip-compile with Python 3.13 +# by the following command: +# +# pip-compile --allow-unsafe --no-annotate .riot/requirements/1e70094.in +# +attrs==24.2.0 +beautifulsoup4==4.12.3 +certifi==2024.8.30 +charset-normalizer==3.3.2 +coverage[toml]==7.6.1 +hupper==1.12.1 +hypothesis==6.45.0 +idna==3.10 +iniconfig==2.0.0 +mock==5.1.0 +opentracing==2.4.0 +packaging==24.1 +pastedeploy==3.1.0 +plaster==1.1.2 +plaster-pastedeploy==1.0.1 +pluggy==1.5.0 +pserve-test-app @ file:///root/project/tests/contrib/pyramid/pserve_app +pyramid==2.0.2 +pytest==8.3.3 +pytest-cov==5.0.0 +pytest-mock==3.14.0 +pytest-randomly==3.15.0 +requests==2.32.3 +sortedcontainers==2.4.0 +soupsieve==2.6 +translationstring==1.4 +urllib3==2.2.3 +venusian==3.1.0 +waitress==3.0.0 +webob==1.8.8 +webtest==3.0.1 +zope-deprecation==5.0 +zope-interface==7.0.3 + +# The following packages are considered to be unsafe in a requirements file: +setuptools==75.1.0 diff --git a/.riot/requirements/1ebb239.txt b/.riot/requirements/1ebb239.txt new file mode 100644 index 00000000000..baa97737f91 --- /dev/null +++ b/.riot/requirements/1ebb239.txt @@ -0,0 +1,35 @@ +# +# This file is autogenerated by pip-compile with Python 3.13 +# by the following command: +# +# pip-compile --allow-unsafe --no-annotate .riot/requirements/1ebb239.in +# +attrs==24.2.0 +autocommand==2.2.2 +cheroot==10.0.1 +cherrypy==18.10.0 +coverage[toml]==7.6.1 +hypothesis==6.45.0 +iniconfig==2.0.0 +jaraco-collections==5.1.0 +jaraco-context==6.0.1 +jaraco-functools==4.1.0 +jaraco-text==4.0.0 +mock==5.1.0 +more-itertools==8.10.0 +opentracing==2.4.0 +packaging==24.1 +pluggy==1.5.0 +portend==3.2.0 
+pytest==8.3.3 +pytest-cov==5.0.0 +pytest-mock==3.14.0 +pytest-randomly==3.15.0 +python-dateutil==2.9.0.post0 +six==1.16.0 +sortedcontainers==2.4.0 +tempora==5.7.0 +zc-lockfile==3.0.post1 + +# The following packages are considered to be unsafe in a requirements file: +setuptools==75.1.0 diff --git a/.riot/requirements/1ec1dbf.txt b/.riot/requirements/1ec1dbf.txt index 3f7fffea275..0a093e6e676 100644 --- a/.riot/requirements/1ec1dbf.txt +++ b/.riot/requirements/1ec1dbf.txt @@ -4,100 +4,103 @@ # # pip-compile --allow-unsafe --no-annotate .riot/requirements/1ec1dbf.in # -ai21==2.14.1 +ai21==3.0.1 ai21-tokenizer==0.12.0 -aiohappyeyeballs==2.4.0 -aiohttp==3.10.5 +aiohttp==3.9.5 aiosignal==1.3.1 annotated-types==0.7.0 -anthropic==0.34.2 -anyio==4.4.0 +anthropic==0.40.0 +anyio==4.7.0 async-timeout==4.0.3 attrs==24.2.0 -boto3==1.34.162 -botocore==1.34.162 +boto3==1.35.76 +botocore==1.35.76 certifi==2024.8.30 -charset-normalizer==3.3.2 -cohere==5.9.1 -coverage[toml]==7.6.1 +charset-normalizer==3.4.0 +cohere==5.13.3 +coverage[toml]==7.6.9 dataclasses-json==0.6.7 defusedxml==0.7.1 distro==1.9.0 exceptiongroup==1.2.2 fastavro==1.9.7 -filelock==3.16.0 -frozenlist==1.4.1 -fsspec==2024.9.0 +filelock==3.16.1 +frozenlist==1.5.0 +fsspec==2024.10.0 greenlet==3.0.3 h11==0.14.0 -httpcore==1.0.5 +httpcore==1.0.7 httpx==0.27.2 httpx-sse==0.4.0 -huggingface-hub==0.24.6 +huggingface-hub==0.26.5 hypothesis==6.45.0 -idna==3.8 +idna==3.10 iniconfig==2.0.0 -jiter==0.5.0 +jiter==0.8.0 jmespath==1.0.1 jsonpatch==1.33 jsonpointer==3.0.0 -langchain==0.2.16 -langchain-anthropic==0.1.23 -langchain-aws==0.1.18 -langchain-cohere==0.2.4 -langchain-community==0.2.16 -langchain-core==0.2.39 -langchain-experimental==0.0.65 -langchain-openai==0.1.23 -langchain-pinecone==0.1.3 -langchain-text-splitters==0.2.4 -langsmith==0.1.117 -marshmallow==3.22.0 +langchain==0.3.10 +langchain-anthropic==0.3.0 +langchain-aws==0.2.9 +langchain-cohere==0.3.3 +langchain-community==0.3.10 +langchain-core==0.3.22 
+langchain-experimental==0.3.3 +langchain-openai==0.2.11 +langchain-pinecone==0.2.0 +langchain-text-splitters==0.3.2 +langsmith==0.1.147 +marshmallow==3.23.1 mock==5.1.0 multidict==6.1.0 mypy-extensions==1.0.0 numexpr==2.8.5 numpy==1.26.4 -openai==1.44.1 +openai==1.57.0 opentracing==2.4.0 -orjson==3.10.7 -packaging==24.1 -pandas==2.2.2 +orjson==3.10.12 +packaging==24.2 +pandas==2.2.3 parameterized==0.9.0 pinecone-client==5.0.1 -pinecone-plugin-inference==1.0.3 +pinecone-plugin-inference==1.1.0 pinecone-plugin-interface==0.0.7 pluggy==1.5.0 -psutil==6.0.0 -pydantic==2.9.1 -pydantic-core==2.23.3 -pytest==8.3.3 +propcache==0.2.1 +psutil==6.1.0 +pydantic==2.10.3 +pydantic-core==2.27.1 +pydantic-settings==2.6.1 +pytest==8.3.4 pytest-asyncio==0.23.7 -pytest-cov==5.0.0 +pytest-cov==6.0.0 pytest-mock==3.14.0 pytest-randomly==3.10.1 python-dateutil==2.9.0.post0 -pytz==2024.1 +python-dotenv==1.0.1 +pytz==2024.2 pyyaml==6.0.2 -regex==2024.7.24 +regex==2024.11.6 requests==2.32.3 -s3transfer==0.10.2 +requests-toolbelt==1.0.0 +s3transfer==0.10.4 sentencepiece==0.2.0 -six==1.16.0 +six==1.17.0 sniffio==1.3.1 sortedcontainers==2.4.0 -sqlalchemy==2.0.34 +sqlalchemy==2.0.36 tabulate==0.9.0 tenacity==8.5.0 -tiktoken==0.7.0 -tokenizers==0.20.0 -tomli==2.0.1 -tqdm==4.66.5 -types-requests==2.32.0.20240907 +tiktoken==0.8.0 +tokenizers==0.21.0 +tomli==2.2.1 +tqdm==4.67.1 +types-requests==2.32.0.20241016 typing-extensions==4.12.2 typing-inspect==0.9.0 -tzdata==2024.1 -urllib3==2.2.2 +tzdata==2024.2 +urllib3==2.2.3 vcrpy==5.1.0 -wrapt==1.16.0 -yarl==1.11.1 +wrapt==1.17.0 +yarl==1.18.3 diff --git a/.riot/requirements/1ec9462.txt b/.riot/requirements/1ec9462.txt new file mode 100644 index 00000000000..da918b276a7 --- /dev/null +++ b/.riot/requirements/1ec9462.txt @@ -0,0 +1,20 @@ +# +# This file is autogenerated by pip-compile with Python 3.13 +# by the following command: +# +# pip-compile --allow-unsafe --no-annotate .riot/requirements/1ec9462.in +# +attrs==24.2.0 +coverage[toml]==7.6.1 
+hypothesis==6.45.0 +iniconfig==2.0.0 +mock==5.1.0 +opentracing==2.4.0 +packaging==24.1 +pluggy==1.5.0 +pytest==8.3.3 +pytest-cov==5.0.0 +pytest-mock==3.14.0 +pytest-randomly==3.15.0 +sortedcontainers==2.4.0 +urllib3==2.0.0 diff --git a/.riot/requirements/1ef773e.txt b/.riot/requirements/1ef773e.txt index 16dcedbeacf..88ce7283fd9 100644 --- a/.riot/requirements/1ef773e.txt +++ b/.riot/requirements/1ef773e.txt @@ -5,16 +5,16 @@ # pip-compile --allow-unsafe --no-annotate .riot/requirements/1ef773e.in # attrs==24.2.0 -coverage[toml]==7.6.1 +coverage[toml]==7.6.9 hypothesis==6.45.0 iniconfig==2.0.0 mock==5.1.0 opentracing==2.4.0 -packaging==24.1 +packaging==24.2 pluggy==1.5.0 -pyodbc==5.1.0 -pytest==8.3.2 -pytest-cov==5.0.0 +pyodbc==5.2.0 +pytest==8.3.4 +pytest-cov==6.0.0 pytest-mock==3.14.0 -pytest-randomly==3.15.0 +pytest-randomly==3.16.0 sortedcontainers==2.4.0 diff --git a/.riot/requirements/1f2ab25.txt b/.riot/requirements/1f2ab25.txt index eb17561c98f..ee70e55666e 100644 --- a/.riot/requirements/1f2ab25.txt +++ b/.riot/requirements/1f2ab25.txt @@ -2,25 +2,25 @@ # This file is autogenerated by pip-compile with Python 3.8 # by the following command: # -# pip-compile --no-annotate .riot/requirements/1f2ab25.in +# pip-compile --allow-unsafe --no-annotate .riot/requirements/1f2ab25.in # -async-timeout==4.0.3 -asyncpg==0.29.0 -attrs==23.2.0 -coverage[toml]==7.4.0 -exceptiongroup==1.2.0 +async-timeout==5.0.1 +asyncpg==0.30.0 +attrs==24.2.0 +coverage[toml]==7.6.1 +exceptiongroup==1.2.2 hypothesis==6.45.0 -importlib-metadata==7.0.1 +importlib-metadata==8.5.0 iniconfig==2.0.0 mock==5.1.0 opentracing==2.4.0 -packaging==23.2 -pluggy==1.3.0 -pytest==7.4.4 -pytest-asyncio==0.21.1 -pytest-cov==4.1.0 -pytest-mock==3.12.0 +packaging==24.2 +pluggy==1.5.0 +pytest==8.3.4 +pytest-asyncio==0.21.2 +pytest-cov==5.0.0 +pytest-mock==3.14.0 pytest-randomly==3.15.0 sortedcontainers==2.4.0 -tomli==2.0.1 -zipp==3.17.0 +tomli==2.2.1 +zipp==3.20.2 diff --git a/.riot/requirements/1f3b209.txt 
b/.riot/requirements/1f3b209.txt new file mode 100644 index 00000000000..ed48c26f9b8 --- /dev/null +++ b/.riot/requirements/1f3b209.txt @@ -0,0 +1,20 @@ +# +# This file is autogenerated by pip-compile with Python 3.13 +# by the following command: +# +# pip-compile --allow-unsafe --no-annotate .riot/requirements/1f3b209.in +# +attrs==24.2.0 +coverage[toml]==7.6.1 +hypothesis==6.45.0 +iniconfig==2.0.0 +mariadb==1.1.10 +mock==5.1.0 +opentracing==2.4.0 +packaging==24.1 +pluggy==1.5.0 +pytest==8.3.3 +pytest-cov==5.0.0 +pytest-mock==3.14.0 +pytest-randomly==3.15.0 +sortedcontainers==2.4.0 diff --git a/.riot/requirements/1fa3005.txt b/.riot/requirements/1fa3005.txt new file mode 100644 index 00000000000..d05c2537930 --- /dev/null +++ b/.riot/requirements/1fa3005.txt @@ -0,0 +1,21 @@ +# +# This file is autogenerated by pip-compile with Python 3.13 +# by the following command: +# +# pip-compile --allow-unsafe --no-annotate .riot/requirements/1fa3005.in +# +attrs==24.2.0 +coverage[toml]==7.6.1 +hypothesis==6.45.0 +iniconfig==2.0.0 +jinja2==3.0.3 +markupsafe==2.1.5 +mock==5.1.0 +opentracing==2.4.0 +packaging==24.1 +pluggy==1.5.0 +pytest==8.3.3 +pytest-cov==5.0.0 +pytest-mock==3.14.0 +pytest-randomly==3.15.0 +sortedcontainers==2.4.0 diff --git a/.riot/requirements/1fc9ecc.txt b/.riot/requirements/1fc9ecc.txt new file mode 100644 index 00000000000..f4245743dde --- /dev/null +++ b/.riot/requirements/1fc9ecc.txt @@ -0,0 +1,20 @@ +# +# This file is autogenerated by pip-compile with Python 3.13 +# by the following command: +# +# pip-compile --allow-unsafe --no-annotate .riot/requirements/1fc9ecc.in +# +attrs==24.2.0 +coverage[toml]==7.6.1 +hypothesis==6.45.0 +iniconfig==2.0.0 +mariadb==1.1.10 +mock==5.1.0 +opentracing==2.4.0 +packaging==24.1 +pluggy==1.5.0 +pytest==8.3.3 +pytest-cov==5.0.0 +pytest-mock==3.14.0 +pytest-randomly==3.15.0 +sortedcontainers==2.4.0 diff --git a/.riot/requirements/1fe8dd2.txt b/.riot/requirements/1fe8dd2.txt new file mode 100644 index 
00000000000..c6356e47072 --- /dev/null +++ b/.riot/requirements/1fe8dd2.txt @@ -0,0 +1,83 @@ +# +# This file is autogenerated by pip-compile with Python 3.13 +# by the following command: +# +# pip-compile --no-annotate .riot/requirements/1fe8dd2.in +# +aiohappyeyeballs==2.4.4 +aiohttp==3.11.10 +aiosignal==1.3.1 +annotated-types==0.7.0 +anyio==4.7.0 +appdirs==1.4.4 +attrs==24.2.0 +certifi==2024.8.30 +charset-normalizer==3.4.0 +coverage[toml]==7.6.9 +dataclasses-json==0.6.7 +datasets==3.2.0 +dill==0.3.8 +distro==1.9.0 +filelock==3.16.1 +frozenlist==1.5.0 +fsspec[http]==2024.9.0 +h11==0.14.0 +httpcore==1.0.7 +httpx==0.28.1 +huggingface-hub==0.26.5 +hypothesis==6.45.0 +idna==3.10 +iniconfig==2.0.0 +jiter==0.8.2 +jsonpatch==1.33 +jsonpointer==3.0.0 +langchain==0.2.17 +langchain-community==0.2.19 +langchain-core==0.2.43 +langchain-openai==0.1.25 +langchain-text-splitters==0.2.4 +langsmith==0.1.147 +marshmallow==3.23.1 +mock==5.1.0 +multidict==6.1.0 +multiprocess==0.70.16 +mypy-extensions==1.0.0 +nest-asyncio==1.6.0 +numpy==1.26.4 +openai==1.57.2 +opentracing==2.4.0 +orjson==3.10.12 +packaging==24.2 +pandas==2.2.3 +pluggy==1.5.0 +propcache==0.2.1 +pyarrow==18.1.0 +pydantic==2.10.3 +pydantic-core==2.27.1 +pysbd==0.3.4 +pytest==8.3.4 +pytest-asyncio==0.21.1 +pytest-cov==6.0.0 +pytest-mock==3.14.0 +python-dateutil==2.9.0.post0 +pytz==2024.2 +pyyaml==6.0.2 +ragas==0.1.21 +regex==2024.11.6 +requests==2.32.3 +requests-toolbelt==1.0.0 +six==1.17.0 +sniffio==1.3.1 +sortedcontainers==2.4.0 +sqlalchemy==2.0.36 +tenacity==8.5.0 +tiktoken==0.8.0 +tqdm==4.67.1 +typing-extensions==4.12.2 +typing-inspect==0.9.0 +tzdata==2024.2 +urllib3==2.2.3 +vcrpy==6.0.2 +wrapt==1.17.0 +xxhash==3.5.0 +yarl==1.18.3 diff --git a/.riot/requirements/248da41.txt b/.riot/requirements/248da41.txt new file mode 100644 index 00000000000..34d903b5cbf --- /dev/null +++ b/.riot/requirements/248da41.txt @@ -0,0 +1,24 @@ +# +# This file is autogenerated by pip-compile with Python 3.13 +# by the following command: 
+# +# pip-compile --allow-unsafe --no-annotate .riot/requirements/248da41.in +# +attrs==24.2.0 +certifi==2024.8.30 +charset-normalizer==3.3.2 +coverage[toml]==7.6.1 +docker==7.1.0 +hypothesis==6.45.0 +idna==3.10 +iniconfig==2.0.0 +mock==5.1.0 +opentracing==2.4.0 +packaging==24.1 +pluggy==1.5.0 +pytest==8.3.3 +pytest-cov==5.0.0 +pytest-mock==3.14.0 +requests==2.32.3 +sortedcontainers==2.4.0 +urllib3==2.2.3 diff --git a/.riot/requirements/2538ed0.txt b/.riot/requirements/2538ed0.txt new file mode 100644 index 00000000000..f3d631a3ba0 --- /dev/null +++ b/.riot/requirements/2538ed0.txt @@ -0,0 +1,23 @@ +# +# This file is autogenerated by pip-compile with Python 3.13 +# by the following command: +# +# pip-compile --allow-unsafe --no-annotate .riot/requirements/2538ed0.in +# +attrs==24.2.0 +certifi==2024.8.30 +coverage[toml]==7.6.1 +elastic-transport==8.15.0 +elasticsearch==8.0.1 +hypothesis==6.45.0 +iniconfig==2.0.0 +mock==5.1.0 +opentracing==2.4.0 +packaging==24.1 +pluggy==1.5.0 +pytest==8.3.3 +pytest-cov==5.0.0 +pytest-mock==3.14.0 +pytest-randomly==3.15.0 +sortedcontainers==2.4.0 +urllib3==2.2.3 diff --git a/.riot/requirements/2581b3a.txt b/.riot/requirements/2581b3a.txt new file mode 100644 index 00000000000..b0fbf422fae --- /dev/null +++ b/.riot/requirements/2581b3a.txt @@ -0,0 +1,20 @@ +# +# This file is autogenerated by pip-compile with Python 3.13 +# by the following command: +# +# pip-compile --allow-unsafe --no-annotate .riot/requirements/2581b3a.in +# +attrs==24.2.0 +coverage[toml]==7.6.1 +hypothesis==6.45.0 +iniconfig==2.0.0 +mock==5.1.0 +mysql-connector-python==9.0.0 +opentracing==2.4.0 +packaging==24.1 +pluggy==1.5.0 +pytest==8.3.3 +pytest-cov==5.0.0 +pytest-mock==3.14.0 +pytest-randomly==3.15.0 +sortedcontainers==2.4.0 diff --git a/.riot/requirements/2644218.txt b/.riot/requirements/2644218.txt new file mode 100644 index 00000000000..0af7a95877a --- /dev/null +++ b/.riot/requirements/2644218.txt @@ -0,0 +1,22 @@ +# +# This file is autogenerated by 
pip-compile with Python 3.13 +# by the following command: +# +# pip-compile --allow-unsafe --no-annotate .riot/requirements/2644218.in +# +attrs==24.2.0 +coverage[toml]==7.6.1 +httpretty==1.1.4 +hypothesis==6.45.0 +iniconfig==2.0.0 +mock==5.1.0 +msgpack==1.1.0 +opentracing==2.4.0 +packaging==24.1 +pluggy==1.5.0 +pytest==8.3.3 +pytest-asyncio==0.24.0 +pytest-cov==5.0.0 +pytest-mock==3.14.0 +sortedcontainers==2.4.0 +typing-extensions==4.12.2 diff --git a/.riot/requirements/27d0ff8.txt b/.riot/requirements/27d0ff8.txt new file mode 100644 index 00000000000..291fe50cacc --- /dev/null +++ b/.riot/requirements/27d0ff8.txt @@ -0,0 +1,21 @@ +# +# This file is autogenerated by pip-compile with Python 3.13 +# by the following command: +# +# pip-compile --allow-unsafe --no-annotate .riot/requirements/27d0ff8.in +# +attrs==24.2.0 +coverage[toml]==7.6.1 +hypothesis==6.45.0 +iniconfig==2.0.0 +mako==1.3.5 +markupsafe==2.1.5 +mock==5.1.0 +opentracing==2.4.0 +packaging==24.1 +pluggy==1.5.0 +pytest==8.3.3 +pytest-cov==5.0.0 +pytest-mock==3.14.0 +pytest-randomly==3.15.0 +sortedcontainers==2.4.0 diff --git a/.riot/requirements/27e3d7b.txt b/.riot/requirements/27e3d7b.txt new file mode 100644 index 00000000000..602a0f0c52d --- /dev/null +++ b/.riot/requirements/27e3d7b.txt @@ -0,0 +1,21 @@ +# +# This file is autogenerated by pip-compile with Python 3.13 +# by the following command: +# +# pip-compile --allow-unsafe --no-annotate .riot/requirements/27e3d7b.in +# +attrs==24.2.0 +coverage[toml]==7.6.1 +graphql-core==3.2.4 +hypothesis==6.45.0 +iniconfig==2.0.0 +mock==5.1.0 +opentracing==2.4.0 +packaging==24.1 +pluggy==1.5.0 +pytest==8.3.3 +pytest-asyncio==0.21.1 +pytest-cov==5.0.0 +pytest-mock==3.14.0 +pytest-randomly==3.15.0 +sortedcontainers==2.4.0 diff --git a/.riot/requirements/2d6c3d0.txt b/.riot/requirements/2d6c3d0.txt new file mode 100644 index 00000000000..a2b00eb5c7c --- /dev/null +++ b/.riot/requirements/2d6c3d0.txt @@ -0,0 +1,20 @@ +# +# This file is autogenerated by pip-compile 
with Python 3.13 +# by the following command: +# +# pip-compile --allow-unsafe --no-annotate .riot/requirements/2d6c3d0.in +# +attrs==24.2.0 +coverage[toml]==7.6.1 +hypothesis==6.45.0 +iniconfig==2.0.0 +mock==5.1.0 +msgpack==1.1.0 +opentracing==2.4.0 +packaging==24.1 +pluggy==1.5.0 +pytest==8.3.3 +pytest-cov==5.0.0 +pytest-mock==3.14.0 +pytest-randomly==3.15.0 +sortedcontainers==2.4.0 diff --git a/.riot/requirements/2dd0811.txt b/.riot/requirements/2dd0811.txt new file mode 100644 index 00000000000..ecd42e076bd --- /dev/null +++ b/.riot/requirements/2dd0811.txt @@ -0,0 +1,21 @@ +# +# This file is autogenerated by pip-compile with Python 3.13 +# by the following command: +# +# pip-compile --allow-unsafe --no-annotate .riot/requirements/2dd0811.in +# +attrs==24.2.0 +coverage[toml]==7.6.1 +graphql-core==3.2.4 +hypothesis==6.45.0 +iniconfig==2.0.0 +mock==5.1.0 +opentracing==2.4.0 +packaging==24.1 +pluggy==1.5.0 +pytest==8.3.3 +pytest-asyncio==0.21.1 +pytest-cov==5.0.0 +pytest-mock==3.14.0 +pytest-randomly==3.15.0 +sortedcontainers==2.4.0 diff --git a/.riot/requirements/3ab519c.txt b/.riot/requirements/3ab519c.txt new file mode 100644 index 00000000000..fd80ad8e698 --- /dev/null +++ b/.riot/requirements/3ab519c.txt @@ -0,0 +1,28 @@ +# +# This file is autogenerated by pip-compile with Python 3.13 +# by the following command: +# +# pip-compile --allow-unsafe --no-annotate .riot/requirements/3ab519c.in +# +anyio==4.6.0 +asgiref==3.8.1 +attrs==24.2.0 +certifi==2024.8.30 +coverage[toml]==7.6.1 +h11==0.14.0 +httpcore==1.0.6 +httpx==0.27.2 +hypothesis==6.45.0 +idna==3.10 +iniconfig==2.0.0 +mock==5.1.0 +opentracing==2.4.0 +packaging==24.1 +pluggy==1.5.0 +pytest==8.3.3 +pytest-asyncio==0.21.1 +pytest-cov==5.0.0 +pytest-mock==3.14.0 +pytest-randomly==3.15.0 +sniffio==1.3.1 +sortedcontainers==2.4.0 diff --git a/.riot/requirements/3b804dc.txt b/.riot/requirements/3b804dc.txt new file mode 100644 index 00000000000..aa60e7c9491 --- /dev/null +++ b/.riot/requirements/3b804dc.txt @@ 
-0,0 +1,28 @@ +# +# This file is autogenerated by pip-compile with Python 3.13 +# by the following command: +# +# pip-compile --allow-unsafe --no-annotate .riot/requirements/3b804dc.in +# +anyio==4.6.0 +asgiref==3.8.1 +attrs==24.2.0 +certifi==2024.8.30 +coverage[toml]==7.6.1 +h11==0.14.0 +httpcore==1.0.6 +httpx==0.27.2 +hypothesis==6.45.0 +idna==3.10 +iniconfig==2.0.0 +mock==5.1.0 +opentracing==2.4.0 +packaging==24.1 +pluggy==1.5.0 +pytest==8.3.3 +pytest-asyncio==0.21.1 +pytest-cov==5.0.0 +pytest-mock==3.14.0 +pytest-randomly==3.15.0 +sniffio==1.3.1 +sortedcontainers==2.4.0 diff --git a/.riot/requirements/3c3f295.txt b/.riot/requirements/3c3f295.txt new file mode 100644 index 00000000000..c97658e408e --- /dev/null +++ b/.riot/requirements/3c3f295.txt @@ -0,0 +1,23 @@ +# +# This file is autogenerated by pip-compile with Python 3.13 +# by the following command: +# +# pip-compile --allow-unsafe --no-annotate .riot/requirements/3c3f295.in +# +attrs==24.2.0 +certifi==2024.8.30 +coverage[toml]==7.6.1 +elastic-transport==8.15.0 +elasticsearch8==8.0.1 +hypothesis==6.45.0 +iniconfig==2.0.0 +mock==5.1.0 +opentracing==2.4.0 +packaging==24.1 +pluggy==1.5.0 +pytest==8.3.3 +pytest-cov==5.0.0 +pytest-mock==3.14.0 +pytest-randomly==3.15.0 +sortedcontainers==2.4.0 +urllib3==2.2.3 diff --git a/.riot/requirements/3dd53da.txt b/.riot/requirements/3dd53da.txt new file mode 100644 index 00000000000..088ac0ddd7e --- /dev/null +++ b/.riot/requirements/3dd53da.txt @@ -0,0 +1,22 @@ +# +# This file is autogenerated by pip-compile with Python 3.13 +# by the following command: +# +# pip-compile --allow-unsafe --no-annotate .riot/requirements/3dd53da.in +# +attrs==24.2.0 +coverage[toml]==7.6.1 +dnspython==2.7.0 +hypothesis==6.45.0 +iniconfig==2.0.0 +mock==5.1.0 +mongoengine==0.29.1 +opentracing==2.4.0 +packaging==24.1 +pluggy==1.5.0 +pymongo==4.8.0 +pytest==8.3.3 +pytest-cov==5.0.0 +pytest-mock==3.14.0 +pytest-randomly==3.15.0 +sortedcontainers==2.4.0 diff --git a/.riot/requirements/3f1be84.txt 
b/.riot/requirements/3f1be84.txt new file mode 100644 index 00000000000..fb754701b3b --- /dev/null +++ b/.riot/requirements/3f1be84.txt @@ -0,0 +1,23 @@ +# +# This file is autogenerated by pip-compile with Python 3.13 +# by the following command: +# +# pip-compile --allow-unsafe --no-annotate .riot/requirements/3f1be84.in +# +attrs==24.2.0 +certifi==2024.8.30 +coverage[toml]==7.6.1 +elastic-transport==8.15.0 +elasticsearch8==8.15.1 +hypothesis==6.45.0 +iniconfig==2.0.0 +mock==5.1.0 +opentracing==2.4.0 +packaging==24.1 +pluggy==1.5.0 +pytest==8.3.3 +pytest-cov==5.0.0 +pytest-mock==3.14.0 +pytest-randomly==3.15.0 +sortedcontainers==2.4.0 +urllib3==2.2.3 diff --git a/.riot/requirements/1edf426.txt b/.riot/requirements/4132bce.txt similarity index 70% rename from .riot/requirements/1edf426.txt rename to .riot/requirements/4132bce.txt index 56a5eb28b4d..b27023913a3 100644 --- a/.riot/requirements/1edf426.txt +++ b/.riot/requirements/4132bce.txt @@ -2,11 +2,11 @@ # This file is autogenerated by pip-compile with Python 3.12 # by the following command: # -# pip-compile --allow-unsafe --no-annotate .riot/requirements/1edf426.in +# pip-compile --no-annotate .riot/requirements/4132bce.in # attrs==24.2.0 -coverage[toml]==7.6.4 -gevent==24.11.1 +coverage[toml]==7.6.9 +gevent==23.9.1 greenlet==3.1.1 hypothesis==6.45.0 iniconfig==2.0.0 @@ -14,13 +14,13 @@ mock==5.1.0 opentracing==2.4.0 packaging==24.2 pluggy==1.5.0 -pytest==8.3.3 +pytest==8.3.4 pytest-cov==6.0.0 pytest-mock==3.14.0 pytest-randomly==3.16.0 sortedcontainers==2.4.0 zope-event==5.0 -zope-interface==7.1.1 +zope-interface==7.2 # The following packages are considered to be unsafe in a requirements file: -setuptools==75.3.0 +# setuptools diff --git a/.riot/requirements/44eeaa9.txt b/.riot/requirements/44eeaa9.txt new file mode 100644 index 00000000000..138f4161595 --- /dev/null +++ b/.riot/requirements/44eeaa9.txt @@ -0,0 +1,28 @@ +# +# This file is autogenerated by pip-compile with Python 3.13 +# by the following 
command: +# +# pip-compile --allow-unsafe --no-annotate .riot/requirements/44eeaa9.in +# +aiohappyeyeballs==2.4.3 +aiohttp==3.10.9 +aiosignal==1.3.1 +attrs==24.2.0 +coverage[toml]==7.6.1 +frozenlist==1.4.1 +hypothesis==6.45.0 +idna==3.10 +iniconfig==2.0.0 +mock==5.1.0 +multidict==6.1.0 +opentracing==2.4.0 +packaging==24.1 +pluggy==1.5.0 +pytest==8.3.3 +pytest-aiohttp==1.0.5 +pytest-asyncio==0.23.7 +pytest-cov==5.0.0 +pytest-mock==3.14.0 +pytest-randomly==3.15.0 +sortedcontainers==2.4.0 +yarl==1.13.1 diff --git a/.riot/requirements/457db9b.txt b/.riot/requirements/457db9b.txt index 28def9f2321..12da4c338c5 100644 --- a/.riot/requirements/457db9b.txt +++ b/.riot/requirements/457db9b.txt @@ -4,39 +4,39 @@ # # pip-compile --allow-unsafe --no-annotate .riot/requirements/457db9b.in # -ai21==2.14.1 +ai21==3.0.1 ai21-tokenizer==0.12.0 -aiohappyeyeballs==2.4.0 -aiohttp==3.10.5 +aiohappyeyeballs==2.4.4 +aiohttp==3.11.10 aiosignal==1.3.1 annotated-types==0.7.0 anthropic==0.26.0 -anyio==4.4.0 +anyio==4.7.0 attrs==24.2.0 boto3==1.34.51 botocore==1.34.51 certifi==2024.8.30 -charset-normalizer==3.3.2 -cohere==5.9.1 -coverage[toml]==7.6.1 +charset-normalizer==3.4.0 +cohere==5.13.3 +coverage[toml]==7.6.9 dataclasses-json==0.6.7 defusedxml==0.7.1 distro==1.9.0 exceptiongroup==1.2.2 fastavro==1.9.7 -filelock==3.16.0 -frozenlist==1.4.1 -fsspec==2024.9.0 +filelock==3.16.1 +frozenlist==1.5.0 +fsspec==2024.10.0 greenlet==3.0.3 h11==0.14.0 -httpcore==1.0.5 +httpcore==1.0.7 httpx==0.27.2 httpx-sse==0.4.0 -huggingface-hub==0.24.6 +huggingface-hub==0.26.5 hypothesis==6.45.0 -idna==3.8 +idna==3.10 iniconfig==2.0.0 -jiter==0.5.0 +jiter==0.8.0 jmespath==1.0.1 jsonpatch==1.33 jsonpointer==3.0.0 @@ -48,48 +48,50 @@ langchain-core==0.2.0 langchain-openai==0.1.7 langchain-pinecone==0.1.3 langchain-text-splitters==0.2.1 -langsmith==0.1.117 -marshmallow==3.22.0 +langsmith==0.1.147 +marshmallow==3.23.1 mock==5.1.0 multidict==6.1.0 mypy-extensions==1.0.0 numexpr==2.8.5 numpy==1.26.4 -openai==1.44.1 
+openai==1.57.0 opentracing==2.4.0 -orjson==3.10.7 +orjson==3.10.12 packaging==23.2 parameterized==0.9.0 pinecone-client==5.0.1 -pinecone-plugin-inference==1.0.3 +pinecone-plugin-inference==1.1.0 pinecone-plugin-interface==0.0.7 pluggy==1.5.0 -psutil==6.0.0 -pydantic==2.9.1 -pydantic-core==2.23.3 -pytest==8.3.3 +propcache==0.2.1 +psutil==6.1.0 +pydantic==2.10.3 +pydantic-core==2.27.1 +pytest==8.3.4 pytest-asyncio==0.23.7 -pytest-cov==5.0.0 +pytest-cov==6.0.0 pytest-mock==3.14.0 pytest-randomly==3.10.1 python-dateutil==2.9.0.post0 pyyaml==6.0.2 -regex==2024.7.24 +regex==2024.11.6 requests==2.32.3 -s3transfer==0.10.2 +requests-toolbelt==1.0.0 +s3transfer==0.10.4 sentencepiece==0.2.0 -six==1.16.0 +six==1.17.0 sniffio==1.3.1 sortedcontainers==2.4.0 -sqlalchemy==2.0.34 +sqlalchemy==2.0.36 tenacity==8.5.0 -tiktoken==0.7.0 -tokenizers==0.20.0 -tqdm==4.66.5 -types-requests==2.32.0.20240907 +tiktoken==0.8.0 +tokenizers==0.21.0 +tqdm==4.67.1 +types-requests==2.32.0.20241016 typing-extensions==4.12.2 typing-inspect==0.9.0 urllib3==2.0.7 vcrpy==5.1.0 -wrapt==1.16.0 -yarl==1.11.1 +wrapt==1.17.0 +yarl==1.18.3 diff --git a/.riot/requirements/498209d.txt b/.riot/requirements/498209d.txt new file mode 100644 index 00000000000..b975548628c --- /dev/null +++ b/.riot/requirements/498209d.txt @@ -0,0 +1,22 @@ +# +# This file is autogenerated by pip-compile with Python 3.11 +# by the following command: +# +# pip-compile --allow-unsafe --no-annotate .riot/requirements/498209d.in +# +attrs==24.3.0 +coverage[toml]==7.6.9 +hypothesis==6.45.0 +iniconfig==2.0.0 +mock==5.1.0 +msgpack==1.1.0 +opentracing==2.4.0 +packaging==24.2 +pluggy==1.5.0 +py-cpuinfo==9.0.0 +pytest==8.3.4 +pytest-benchmark==4.0.0 +pytest-cov==6.0.0 +pytest-mock==3.14.0 +pytest-randomly==3.16.0 +sortedcontainers==2.4.0 diff --git a/.riot/requirements/4c87f15.txt b/.riot/requirements/4c87f15.txt index e5138ff00ca..1ab11af15de 100644 --- a/.riot/requirements/4c87f15.txt +++ b/.riot/requirements/4c87f15.txt @@ -2,21 +2,20 @@ # 
This file is autogenerated by pip-compile with Python 3.11 # by the following command: # -# pip-compile --no-annotate .riot/requirements/4c87f15.in +# pip-compile --allow-unsafe --no-annotate .riot/requirements/4c87f15.in # -async-timeout==4.0.3 -asyncpg==0.29.0 -attrs==23.2.0 -coverage[toml]==7.4.0 +asyncpg==0.30.0 +attrs==24.2.0 +coverage[toml]==7.6.9 hypothesis==6.45.0 iniconfig==2.0.0 mock==5.1.0 opentracing==2.4.0 -packaging==23.2 -pluggy==1.3.0 -pytest==7.4.4 -pytest-asyncio==0.21.1 -pytest-cov==4.1.0 -pytest-mock==3.12.0 -pytest-randomly==3.15.0 +packaging==24.2 +pluggy==1.5.0 +pytest==8.3.4 +pytest-asyncio==0.21.2 +pytest-cov==6.0.0 +pytest-mock==3.14.0 +pytest-randomly==3.16.0 sortedcontainers==2.4.0 diff --git a/.riot/requirements/4d1fa34.txt b/.riot/requirements/4d1fa34.txt new file mode 100644 index 00000000000..e4b768d197a --- /dev/null +++ b/.riot/requirements/4d1fa34.txt @@ -0,0 +1,20 @@ +# +# This file is autogenerated by pip-compile with Python 3.13 +# by the following command: +# +# pip-compile --allow-unsafe --no-annotate .riot/requirements/4d1fa34.in +# +attrs==24.2.0 +coverage[toml]==7.6.9 +hypothesis==6.45.0 +iniconfig==2.0.0 +lxml==5.3.0 +mock==5.1.0 +opentracing==2.4.0 +packaging==24.2 +pluggy==1.5.0 +pytest==8.3.4 +pytest-cov==6.0.0 +pytest-mock==3.14.0 +ruamel-yaml==0.18.6 +sortedcontainers==2.4.0 diff --git a/.riot/requirements/4ed631d.txt b/.riot/requirements/4ed631d.txt index b627027f63d..a63d5635068 100644 --- a/.riot/requirements/4ed631d.txt +++ b/.riot/requirements/4ed631d.txt @@ -5,16 +5,16 @@ # pip-compile --allow-unsafe --no-annotate .riot/requirements/4ed631d.in # attrs==24.2.0 -coverage[toml]==7.6.1 +coverage[toml]==7.6.9 hypothesis==6.45.0 iniconfig==2.0.0 -mariadb==1.1.10 +mariadb==1.1.11 mock==5.1.0 opentracing==2.4.0 -packaging==24.1 +packaging==24.2 pluggy==1.5.0 -pytest==8.3.2 -pytest-cov==5.0.0 +pytest==8.3.4 +pytest-cov==6.0.0 pytest-mock==3.14.0 -pytest-randomly==3.15.0 +pytest-randomly==3.16.0 sortedcontainers==2.4.0 
diff --git a/.riot/requirements/4fd1520.txt b/.riot/requirements/4fd1520.txt new file mode 100644 index 00000000000..88c1fc5703a --- /dev/null +++ b/.riot/requirements/4fd1520.txt @@ -0,0 +1,23 @@ +# +# This file is autogenerated by pip-compile with Python 3.13 +# by the following command: +# +# pip-compile --allow-unsafe --no-annotate .riot/requirements/4fd1520.in +# +attrs==24.2.0 +coverage[toml]==7.6.1 +decorator==5.1.1 +dogpile-cache==1.3.3 +hypothesis==6.45.0 +iniconfig==2.0.0 +mock==5.1.0 +opentracing==2.4.0 +packaging==24.1 +pbr==6.1.0 +pluggy==1.5.0 +pytest==8.3.3 +pytest-cov==5.0.0 +pytest-mock==3.14.0 +pytest-randomly==3.15.0 +sortedcontainers==2.4.0 +stevedore==5.3.0 diff --git a/.riot/requirements/55a4977.txt b/.riot/requirements/55a4977.txt index d2fdde55602..9c65a62f94a 100644 --- a/.riot/requirements/55a4977.txt +++ b/.riot/requirements/55a4977.txt @@ -4,39 +4,39 @@ # # pip-compile --allow-unsafe --no-annotate .riot/requirements/55a4977.in # -ai21==2.14.1 +ai21==3.0.1 ai21-tokenizer==0.12.0 -aiohappyeyeballs==2.4.0 -aiohttp==3.10.5 +aiohappyeyeballs==2.4.4 +aiohttp==3.11.10 aiosignal==1.3.1 annotated-types==0.7.0 anthropic==0.26.0 -anyio==4.4.0 +anyio==4.7.0 attrs==24.2.0 boto3==1.34.51 botocore==1.34.51 certifi==2024.8.30 -charset-normalizer==3.3.2 -cohere==5.9.1 -coverage[toml]==7.6.1 +charset-normalizer==3.4.0 +cohere==5.13.3 +coverage[toml]==7.6.9 dataclasses-json==0.6.7 defusedxml==0.7.1 distro==1.9.0 exceptiongroup==1.2.2 fastavro==1.9.7 -filelock==3.16.0 -frozenlist==1.4.1 -fsspec==2024.9.0 +filelock==3.16.1 +frozenlist==1.5.0 +fsspec==2024.10.0 greenlet==3.0.3 h11==0.14.0 -httpcore==1.0.5 +httpcore==1.0.7 httpx==0.27.2 httpx-sse==0.4.0 -huggingface-hub==0.24.6 +huggingface-hub==0.26.5 hypothesis==6.45.0 -idna==3.8 +idna==3.10 iniconfig==2.0.0 -jiter==0.5.0 +jiter==0.8.0 jmespath==1.0.1 jsonpatch==1.33 jsonpointer==3.0.0 @@ -48,48 +48,50 @@ langchain-core==0.2.0 langchain-openai==0.1.7 langchain-pinecone==0.1.3 langchain-text-splitters==0.2.1 
-langsmith==0.1.117 -marshmallow==3.22.0 +langsmith==0.1.147 +marshmallow==3.23.1 mock==5.1.0 multidict==6.1.0 mypy-extensions==1.0.0 numexpr==2.8.5 numpy==1.26.4 -openai==1.44.1 +openai==1.57.0 opentracing==2.4.0 -orjson==3.10.7 +orjson==3.10.12 packaging==23.2 parameterized==0.9.0 pinecone-client==5.0.1 -pinecone-plugin-inference==1.0.3 +pinecone-plugin-inference==1.1.0 pinecone-plugin-interface==0.0.7 pluggy==1.5.0 -psutil==6.0.0 -pydantic==2.9.1 -pydantic-core==2.23.3 -pytest==8.3.3 +propcache==0.2.1 +psutil==6.1.0 +pydantic==2.10.3 +pydantic-core==2.27.1 +pytest==8.3.4 pytest-asyncio==0.23.7 -pytest-cov==5.0.0 +pytest-cov==6.0.0 pytest-mock==3.14.0 pytest-randomly==3.10.1 python-dateutil==2.9.0.post0 pyyaml==6.0.2 -regex==2024.7.24 +regex==2024.11.6 requests==2.32.3 -s3transfer==0.10.2 +requests-toolbelt==1.0.0 +s3transfer==0.10.4 sentencepiece==0.2.0 -six==1.16.0 +six==1.17.0 sniffio==1.3.1 sortedcontainers==2.4.0 -sqlalchemy==2.0.34 +sqlalchemy==2.0.36 tenacity==8.5.0 -tiktoken==0.7.0 -tokenizers==0.20.0 -tqdm==4.66.5 -types-requests==2.32.0.20240907 +tiktoken==0.8.0 +tokenizers==0.21.0 +tqdm==4.67.1 +types-requests==2.32.0.20241016 typing-extensions==4.12.2 typing-inspect==0.9.0 urllib3==2.0.7 vcrpy==5.1.0 -wrapt==1.16.0 -yarl==1.11.1 +wrapt==1.17.0 +yarl==1.18.3 diff --git a/.riot/requirements/585e779.txt b/.riot/requirements/585e779.txt index 2429e3e442d..3e328720bd3 100644 --- a/.riot/requirements/585e779.txt +++ b/.riot/requirements/585e779.txt @@ -4,32 +4,32 @@ # # pip-compile --allow-unsafe --no-annotate .riot/requirements/585e779.in # -ai21==2.14.1 +ai21==3.0.1 ai21-tokenizer==0.12.0 -aiohappyeyeballs==2.4.0 -aiohttp==3.10.5 +aiohappyeyeballs==2.4.4 +aiohttp==3.11.10 aiosignal==1.3.1 -anyio==4.4.0 +anyio==4.7.0 attrs==24.2.0 backoff==2.2.1 certifi==2024.8.30 -charset-normalizer==3.3.2 +charset-normalizer==3.4.0 cohere==4.57 -coverage[toml]==7.6.1 +coverage[toml]==7.6.9 dataclasses-json==0.5.14 -dnspython==2.6.1 +dnspython==2.7.0 exceptiongroup==1.2.2 
fastavro==1.9.7 -filelock==3.16.0 -frozenlist==1.4.1 -fsspec==2024.9.0 +filelock==3.16.1 +frozenlist==1.5.0 +fsspec==2024.10.0 greenlet==3.0.3 h11==0.14.0 -httpcore==1.0.5 +httpcore==1.0.7 httpx==0.27.2 -huggingface-hub==0.24.6 +huggingface-hub==0.26.5 hypothesis==6.45.0 -idna==3.8 +idna==3.10 importlib-metadata==6.11.0 iniconfig==2.0.0 jsonpatch==1.33 @@ -39,8 +39,8 @@ langchain-community==0.0.14 langchain-core==0.1.23 langchainplus-sdk==0.0.4 langsmith==0.0.87 -loguru==0.7.2 -marshmallow==3.22.0 +loguru==0.7.3 +marshmallow==3.23.1 mock==5.1.0 multidict==6.1.0 mypy-extensions==1.0.0 @@ -52,30 +52,31 @@ opentracing==2.4.0 packaging==23.2 pinecone-client==2.2.4 pluggy==1.5.0 -psutil==6.0.0 -pydantic==1.10.18 -pytest==8.3.3 +propcache==0.2.1 +psutil==6.1.0 +pydantic==1.10.19 +pytest==8.3.4 pytest-asyncio==0.23.7 -pytest-cov==5.0.0 +pytest-cov==6.0.0 pytest-mock==3.14.0 pytest-randomly==3.10.1 python-dateutil==2.9.0.post0 pyyaml==6.0.2 -regex==2024.7.24 +regex==2024.11.6 requests==2.32.3 sentencepiece==0.2.0 -six==1.16.0 +six==1.17.0 sniffio==1.3.1 sortedcontainers==2.4.0 -sqlalchemy==2.0.34 +sqlalchemy==2.0.36 tenacity==8.5.0 -tiktoken==0.7.0 -tokenizers==0.20.0 -tqdm==4.66.5 +tiktoken==0.8.0 +tokenizers==0.21.0 +tqdm==4.67.1 typing-extensions==4.12.2 typing-inspect==0.9.0 -urllib3==2.2.2 +urllib3==2.2.3 vcrpy==6.0.1 -wrapt==1.16.0 -yarl==1.11.1 -zipp==3.20.1 +wrapt==1.17.0 +yarl==1.18.3 +zipp==3.21.0 diff --git a/.riot/requirements/5b922fc.txt b/.riot/requirements/5b922fc.txt new file mode 100644 index 00000000000..ff7fa5e6ba6 --- /dev/null +++ b/.riot/requirements/5b922fc.txt @@ -0,0 +1,45 @@ +# +# This file is autogenerated by pip-compile with Python 3.13 +# by the following command: +# +# pip-compile --allow-unsafe --no-annotate .riot/requirements/5b922fc.in +# +asgiref==3.8.1 +attrs==24.2.0 +certifi==2024.8.30 +charset-normalizer==2.1.1 +click==7.1.2 +coverage[toml]==7.6.1 +flask==1.1.4 +gevent==24.2.1 +greenlet==3.1.1 +hypothesis==6.45.0 +idna==3.10 
+iniconfig==2.0.0 +itsdangerous==1.1.0 +jinja2==2.11.3 +markupsafe==2.0.1 +mock==5.1.0 +opentelemetry-api==1.0.0 +opentelemetry-instrumentation==0.19b0 +opentelemetry-instrumentation-flask==0.19b0 +opentelemetry-instrumentation-wsgi==0.19b0 +opentelemetry-util-http==0.19b0 +opentracing==2.4.0 +packaging==24.1 +pluggy==1.5.0 +pytest==8.3.3 +pytest-asyncio==0.21.1 +pytest-cov==5.0.0 +pytest-mock==3.14.0 +pytest-randomly==3.15.0 +requests==2.28.1 +sortedcontainers==2.4.0 +urllib3==1.26.20 +werkzeug==1.0.1 +wrapt==1.16.0 +zope-event==5.0 +zope-interface==7.0.3 + +# The following packages are considered to be unsafe in a requirements file: +setuptools==75.1.0 diff --git a/.riot/requirements/6cf373b.txt b/.riot/requirements/6cf373b.txt new file mode 100644 index 00000000000..e69fda1f1ed --- /dev/null +++ b/.riot/requirements/6cf373b.txt @@ -0,0 +1,19 @@ +# +# This file is autogenerated by pip-compile with Python 3.13 +# by the following command: +# +# pip-compile --allow-unsafe --no-annotate .riot/requirements/6cf373b.in +# +attrs==24.2.0 +coverage[toml]==7.6.1 +hypothesis==6.45.0 +iniconfig==2.0.0 +mock==5.1.0 +opentracing==2.4.0 +packaging==24.1 +pluggy==1.5.0 +pytest==8.3.3 +pytest-cov==5.0.0 +pytest-mock==3.14.0 +pytest-randomly==3.15.0 +sortedcontainers==2.4.0 diff --git a/.riot/requirements/6ebd15f.txt b/.riot/requirements/6ebd15f.txt index e15eb9136da..8665e48a87a 100644 --- a/.riot/requirements/6ebd15f.txt +++ b/.riot/requirements/6ebd15f.txt @@ -2,24 +2,24 @@ # This file is autogenerated by pip-compile with Python 3.9 # by the following command: # -# pip-compile --no-annotate .riot/requirements/6ebd15f.in +# pip-compile --allow-unsafe --no-annotate .riot/requirements/6ebd15f.in # asyncpg==0.23.0 -attrs==23.2.0 -coverage[toml]==7.5.4 -exceptiongroup==1.2.1 +attrs==24.2.0 +coverage[toml]==7.6.9 +exceptiongroup==1.2.2 hypothesis==6.45.0 -importlib-metadata==8.0.0 +importlib-metadata==8.5.0 iniconfig==2.0.0 mock==5.1.0 opentracing==2.4.0 -packaging==24.1 
+packaging==24.2 pluggy==1.5.0 -pytest==8.2.2 +pytest==8.3.4 pytest-asyncio==0.21.2 -pytest-cov==5.0.0 +pytest-cov==6.0.0 pytest-mock==3.14.0 -pytest-randomly==3.15.0 +pytest-randomly==3.16.0 sortedcontainers==2.4.0 -tomli==2.0.1 -zipp==3.19.2 +tomli==2.2.1 +zipp==3.21.0 diff --git a/.riot/requirements/70e034f.txt b/.riot/requirements/70e034f.txt new file mode 100644 index 00000000000..12950d5019e --- /dev/null +++ b/.riot/requirements/70e034f.txt @@ -0,0 +1,24 @@ +# +# This file is autogenerated by pip-compile with Python 3.13 +# by the following command: +# +# pip-compile --allow-unsafe --no-annotate .riot/requirements/70e034f.in +# +attrs==24.2.0 +cattrs==22.2.0 +coverage[toml]==7.6.1 +hypothesis==6.45.0 +iniconfig==2.0.0 +mock==5.1.0 +molten==1.0.2 +mypy-extensions==1.0.0 +opentracing==2.4.0 +packaging==24.1 +pluggy==1.5.0 +pytest==8.3.3 +pytest-cov==5.0.0 +pytest-mock==3.14.0 +pytest-randomly==3.15.0 +sortedcontainers==2.4.0 +typing-extensions==3.10.0.2 +typing-inspect==0.6.0 diff --git a/.riot/requirements/73109d5.txt b/.riot/requirements/73109d5.txt new file mode 100644 index 00000000000..42b5dd0e30c --- /dev/null +++ b/.riot/requirements/73109d5.txt @@ -0,0 +1,29 @@ +# +# This file is autogenerated by pip-compile with Python 3.7 +# by the following command: +# +# pip-compile --allow-unsafe --config=pyproject.toml --no-annotate --resolver=backtracking .riot/requirements/73109d5.in +# +attrs==24.2.0 +azure-functions==1.21.3 +certifi==2024.8.30 +charset-normalizer==3.4.0 +coverage[toml]==7.2.7 +exceptiongroup==1.2.2 +hypothesis==6.45.0 +idna==3.10 +importlib-metadata==6.7.0 +iniconfig==2.0.0 +mock==5.1.0 +opentracing==2.4.0 +packaging==24.0 +pluggy==1.2.0 +pytest==7.4.4 +pytest-cov==4.1.0 +pytest-mock==3.11.1 +requests==2.31.0 +sortedcontainers==2.4.0 +tomli==2.0.1 +typing-extensions==4.7.1 +urllib3==2.0.7 +zipp==3.15.0 diff --git a/.riot/requirements/74ccb83.txt b/.riot/requirements/74ccb83.txt new file mode 100644 index 00000000000..9a3462b41cd --- /dev/null 
+++ b/.riot/requirements/74ccb83.txt @@ -0,0 +1,20 @@ +# +# This file is autogenerated by pip-compile with Python 3.13 +# by the following command: +# +# pip-compile --allow-unsafe --no-annotate .riot/requirements/74ccb83.in +# +attrs==24.2.0 +coverage[toml]==7.6.1 +hypothesis==6.45.0 +iniconfig==2.0.0 +mock==5.1.0 +opentracing==2.4.0 +packaging==24.1 +pluggy==1.5.0 +pytest==8.3.3 +pytest-cov==5.0.0 +pytest-mock==3.14.0 +pytest-randomly==3.15.0 +sortedcontainers==2.4.0 +urllib3==2.2.3 diff --git a/.riot/requirements/769aa27.txt b/.riot/requirements/769aa27.txt index be5205a4bd0..3b3c8a013dd 100644 --- a/.riot/requirements/769aa27.txt +++ b/.riot/requirements/769aa27.txt @@ -5,16 +5,16 @@ # pip-compile --allow-unsafe --no-annotate .riot/requirements/769aa27.in # attrs==24.2.0 -coverage[toml]==7.6.1 +coverage[toml]==7.6.9 hypothesis==6.45.0 iniconfig==2.0.0 -mariadb==1.1.10 +mariadb==1.1.11 mock==5.1.0 opentracing==2.4.0 -packaging==24.1 +packaging==24.2 pluggy==1.5.0 -pytest==8.3.2 -pytest-cov==5.0.0 +pytest==8.3.4 +pytest-cov==6.0.0 pytest-mock==3.14.0 -pytest-randomly==3.15.0 +pytest-randomly==3.16.0 sortedcontainers==2.4.0 diff --git a/.riot/requirements/788c304.txt b/.riot/requirements/788c304.txt new file mode 100644 index 00000000000..36e1cd013d9 --- /dev/null +++ b/.riot/requirements/788c304.txt @@ -0,0 +1,27 @@ +# +# This file is autogenerated by pip-compile with Python 3.13 +# by the following command: +# +# pip-compile --allow-unsafe --no-annotate .riot/requirements/788c304.in +# +anyio==4.6.0 +attrs==24.2.0 +certifi==2024.8.30 +coverage[toml]==7.6.1 +h11==0.14.0 +httpcore==1.0.6 +httpx==0.27.2 +hypothesis==6.45.0 +idna==3.10 +iniconfig==2.0.0 +mock==5.1.0 +opentracing==2.4.0 +packaging==24.1 +pluggy==1.5.0 +pytest==8.3.3 +pytest-asyncio==0.21.1 +pytest-cov==5.0.0 +pytest-mock==3.14.0 +pytest-randomly==3.15.0 +sniffio==1.3.1 +sortedcontainers==2.4.0 diff --git a/.riot/requirements/7a40e08.txt b/.riot/requirements/7a40e08.txt new file mode 100644 index 
00000000000..a770877b6ee --- /dev/null +++ b/.riot/requirements/7a40e08.txt @@ -0,0 +1,22 @@ +# +# This file is autogenerated by pip-compile with Python 3.13 +# by the following command: +# +# pip-compile --allow-unsafe --no-annotate .riot/requirements/7a40e08.in +# +attrs==24.2.0 +certifi==2024.8.30 +coverage[toml]==7.6.1 +elasticsearch7==7.13.4 +hypothesis==6.45.0 +iniconfig==2.0.0 +mock==5.1.0 +opentracing==2.4.0 +packaging==24.1 +pluggy==1.5.0 +pytest==8.3.3 +pytest-cov==5.0.0 +pytest-mock==3.14.0 +pytest-randomly==3.15.0 +sortedcontainers==2.4.0 +urllib3==1.26.20 diff --git a/.riot/requirements/921bc6c.txt b/.riot/requirements/7bbf828.txt similarity index 65% rename from .riot/requirements/921bc6c.txt rename to .riot/requirements/7bbf828.txt index fd44244070f..e1c39713bce 100644 --- a/.riot/requirements/921bc6c.txt +++ b/.riot/requirements/7bbf828.txt @@ -1,21 +1,21 @@ # -# This file is autogenerated by pip-compile with Python 3.12 +# This file is autogenerated by pip-compile with Python 3.13 # by the following command: # -# pip-compile --allow-unsafe --no-annotate .riot/requirements/921bc6c.in +# pip-compile --allow-unsafe --no-annotate .riot/requirements/7bbf828.in # annotated-types==0.7.0 -anyio==4.6.2.post1 +anyio==4.7.0 attrs==24.2.0 -boto3==1.35.62 -botocore==1.35.62 +boto3==1.35.78 +botocore==1.35.78 certifi==2024.8.30 -coverage[toml]==7.6.7 -fastapi==0.115.5 +coverage[toml]==7.6.9 +fastapi==0.115.6 h11==0.14.0 httpcore==1.0.7 httpretty==1.1.4 -httpx==0.27.2 +httpx==0.28.1 hypothesis==6.45.0 idna==3.10 iniconfig==2.0.0 @@ -25,22 +25,22 @@ msgpack==1.1.0 opentracing==2.4.0 packaging==24.2 pluggy==1.5.0 -pydantic==2.9.2 -pydantic-core==2.23.4 -pytest==8.3.3 +pydantic==2.10.3 +pydantic-core==2.27.1 +pytest==8.3.4 pytest-cov==6.0.0 pytest-mock==3.14.0 pytest-randomly==3.16.0 python-dateutil==2.9.0.post0 -s3transfer==0.10.3 -six==1.16.0 +s3transfer==0.10.4 +six==1.17.0 sniffio==1.3.1 sortedcontainers==2.4.0 -starlette==0.41.2 +starlette==0.41.3 
structlog==24.4.0 typing-extensions==4.12.2 urllib3==2.2.3 -wheel==0.45.0 +wheel==0.45.1 # The following packages are considered to be unsafe in a requirements file: -setuptools==75.5.0 +setuptools==75.6.0 diff --git a/.riot/requirements/85acf6e.txt b/.riot/requirements/85acf6e.txt index 0bb20810181..d5d68c47b81 100644 --- a/.riot/requirements/85acf6e.txt +++ b/.riot/requirements/85acf6e.txt @@ -5,18 +5,18 @@ # pip-compile --allow-unsafe --no-annotate .riot/requirements/85acf6e.in # attrs==24.2.0 -coverage[toml]==7.6.1 +coverage[toml]==7.6.9 exceptiongroup==1.2.2 hypothesis==6.45.0 iniconfig==2.0.0 mariadb==1.0.11 mock==5.1.0 opentracing==2.4.0 -packaging==24.1 +packaging==24.2 pluggy==1.5.0 -pytest==8.3.2 -pytest-cov==5.0.0 +pytest==8.3.4 +pytest-cov==6.0.0 pytest-mock==3.14.0 -pytest-randomly==3.15.0 +pytest-randomly==3.16.0 sortedcontainers==2.4.0 -tomli==2.0.1 +tomli==2.2.1 diff --git a/.riot/requirements/85c8e30.txt b/.riot/requirements/85c8e30.txt index e24c6ea9c30..4d03cfcf992 100644 --- a/.riot/requirements/85c8e30.txt +++ b/.riot/requirements/85c8e30.txt @@ -2,21 +2,21 @@ # This file is autogenerated by pip-compile with Python 3.7 # by the following command: # -# pip-compile --config=pyproject.toml --no-annotate --resolver=backtracking .riot/requirements/85c8e30.in +# pip-compile --allow-unsafe --config=pyproject.toml --no-annotate --resolver=backtracking .riot/requirements/85c8e30.in # asyncpg==0.28.0 -attrs==23.2.0 +attrs==24.2.0 coverage[toml]==7.2.7 -exceptiongroup==1.2.0 +exceptiongroup==1.2.2 hypothesis==6.45.0 importlib-metadata==6.7.0 iniconfig==2.0.0 mock==5.1.0 opentracing==2.4.0 -packaging==23.2 +packaging==24.0 pluggy==1.2.0 pytest==7.4.4 -pytest-asyncio==0.21.1 +pytest-asyncio==0.21.2 pytest-cov==4.1.0 pytest-mock==3.11.1 pytest-randomly==3.12.0 diff --git a/.riot/requirements/8a17cb2.txt b/.riot/requirements/8a17cb2.txt index 3d204884507..f8916e4d431 100644 --- a/.riot/requirements/8a17cb2.txt +++ b/.riot/requirements/8a17cb2.txt @@ -8,17 
+8,17 @@ attrs==24.2.0 coverage[toml]==7.6.1 exceptiongroup==1.2.2 hypothesis==6.45.0 -importlib-metadata==8.4.0 +importlib-metadata==8.5.0 iniconfig==2.0.0 -mariadb==1.1.10 +mariadb==1.1.11 mock==5.1.0 opentracing==2.4.0 -packaging==24.1 +packaging==24.2 pluggy==1.5.0 -pytest==8.3.2 +pytest==8.3.4 pytest-cov==5.0.0 pytest-mock==3.14.0 pytest-randomly==3.15.0 sortedcontainers==2.4.0 -tomli==2.0.1 -zipp==3.20.0 +tomli==2.2.1 +zipp==3.20.2 diff --git a/.riot/requirements/8ce955f.txt b/.riot/requirements/8ce955f.txt new file mode 100644 index 00000000000..6a3a0e63588 --- /dev/null +++ b/.riot/requirements/8ce955f.txt @@ -0,0 +1,28 @@ +# +# This file is autogenerated by pip-compile with Python 3.13 +# by the following command: +# +# pip-compile --allow-unsafe --no-annotate .riot/requirements/8ce955f.in +# +anyio==4.6.0 +attrs==24.2.0 +certifi==2024.8.30 +coverage[toml]==7.6.1 +h11==0.14.0 +httpcore==0.16.3 +httpx==0.23.3 +hypothesis==6.45.0 +idna==3.10 +iniconfig==2.0.0 +mock==5.1.0 +opentracing==2.4.0 +packaging==24.1 +pluggy==1.5.0 +pytest==8.3.3 +pytest-asyncio==0.21.1 +pytest-cov==5.0.0 +pytest-mock==3.14.0 +pytest-randomly==3.15.0 +rfc3986[idna2008]==1.5.0 +sniffio==1.3.1 +sortedcontainers==2.4.0 diff --git a/.riot/requirements/91fe586.txt b/.riot/requirements/91fe586.txt new file mode 100644 index 00000000000..46d48acec17 --- /dev/null +++ b/.riot/requirements/91fe586.txt @@ -0,0 +1,25 @@ +# +# This file is autogenerated by pip-compile with Python 3.13 +# by the following command: +# +# pip-compile --allow-unsafe --no-annotate .riot/requirements/91fe586.in +# +attrs==24.2.0 +certifi==2024.8.30 +charset-normalizer==3.3.2 +coverage[toml]==7.6.1 +hypothesis==6.45.0 +idna==3.10 +iniconfig==2.0.0 +mock==5.1.0 +opentracing==2.4.0 +packaging==24.1 +pluggy==1.5.0 +pytest==8.3.3 +pytest-cov==5.0.0 +pytest-mock==3.14.0 +pytest-randomly==3.15.0 +requests==2.32.3 +requests-mock==1.12.1 +sortedcontainers==2.4.0 +urllib3==1.26.20 diff --git a/.riot/requirements/9a07d4a.txt 
b/.riot/requirements/9a07d4a.txt new file mode 100644 index 00000000000..027306e2816 --- /dev/null +++ b/.riot/requirements/9a07d4a.txt @@ -0,0 +1,23 @@ +# +# This file is autogenerated by pip-compile with Python 3.13 +# by the following command: +# +# pip-compile --allow-unsafe --no-annotate .riot/requirements/9a07d4a.in +# +amqp==5.2.0 +attrs==24.2.0 +coverage[toml]==7.6.1 +hypothesis==6.45.0 +iniconfig==2.0.0 +kombu==5.4.2 +mock==5.1.0 +opentracing==2.4.0 +packaging==24.1 +pluggy==1.5.0 +pytest==8.3.3 +pytest-cov==5.0.0 +pytest-mock==3.14.0 +pytest-randomly==3.15.0 +sortedcontainers==2.4.0 +tzdata==2024.2 +vine==5.1.0 diff --git a/.riot/requirements/9a5c0d9.txt b/.riot/requirements/9a5c0d9.txt new file mode 100644 index 00000000000..edab275315a --- /dev/null +++ b/.riot/requirements/9a5c0d9.txt @@ -0,0 +1,32 @@ +# +# This file is autogenerated by pip-compile with Python 3.13 +# by the following command: +# +# pip-compile --allow-unsafe --no-annotate .riot/requirements/9a5c0d9.in +# +attrs==24.2.0 +certifi==2024.8.30 +charset-normalizer==3.3.2 +coverage[toml]==7.6.1 +gevent==24.2.1 +greenlet==3.1.1 +gunicorn==23.0.0 +hypothesis==6.45.0 +idna==3.10 +iniconfig==2.0.0 +mock==5.1.0 +opentracing==2.4.0 +packaging==24.1 +pluggy==1.5.0 +pytest==8.3.3 +pytest-cov==5.0.0 +pytest-mock==3.14.0 +pytest-randomly==3.15.0 +requests==2.32.3 +sortedcontainers==2.4.0 +urllib3==2.2.3 +zope-event==5.0 +zope-interface==7.0.3 + +# The following packages are considered to be unsafe in a requirements file: +setuptools==75.1.0 diff --git a/.riot/requirements/9a81f68.txt b/.riot/requirements/9a81f68.txt index de20e1d5784..83a8d9649f8 100644 --- a/.riot/requirements/9a81f68.txt +++ b/.riot/requirements/9a81f68.txt @@ -5,20 +5,20 @@ # pip-compile --allow-unsafe --no-annotate .riot/requirements/9a81f68.in # attrs==24.2.0 -coverage[toml]==7.6.1 +coverage[toml]==7.6.9 exceptiongroup==1.2.2 hypothesis==6.45.0 -importlib-metadata==8.4.0 +importlib-metadata==8.5.0 iniconfig==2.0.0 mock==5.1.0 
opentracing==2.4.0 -packaging==24.1 +packaging==24.2 pluggy==1.5.0 -pyodbc==5.1.0 -pytest==8.3.2 -pytest-cov==5.0.0 +pyodbc==5.2.0 +pytest==8.3.4 +pytest-cov==6.0.0 pytest-mock==3.14.0 -pytest-randomly==3.15.0 +pytest-randomly==3.16.0 sortedcontainers==2.4.0 -tomli==2.0.1 -zipp==3.20.0 +tomli==2.2.1 +zipp==3.21.0 diff --git a/.riot/requirements/a0cc2a4.txt b/.riot/requirements/a0cc2a4.txt new file mode 100644 index 00000000000..f724ecdac7a --- /dev/null +++ b/.riot/requirements/a0cc2a4.txt @@ -0,0 +1,21 @@ +# +# This file is autogenerated by pip-compile with Python 3.13 +# by the following command: +# +# pip-compile --allow-unsafe --no-annotate .riot/requirements/a0cc2a4.in +# +attrs==24.2.0 +coverage[toml]==7.6.1 +hypothesis==6.45.0 +iniconfig==2.0.0 +mock==5.1.0 +opentracing==2.4.0 +packaging==24.1 +pluggy==1.5.0 +pymemcache==3.5.2 +pytest==8.3.3 +pytest-cov==5.0.0 +pytest-mock==3.14.0 +pytest-randomly==3.15.0 +six==1.16.0 +sortedcontainers==2.4.0 diff --git a/.riot/requirements/a311bc2.txt b/.riot/requirements/a311bc2.txt index 48ecb036fea..42f18e00ad7 100644 --- a/.riot/requirements/a311bc2.txt +++ b/.riot/requirements/a311bc2.txt @@ -4,103 +4,106 @@ # # pip-compile --allow-unsafe --no-annotate .riot/requirements/a311bc2.in # -ai21==2.14.1 +ai21==3.0.1 ai21-tokenizer==0.12.0 -aiohappyeyeballs==2.4.0 -aiohttp==3.10.5 +aiohttp==3.9.5 aiosignal==1.3.1 annotated-types==0.7.0 -anthropic==0.34.2 -anyio==4.4.0 +anthropic==0.40.0 +anyio==4.7.0 async-timeout==4.0.3 attrs==24.2.0 -boto3==1.34.162 -botocore==1.34.162 +boto3==1.35.76 +botocore==1.35.76 certifi==2024.8.30 -charset-normalizer==3.3.2 -cohere==5.9.1 -coverage[toml]==7.6.1 +charset-normalizer==3.4.0 +cohere==5.13.3 +coverage[toml]==7.6.9 dataclasses-json==0.6.7 defusedxml==0.7.1 distro==1.9.0 exceptiongroup==1.2.2 fastavro==1.9.7 -filelock==3.16.0 -frozenlist==1.4.1 -fsspec==2024.9.0 +filelock==3.16.1 +frozenlist==1.5.0 +fsspec==2024.10.0 greenlet==3.0.3 h11==0.14.0 -httpcore==1.0.5 +httpcore==1.0.7 
httpx==0.27.2 httpx-sse==0.4.0 -huggingface-hub==0.24.6 +huggingface-hub==0.26.5 hypothesis==6.45.0 -idna==3.8 -importlib-metadata==8.4.0 +idna==3.10 +importlib-metadata==8.5.0 iniconfig==2.0.0 -jiter==0.5.0 +jiter==0.8.0 jmespath==1.0.1 jsonpatch==1.33 jsonpointer==3.0.0 -langchain==0.2.16 -langchain-anthropic==0.1.23 -langchain-aws==0.1.18 -langchain-cohere==0.2.4 -langchain-community==0.2.16 -langchain-core==0.2.39 -langchain-experimental==0.0.65 -langchain-openai==0.1.23 -langchain-pinecone==0.1.3 -langchain-text-splitters==0.2.4 -langsmith==0.1.117 -marshmallow==3.22.0 +langchain==0.3.10 +langchain-anthropic==0.3.0 +langchain-aws==0.2.9 +langchain-cohere==0.3.3 +langchain-community==0.3.10 +langchain-core==0.3.22 +langchain-experimental==0.3.3 +langchain-openai==0.2.11 +langchain-pinecone==0.2.0 +langchain-text-splitters==0.3.2 +langsmith==0.1.147 +marshmallow==3.23.1 mock==5.1.0 multidict==6.1.0 mypy-extensions==1.0.0 numexpr==2.8.5 numpy==1.26.4 -openai==1.44.1 +openai==1.57.0 opentracing==2.4.0 -orjson==3.10.7 -packaging==24.1 -pandas==2.2.2 +orjson==3.10.12 +packaging==24.2 +pandas==2.2.3 parameterized==0.9.0 pinecone-client==5.0.1 -pinecone-plugin-inference==1.0.3 +pinecone-plugin-inference==1.1.0 pinecone-plugin-interface==0.0.7 pluggy==1.5.0 -psutil==6.0.0 -pydantic==2.9.1 -pydantic-core==2.23.3 -pytest==8.3.3 +propcache==0.2.1 +psutil==6.1.0 +pydantic==2.10.3 +pydantic-core==2.27.1 +pydantic-settings==2.6.1 +pytest==8.3.4 pytest-asyncio==0.23.7 -pytest-cov==5.0.0 +pytest-cov==6.0.0 pytest-mock==3.14.0 pytest-randomly==3.10.1 python-dateutil==2.9.0.post0 -pytz==2024.1 +python-dotenv==1.0.1 +pytz==2024.2 pyyaml==6.0.2 -regex==2024.7.24 +regex==2024.11.6 requests==2.32.3 -s3transfer==0.10.2 +requests-toolbelt==1.0.0 +s3transfer==0.10.4 sentencepiece==0.2.0 -six==1.16.0 +six==1.17.0 sniffio==1.3.1 sortedcontainers==2.4.0 -sqlalchemy==2.0.34 +sqlalchemy==2.0.36 tabulate==0.9.0 tenacity==8.5.0 -tiktoken==0.7.0 -tokenizers==0.20.0 -tomli==2.0.1 -tqdm==4.66.5 
+tiktoken==0.8.0 +tokenizers==0.21.0 +tomli==2.2.1 +tqdm==4.67.1 types-requests==2.31.0.6 types-urllib3==1.26.25.14 typing-extensions==4.12.2 typing-inspect==0.9.0 -tzdata==2024.1 +tzdata==2024.2 urllib3==1.26.20 vcrpy==5.1.0 -wrapt==1.16.0 -yarl==1.11.1 -zipp==3.20.1 +wrapt==1.17.0 +yarl==1.18.3 +zipp==3.21.0 diff --git a/.riot/requirements/a9f396a.txt b/.riot/requirements/a9f396a.txt new file mode 100644 index 00000000000..4505eee48b0 --- /dev/null +++ b/.riot/requirements/a9f396a.txt @@ -0,0 +1,31 @@ +# +# This file is autogenerated by pip-compile with Python 3.13 +# by the following command: +# +# pip-compile --allow-unsafe --no-annotate .riot/requirements/a9f396a.in +# +aiohappyeyeballs==2.4.3 +aiohttp==3.10.9 +aiohttp-jinja2==1.6 +aiosignal==1.3.1 +attrs==24.2.0 +coverage[toml]==7.6.1 +frozenlist==1.4.1 +hypothesis==6.45.0 +idna==3.10 +iniconfig==2.0.0 +jinja2==3.1.4 +markupsafe==2.1.5 +mock==5.1.0 +multidict==6.1.0 +opentracing==2.4.0 +packaging==24.1 +pluggy==1.5.0 +pytest==8.3.3 +pytest-aiohttp==1.0.5 +pytest-asyncio==0.23.7 +pytest-cov==5.0.0 +pytest-mock==3.14.0 +pytest-randomly==3.15.0 +sortedcontainers==2.4.0 +yarl==1.13.1 diff --git a/.riot/requirements/aa1fe5c.txt b/.riot/requirements/aa1fe5c.txt index 7912c6eec65..bf4c4ba301f 100644 --- a/.riot/requirements/aa1fe5c.txt +++ b/.riot/requirements/aa1fe5c.txt @@ -4,41 +4,41 @@ # # pip-compile --allow-unsafe --no-annotate .riot/requirements/aa1fe5c.in # -ai21==2.14.1 +ai21==3.0.1 ai21-tokenizer==0.12.0 -aiohappyeyeballs==2.4.0 -aiohttp==3.10.5 +aiohappyeyeballs==2.4.4 +aiohttp==3.11.10 aiosignal==1.3.1 annotated-types==0.7.0 anthropic==0.26.0 -anyio==4.4.0 +anyio==4.7.0 async-timeout==4.0.3 attrs==24.2.0 boto3==1.34.51 botocore==1.34.51 certifi==2024.8.30 -charset-normalizer==3.3.2 -cohere==5.9.1 -coverage[toml]==7.6.1 +charset-normalizer==3.4.0 +cohere==5.13.3 +coverage[toml]==7.6.9 dataclasses-json==0.6.7 defusedxml==0.7.1 distro==1.9.0 exceptiongroup==1.2.2 fastavro==1.9.7 -filelock==3.16.0 
-frozenlist==1.4.1 -fsspec==2024.9.0 +filelock==3.16.1 +frozenlist==1.5.0 +fsspec==2024.10.0 greenlet==3.0.3 h11==0.14.0 -httpcore==1.0.5 +httpcore==1.0.7 httpx==0.27.2 httpx-sse==0.4.0 -huggingface-hub==0.24.6 +huggingface-hub==0.26.5 hypothesis==6.45.0 -idna==3.8 -importlib-metadata==8.4.0 +idna==3.10 +importlib-metadata==8.5.0 iniconfig==2.0.0 -jiter==0.5.0 +jiter==0.8.0 jmespath==1.0.1 jsonpatch==1.33 jsonpointer==3.0.0 @@ -50,51 +50,53 @@ langchain-core==0.2.0 langchain-openai==0.1.7 langchain-pinecone==0.1.3 langchain-text-splitters==0.2.1 -langsmith==0.1.117 -marshmallow==3.22.0 +langsmith==0.1.147 +marshmallow==3.23.1 mock==5.1.0 multidict==6.1.0 mypy-extensions==1.0.0 numexpr==2.8.5 numpy==1.26.4 -openai==1.44.1 +openai==1.57.0 opentracing==2.4.0 -orjson==3.10.7 +orjson==3.10.12 packaging==23.2 parameterized==0.9.0 pinecone-client==5.0.1 -pinecone-plugin-inference==1.0.3 +pinecone-plugin-inference==1.1.0 pinecone-plugin-interface==0.0.7 pluggy==1.5.0 -psutil==6.0.0 -pydantic==2.9.1 -pydantic-core==2.23.3 -pytest==8.3.3 +propcache==0.2.1 +psutil==6.1.0 +pydantic==2.10.3 +pydantic-core==2.27.1 +pytest==8.3.4 pytest-asyncio==0.23.7 -pytest-cov==5.0.0 +pytest-cov==6.0.0 pytest-mock==3.14.0 pytest-randomly==3.10.1 python-dateutil==2.9.0.post0 pyyaml==6.0.2 -regex==2024.7.24 +regex==2024.11.6 requests==2.32.3 -s3transfer==0.10.2 +requests-toolbelt==1.0.0 +s3transfer==0.10.4 sentencepiece==0.2.0 -six==1.16.0 +six==1.17.0 sniffio==1.3.1 sortedcontainers==2.4.0 -sqlalchemy==2.0.34 +sqlalchemy==2.0.36 tenacity==8.5.0 -tiktoken==0.7.0 -tokenizers==0.20.0 -tomli==2.0.1 -tqdm==4.66.5 +tiktoken==0.8.0 +tokenizers==0.21.0 +tomli==2.2.1 +tqdm==4.67.1 types-requests==2.31.0.6 types-urllib3==1.26.25.14 typing-extensions==4.12.2 typing-inspect==0.9.0 urllib3==1.26.20 vcrpy==5.1.0 -wrapt==1.16.0 -yarl==1.11.1 -zipp==3.20.1 +wrapt==1.17.0 +yarl==1.18.3 +zipp==3.21.0 diff --git a/.riot/requirements/aaf6987.txt b/.riot/requirements/aaf6987.txt index c5cf067fda4..325d23c244d 
100644 --- a/.riot/requirements/aaf6987.txt +++ b/.riot/requirements/aaf6987.txt @@ -2,22 +2,22 @@ # This file is autogenerated by pip-compile with Python 3.10 # by the following command: # -# pip-compile --no-annotate .riot/requirements/aaf6987.in +# pip-compile --allow-unsafe --no-annotate .riot/requirements/aaf6987.in # asyncpg==0.24.0 -attrs==23.2.0 -coverage[toml]==7.4.0 -exceptiongroup==1.2.0 +attrs==24.2.0 +coverage[toml]==7.6.9 +exceptiongroup==1.2.2 hypothesis==6.45.0 iniconfig==2.0.0 mock==5.1.0 opentracing==2.4.0 -packaging==23.2 -pluggy==1.3.0 -pytest==7.4.4 -pytest-asyncio==0.21.1 -pytest-cov==4.1.0 -pytest-mock==3.12.0 -pytest-randomly==3.15.0 +packaging==24.2 +pluggy==1.5.0 +pytest==8.3.4 +pytest-asyncio==0.21.2 +pytest-cov==6.0.0 +pytest-mock==3.14.0 +pytest-randomly==3.16.0 sortedcontainers==2.4.0 -tomli==2.0.1 +tomli==2.2.1 diff --git a/.riot/requirements/ae8bd25.txt b/.riot/requirements/ae8bd25.txt new file mode 100644 index 00000000000..f0736d28cfc --- /dev/null +++ b/.riot/requirements/ae8bd25.txt @@ -0,0 +1,26 @@ +# +# This file is autogenerated by pip-compile with Python 3.13 +# by the following command: +# +# pip-compile --allow-unsafe --no-annotate .riot/requirements/ae8bd25.in +# +asgiref==3.8.1 +attrs==24.2.0 +coverage[toml]==7.6.1 +django==4.2.16 +django-configurations==2.5.1 +djangorestframework==3.15.2 +hypothesis==6.45.0 +iniconfig==2.0.0 +mock==5.1.0 +opentracing==2.4.0 +packaging==24.1 +pluggy==1.5.0 +pytest==8.3.3 +pytest-cov==5.0.0 +pytest-django[testing]==3.10.0 +pytest-mock==3.14.0 +pytest-randomly==3.15.0 +six==1.16.0 +sortedcontainers==2.4.0 +sqlparse==0.5.1 diff --git a/.riot/requirements/afc1791.txt b/.riot/requirements/afc1791.txt new file mode 100644 index 00000000000..2a3cfd4447d --- /dev/null +++ b/.riot/requirements/afc1791.txt @@ -0,0 +1,27 @@ +# +# This file is autogenerated by pip-compile with Python 3.13 +# by the following command: +# +# pip-compile --no-annotate .riot/requirements/afc1791.in +# +attrs==24.3.0 
+coverage[toml]==7.6.9 +gevent==24.11.1 +greenlet==3.1.1 +hypothesis==6.45.0 +iniconfig==2.0.0 +mock==5.1.0 +opentracing==2.4.0 +packaging==24.2 +pluggy==1.5.0 +pytest==8.3.4 +pytest-cov==6.0.0 +pytest-mock==3.14.0 +pytest-randomly==3.16.0 +redis==5.2.1 +sortedcontainers==2.4.0 +zope-event==5.0 +zope-interface==7.2 + +# The following packages are considered to be unsafe in a requirements file: +# setuptools diff --git a/.riot/requirements/b29075f.txt b/.riot/requirements/b29075f.txt new file mode 100644 index 00000000000..d070fd9e2f2 --- /dev/null +++ b/.riot/requirements/b29075f.txt @@ -0,0 +1,38 @@ +# +# This file is autogenerated by pip-compile with Python 3.13 +# by the following command: +# +# pip-compile --allow-unsafe --no-annotate .riot/requirements/b29075f.in +# +annotated-types==0.7.0 +attrs==24.2.0 +blinker==1.8.2 +certifi==2024.8.30 +charset-normalizer==3.3.2 +click==8.1.7 +coverage[toml]==7.6.1 +flask==3.0.3 +flask-openapi3==4.0.1 +hypothesis==6.45.0 +idna==3.10 +importlib-metadata==8.5.0 +iniconfig==2.0.0 +itsdangerous==2.2.0 +jinja2==3.1.4 +markupsafe==2.1.5 +mock==5.1.0 +opentracing==2.4.0 +packaging==24.1 +pluggy==1.5.0 +pydantic==2.9.2 +pydantic-core==2.23.4 +pytest==8.3.3 +pytest-cov==5.0.0 +pytest-mock==3.14.0 +pytest-randomly==3.15.0 +requests==2.32.3 +sortedcontainers==2.4.0 +typing-extensions==4.12.2 +urllib3==1.26.20 +werkzeug==3.0.4 +zipp==3.20.2 diff --git a/.riot/requirements/b2ac981.txt b/.riot/requirements/b2ac981.txt deleted file mode 100644 index b9c0f587e1e..00000000000 --- a/.riot/requirements/b2ac981.txt +++ /dev/null @@ -1,22 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.10 -# by the following command: -# -# pip-compile --no-annotate --resolver=backtracking .riot/requirements/b2ac981.in -# -attrs==23.1.0 -coverage[toml]==7.2.7 -exceptiongroup==1.1.2 -hypothesis==6.45.0 -iniconfig==2.0.0 -mock==5.1.0 -opentracing==2.4.0 -packaging==23.1 -pluggy==1.2.0 -pytest==7.4.0 -pytest-cov==4.1.0 -pytest-mock==3.11.1 
-ruamel-yaml==0.17.32 -ruamel-yaml-clib==0.2.7 -sortedcontainers==2.4.0 -tomli==2.0.1 diff --git a/.riot/requirements/b403d9d.txt b/.riot/requirements/b403d9d.txt new file mode 100644 index 00000000000..1cb46c6afb0 --- /dev/null +++ b/.riot/requirements/b403d9d.txt @@ -0,0 +1,49 @@ +# +# This file is autogenerated by pip-compile with Python 3.13 +# by the following command: +# +# pip-compile --allow-unsafe --no-annotate .riot/requirements/b403d9d.in +# +aiobotocore==2.3.1 +aiohappyeyeballs==2.4.3 +aiohttp==3.10.9 +aioitertools==0.12.0 +aiosignal==1.3.1 +attrs==24.2.0 +botocore==1.24.21 +certifi==2024.8.30 +charset-normalizer==3.3.2 +coverage[toml]==7.6.1 +elastic-transport==8.15.0 +elasticsearch==8.15.1 +events==0.5 +frozenlist==1.4.1 +gevent==24.2.1 +greenlet==3.1.1 +hypothesis==6.45.0 +idna==3.10 +iniconfig==2.0.0 +jmespath==1.0.1 +mock==5.1.0 +multidict==6.1.0 +opensearch-py==2.7.1 +opentracing==2.4.0 +packaging==24.1 +pluggy==1.5.0 +pynamodb==5.5.1 +pytest==8.3.3 +pytest-cov==5.0.0 +pytest-mock==3.14.0 +pytest-randomly==3.15.0 +python-dateutil==2.9.0.post0 +requests==2.32.3 +six==1.16.0 +sortedcontainers==2.4.0 +urllib3==1.26.20 +wrapt==1.16.0 +yarl==1.13.1 +zope-event==5.0 +zope-interface==7.0.3 + +# The following packages are considered to be unsafe in a requirements file: +setuptools==75.1.0 diff --git a/.riot/requirements/b970d9a.txt b/.riot/requirements/b970d9a.txt index 4c48960f797..5b6566c9768 100644 --- a/.riot/requirements/b970d9a.txt +++ b/.riot/requirements/b970d9a.txt @@ -2,21 +2,20 @@ # This file is autogenerated by pip-compile with Python 3.11 # by the following command: # -# pip-compile --no-annotate .riot/requirements/b970d9a.in +# pip-compile --allow-unsafe --no-annotate .riot/requirements/b970d9a.in # -async-timeout==4.0.3 -asyncpg==0.29.0 -attrs==23.2.0 -coverage[toml]==7.4.0 +asyncpg==0.30.0 +attrs==24.2.0 +coverage[toml]==7.6.9 hypothesis==6.45.0 iniconfig==2.0.0 mock==5.1.0 opentracing==2.4.0 -packaging==23.2 -pluggy==1.3.0 -pytest==7.4.4 
-pytest-asyncio==0.21.1 -pytest-cov==4.1.0 -pytest-mock==3.12.0 -pytest-randomly==3.15.0 +packaging==24.2 +pluggy==1.5.0 +pytest==8.3.4 +pytest-asyncio==0.21.2 +pytest-cov==6.0.0 +pytest-mock==3.14.0 +pytest-randomly==3.16.0 sortedcontainers==2.4.0 diff --git a/.riot/requirements/bc5cfa5.txt b/.riot/requirements/bc5cfa5.txt index 6bb0594f42e..69e33321ec8 100644 --- a/.riot/requirements/bc5cfa5.txt +++ b/.riot/requirements/bc5cfa5.txt @@ -2,23 +2,23 @@ # This file is autogenerated by pip-compile with Python 3.10 # by the following command: # -# pip-compile --no-annotate .riot/requirements/bc5cfa5.in +# pip-compile --allow-unsafe --no-annotate .riot/requirements/bc5cfa5.in # -async-timeout==4.0.3 -asyncpg==0.29.0 -attrs==23.2.0 -coverage[toml]==7.4.0 -exceptiongroup==1.2.0 +async-timeout==5.0.1 +asyncpg==0.30.0 +attrs==24.2.0 +coverage[toml]==7.6.9 +exceptiongroup==1.2.2 hypothesis==6.45.0 iniconfig==2.0.0 mock==5.1.0 opentracing==2.4.0 -packaging==23.2 -pluggy==1.3.0 -pytest==7.4.4 -pytest-asyncio==0.21.1 -pytest-cov==4.1.0 -pytest-mock==3.12.0 -pytest-randomly==3.15.0 +packaging==24.2 +pluggy==1.5.0 +pytest==8.3.4 +pytest-asyncio==0.21.2 +pytest-cov==6.0.0 +pytest-mock==3.14.0 +pytest-randomly==3.16.0 sortedcontainers==2.4.0 -tomli==2.0.1 +tomli==2.2.1 diff --git a/.riot/requirements/bc64f49.txt b/.riot/requirements/bc64f49.txt new file mode 100644 index 00000000000..ab6f8840549 --- /dev/null +++ b/.riot/requirements/bc64f49.txt @@ -0,0 +1,35 @@ +# +# This file is autogenerated by pip-compile with Python 3.13 +# by the following command: +# +# pip-compile --allow-unsafe --no-annotate .riot/requirements/bc64f49.in +# +attrs==24.2.0 +autocommand==2.2.2 +cheroot==10.0.1 +cherrypy==18.10.0 +coverage[toml]==7.6.1 +hypothesis==6.45.0 +iniconfig==2.0.0 +jaraco-collections==5.1.0 +jaraco-context==6.0.1 +jaraco-functools==4.1.0 +jaraco-text==4.0.0 +mock==5.1.0 +more-itertools==8.10.0 +opentracing==2.4.0 +packaging==24.1 +pluggy==1.5.0 +portend==3.2.0 +pytest==8.3.3 
+pytest-cov==5.0.0 +pytest-mock==3.14.0 +pytest-randomly==3.15.0 +python-dateutil==2.9.0.post0 +six==1.16.0 +sortedcontainers==2.4.0 +tempora==5.7.0 +zc-lockfile==3.0.post1 + +# The following packages are considered to be unsafe in a requirements file: +setuptools==75.1.0 diff --git a/.riot/requirements/bc7a1f4.txt b/.riot/requirements/bc7a1f4.txt new file mode 100644 index 00000000000..a73a0ac6da4 --- /dev/null +++ b/.riot/requirements/bc7a1f4.txt @@ -0,0 +1,21 @@ +# +# This file is autogenerated by pip-compile with Python 3.13 +# by the following command: +# +# pip-compile --allow-unsafe --no-annotate .riot/requirements/bc7a1f4.in +# +attrs==24.2.0 +coverage[toml]==7.6.1 +elasticsearch1==1.10.0 +hypothesis==6.45.0 +iniconfig==2.0.0 +mock==5.1.0 +opentracing==2.4.0 +packaging==24.1 +pluggy==1.5.0 +pytest==8.3.3 +pytest-cov==5.0.0 +pytest-mock==3.14.0 +pytest-randomly==3.15.0 +sortedcontainers==2.4.0 +urllib3==1.26.20 diff --git a/.riot/requirements/bcbec2a.txt b/.riot/requirements/bcbec2a.txt new file mode 100644 index 00000000000..665c0aadc1a --- /dev/null +++ b/.riot/requirements/bcbec2a.txt @@ -0,0 +1,46 @@ +# +# This file is autogenerated by pip-compile with Python 3.13 +# by the following command: +# +# pip-compile --allow-unsafe --no-annotate .riot/requirements/bcbec2a.in +# +annotated-types==0.7.0 +anyio==4.7.0 +attrs==24.2.0 +boto3==1.35.78 +botocore==1.35.78 +certifi==2024.8.30 +coverage[toml]==7.6.9 +fastapi==0.115.6 +h11==0.14.0 +httpcore==1.0.7 +httpretty==1.1.4 +httpx==0.28.1 +hypothesis==6.45.0 +idna==3.10 +iniconfig==2.0.0 +jmespath==1.0.1 +mock==5.1.0 +msgpack==1.1.0 +opentracing==2.4.0 +packaging==24.2 +pluggy==1.5.0 +pydantic==2.10.3 +pydantic-core==2.27.1 +pytest==8.3.4 +pytest-cov==6.0.0 +pytest-mock==3.14.0 +pytest-randomly==3.16.0 +python-dateutil==2.9.0.post0 +s3transfer==0.10.4 +six==1.17.0 +sniffio==1.3.1 +sortedcontainers==2.4.0 +starlette==0.41.3 +structlog==24.4.0 +typing-extensions==4.12.2 +urllib3==2.2.3 +wheel==0.45.1 + +# The 
following packages are considered to be unsafe in a requirements file: +setuptools==75.6.0 diff --git a/.riot/requirements/bebdd41.txt b/.riot/requirements/bebdd41.txt new file mode 100644 index 00000000000..c0918e4e15a --- /dev/null +++ b/.riot/requirements/bebdd41.txt @@ -0,0 +1,19 @@ +# +# This file is autogenerated by pip-compile with Python 3.13 +# by the following command: +# +# pip-compile --allow-unsafe --no-annotate .riot/requirements/bebdd41.in +# +attrs==24.2.0 +coverage[toml]==7.6.1 +hypothesis==6.45.0 +iniconfig==2.0.0 +mock==5.1.0 +opentracing==2.4.0 +packaging==24.1 +pluggy==1.5.0 +pytest==8.3.3 +pytest-cov==5.0.0 +pytest-mock==3.14.0 +pytest-randomly==3.15.0 +sortedcontainers==2.4.0 diff --git a/.riot/requirements/c1351c9.txt b/.riot/requirements/c1351c9.txt new file mode 100644 index 00000000000..10e97c081a4 --- /dev/null +++ b/.riot/requirements/c1351c9.txt @@ -0,0 +1,21 @@ +# +# This file is autogenerated by pip-compile with Python 3.13 +# by the following command: +# +# pip-compile --allow-unsafe --no-annotate .riot/requirements/c1351c9.in +# +attrs==24.2.0 +coverage[toml]==7.6.1 +hypothesis==6.45.0 +iniconfig==2.0.0 +mock==5.1.0 +opentracing==2.4.0 +packaging==24.1 +pluggy==1.5.0 +pytest==8.3.3 +pytest-asyncio==0.23.7 +pytest-cov==5.0.0 +pytest-mock==3.14.0 +pytest-randomly==3.15.0 +redis==5.1.1 +sortedcontainers==2.4.0 diff --git a/.riot/requirements/c2420c2.txt b/.riot/requirements/c2420c2.txt new file mode 100644 index 00000000000..2d6d61d7a79 --- /dev/null +++ b/.riot/requirements/c2420c2.txt @@ -0,0 +1,26 @@ +# +# This file is autogenerated by pip-compile with Python 3.9 +# by the following command: +# +# pip-compile --allow-unsafe --no-annotate .riot/requirements/c2420c2.in +# +attrs==24.2.0 +azure-functions==1.21.3 +certifi==2024.8.30 +charset-normalizer==3.4.0 +coverage[toml]==7.6.8 +exceptiongroup==1.2.2 +hypothesis==6.45.0 +idna==3.10 +iniconfig==2.0.0 +mock==5.1.0 +opentracing==2.4.0 +packaging==24.2 +pluggy==1.5.0 +pytest==8.3.3 
+pytest-cov==6.0.0 +pytest-mock==3.14.0 +requests==2.32.3 +sortedcontainers==2.4.0 +tomli==2.1.0 +urllib3==2.2.3 diff --git a/.riot/requirements/c4d4455.txt b/.riot/requirements/c4d4455.txt new file mode 100644 index 00000000000..1a8b9f970ef --- /dev/null +++ b/.riot/requirements/c4d4455.txt @@ -0,0 +1,20 @@ +# +# This file is autogenerated by pip-compile with Python 3.13 +# by the following command: +# +# pip-compile --allow-unsafe --no-annotate .riot/requirements/c4d4455.in +# +attrs==24.2.0 +coverage[toml]==7.6.1 +hypothesis==6.45.0 +iniconfig==2.0.0 +mock==5.1.0 +opentracing==2.4.0 +packaging==24.1 +pluggy==1.5.0 +pytest==8.3.3 +pytest-asyncio==0.21.1 +pytest-cov==5.0.0 +pytest-mock==3.14.0 +pytest-randomly==3.15.0 +sortedcontainers==2.4.0 diff --git a/.riot/requirements/c4dace8.txt b/.riot/requirements/c4dace8.txt index a6044120570..b828932c4c2 100644 --- a/.riot/requirements/c4dace8.txt +++ b/.riot/requirements/c4dace8.txt @@ -8,17 +8,17 @@ attrs==24.2.0 coverage[toml]==7.6.1 exceptiongroup==1.2.2 hypothesis==6.45.0 -importlib-metadata==8.4.0 +importlib-metadata==8.5.0 iniconfig==2.0.0 mock==5.1.0 opentracing==2.4.0 -packaging==24.1 +packaging==24.2 pluggy==1.5.0 -pyodbc==5.1.0 -pytest==8.3.2 +pyodbc==5.2.0 +pytest==8.3.4 pytest-cov==5.0.0 pytest-mock==3.14.0 pytest-randomly==3.15.0 sortedcontainers==2.4.0 -tomli==2.0.1 -zipp==3.20.0 +tomli==2.2.1 +zipp==3.20.2 diff --git a/.riot/requirements/c77bbb6.txt b/.riot/requirements/c77bbb6.txt new file mode 100644 index 00000000000..3f53bcba5e6 --- /dev/null +++ b/.riot/requirements/c77bbb6.txt @@ -0,0 +1,48 @@ +# +# This file is autogenerated by pip-compile with Python 3.13 +# by the following command: +# +# pip-compile --allow-unsafe --no-annotate .riot/requirements/c77bbb6.in +# +attrs==24.2.0 +certifi==2024.8.30 +charset-normalizer==2.1.1 +click==8.1.7 +coverage[toml]==7.6.1 +deprecated==1.2.14 +flask==2.1.3 +gevent==24.2.1 +greenlet==3.1.1 +hypothesis==6.45.0 +idna==3.10 +importlib-metadata==8.4.0 
+iniconfig==2.0.0 +itsdangerous==2.2.0 +jinja2==3.1.4 +markupsafe==2.0.1 +mock==5.1.0 +opentelemetry-api==1.27.0 +opentelemetry-instrumentation==0.48b0 +opentelemetry-instrumentation-flask==0.48b0 +opentelemetry-instrumentation-wsgi==0.48b0 +opentelemetry-semantic-conventions==0.48b0 +opentelemetry-util-http==0.48b0 +opentracing==2.4.0 +packaging==24.1 +pluggy==1.5.0 +pytest==8.3.3 +pytest-asyncio==0.21.1 +pytest-cov==5.0.0 +pytest-mock==3.14.0 +pytest-randomly==3.15.0 +requests==2.28.1 +sortedcontainers==2.4.0 +urllib3==1.26.20 +werkzeug==2.1.2 +wrapt==1.16.0 +zipp==3.20.2 +zope-event==5.0 +zope-interface==7.0.3 + +# The following packages are considered to be unsafe in a requirements file: +setuptools==75.1.0 diff --git a/.riot/requirements/c8b476b.txt b/.riot/requirements/c8b476b.txt new file mode 100644 index 00000000000..d8fd4322d7f --- /dev/null +++ b/.riot/requirements/c8b476b.txt @@ -0,0 +1,32 @@ +# +# This file is autogenerated by pip-compile with Python 3.13 +# by the following command: +# +# pip-compile --allow-unsafe --no-annotate .riot/requirements/c8b476b.in +# +attrs==24.2.0 +certifi==2024.8.30 +charset-normalizer==3.3.2 +coverage[toml]==7.6.1 +gevent==24.2.1 +greenlet==3.1.1 +gunicorn==20.0.4 +hypothesis==6.45.0 +idna==3.10 +iniconfig==2.0.0 +mock==5.1.0 +opentracing==2.4.0 +packaging==24.1 +pluggy==1.5.0 +pytest==8.3.3 +pytest-cov==5.0.0 +pytest-mock==3.14.0 +pytest-randomly==3.15.0 +requests==2.32.3 +sortedcontainers==2.4.0 +urllib3==2.2.3 +zope-event==5.0 +zope-interface==7.0.3 + +# The following packages are considered to be unsafe in a requirements file: +setuptools==75.1.0 diff --git a/.riot/requirements/ca86aae.txt b/.riot/requirements/ca86aae.txt index f37a682c7bd..be8832160e7 100644 --- a/.riot/requirements/ca86aae.txt +++ b/.riot/requirements/ca86aae.txt @@ -2,25 +2,25 @@ # This file is autogenerated by pip-compile with Python 3.8 # by the following command: # -# pip-compile --no-annotate .riot/requirements/ca86aae.in +# pip-compile 
--allow-unsafe --no-annotate .riot/requirements/ca86aae.in # -async-timeout==4.0.3 -asyncpg==0.29.0 -attrs==23.2.0 -coverage[toml]==7.4.0 -exceptiongroup==1.2.0 +async-timeout==5.0.1 +asyncpg==0.30.0 +attrs==24.2.0 +coverage[toml]==7.6.1 +exceptiongroup==1.2.2 hypothesis==6.45.0 -importlib-metadata==7.0.1 +importlib-metadata==8.5.0 iniconfig==2.0.0 mock==5.1.0 opentracing==2.4.0 -packaging==23.2 -pluggy==1.3.0 -pytest==7.4.4 -pytest-asyncio==0.21.1 -pytest-cov==4.1.0 -pytest-mock==3.12.0 +packaging==24.2 +pluggy==1.5.0 +pytest==8.3.4 +pytest-asyncio==0.21.2 +pytest-cov==5.0.0 +pytest-mock==3.14.0 pytest-randomly==3.15.0 sortedcontainers==2.4.0 -tomli==2.0.1 -zipp==3.17.0 +tomli==2.2.1 +zipp==3.20.2 diff --git a/.riot/requirements/cbbb0eb.txt b/.riot/requirements/cbbb0eb.txt index 6f976af7055..91e45e45546 100644 --- a/.riot/requirements/cbbb0eb.txt +++ b/.riot/requirements/cbbb0eb.txt @@ -4,98 +4,101 @@ # # pip-compile --allow-unsafe --no-annotate .riot/requirements/cbbb0eb.in # -ai21==2.14.1 +ai21==3.0.1 ai21-tokenizer==0.12.0 -aiohappyeyeballs==2.4.0 -aiohttp==3.10.5 +aiohttp==3.9.5 aiosignal==1.3.1 annotated-types==0.7.0 -anthropic==0.34.2 -anyio==4.4.0 +anthropic==0.40.0 +anyio==4.7.0 attrs==24.2.0 -boto3==1.34.162 -botocore==1.34.162 +boto3==1.35.76 +botocore==1.35.76 certifi==2024.8.30 -charset-normalizer==3.3.2 -cohere==5.9.1 -coverage[toml]==7.6.1 +charset-normalizer==3.4.0 +cohere==5.13.3 +coverage[toml]==7.6.9 dataclasses-json==0.6.7 defusedxml==0.7.1 distro==1.9.0 exceptiongroup==1.2.2 fastavro==1.9.7 -filelock==3.16.0 -frozenlist==1.4.1 -fsspec==2024.9.0 +filelock==3.16.1 +frozenlist==1.5.0 +fsspec==2024.10.0 greenlet==3.0.3 h11==0.14.0 -httpcore==1.0.5 +httpcore==1.0.7 httpx==0.27.2 httpx-sse==0.4.0 -huggingface-hub==0.24.6 +huggingface-hub==0.26.5 hypothesis==6.45.0 -idna==3.8 +idna==3.10 iniconfig==2.0.0 -jiter==0.5.0 +jiter==0.8.0 jmespath==1.0.1 jsonpatch==1.33 jsonpointer==3.0.0 -langchain==0.2.16 -langchain-anthropic==0.1.23 -langchain-aws==0.1.18 
-langchain-cohere==0.2.4 -langchain-community==0.2.16 -langchain-core==0.2.39 -langchain-experimental==0.0.65 -langchain-openai==0.1.23 -langchain-pinecone==0.1.3 -langchain-text-splitters==0.2.4 -langsmith==0.1.117 -marshmallow==3.22.0 +langchain==0.3.10 +langchain-anthropic==0.3.0 +langchain-aws==0.2.9 +langchain-cohere==0.3.3 +langchain-community==0.3.10 +langchain-core==0.3.22 +langchain-experimental==0.3.3 +langchain-openai==0.2.11 +langchain-pinecone==0.2.0 +langchain-text-splitters==0.3.2 +langsmith==0.1.147 +marshmallow==3.23.1 mock==5.1.0 multidict==6.1.0 mypy-extensions==1.0.0 numexpr==2.8.5 numpy==1.26.4 -openai==1.44.1 +openai==1.57.0 opentracing==2.4.0 -orjson==3.10.7 -packaging==24.1 -pandas==2.2.2 +orjson==3.10.12 +packaging==24.2 +pandas==2.2.3 parameterized==0.9.0 pinecone-client==5.0.1 -pinecone-plugin-inference==1.0.3 +pinecone-plugin-inference==1.1.0 pinecone-plugin-interface==0.0.7 pluggy==1.5.0 -psutil==6.0.0 -pydantic==2.9.1 -pydantic-core==2.23.3 -pytest==8.3.3 +propcache==0.2.1 +psutil==6.1.0 +pydantic==2.10.3 +pydantic-core==2.27.1 +pydantic-settings==2.6.1 +pytest==8.3.4 pytest-asyncio==0.23.7 -pytest-cov==5.0.0 +pytest-cov==6.0.0 pytest-mock==3.14.0 pytest-randomly==3.10.1 python-dateutil==2.9.0.post0 -pytz==2024.1 +python-dotenv==1.0.1 +pytz==2024.2 pyyaml==6.0.2 -regex==2024.7.24 +regex==2024.11.6 requests==2.32.3 -s3transfer==0.10.2 +requests-toolbelt==1.0.0 +s3transfer==0.10.4 sentencepiece==0.2.0 -six==1.16.0 +six==1.17.0 sniffio==1.3.1 sortedcontainers==2.4.0 -sqlalchemy==2.0.34 +sqlalchemy==2.0.36 tabulate==0.9.0 tenacity==8.5.0 -tiktoken==0.7.0 -tokenizers==0.20.0 -tqdm==4.66.5 -types-requests==2.32.0.20240907 +tiktoken==0.8.0 +tokenizers==0.21.0 +tqdm==4.67.1 +types-requests==2.32.0.20241016 typing-extensions==4.12.2 typing-inspect==0.9.0 -tzdata==2024.1 -urllib3==2.2.2 +tzdata==2024.2 +urllib3==2.2.3 vcrpy==5.1.0 -wrapt==1.16.0 -yarl==1.11.1 +wrapt==1.17.0 +yarl==1.18.3 diff --git a/.riot/requirements/cf9bdda.txt 
b/.riot/requirements/cf9bdda.txt index 943438066ca..d08448d036c 100644 --- a/.riot/requirements/cf9bdda.txt +++ b/.riot/requirements/cf9bdda.txt @@ -4,40 +4,40 @@ # # pip-compile --allow-unsafe --no-annotate .riot/requirements/cf9bdda.in # -ai21==2.14.1 +ai21==3.0.1 ai21-tokenizer==0.12.0 -aiohappyeyeballs==2.4.0 -aiohttp==3.10.5 +aiohappyeyeballs==2.4.4 +aiohttp==3.11.10 aiosignal==1.3.1 annotated-types==0.7.0 anthropic==0.26.0 -anyio==4.4.0 +anyio==4.7.0 async-timeout==4.0.3 attrs==24.2.0 boto3==1.34.51 botocore==1.34.51 certifi==2024.8.30 -charset-normalizer==3.3.2 +charset-normalizer==3.4.0 cohere==5.4.0 -coverage[toml]==7.6.1 +coverage[toml]==7.6.9 dataclasses-json==0.6.7 defusedxml==0.7.1 distro==1.9.0 exceptiongroup==1.2.2 faiss-cpu==1.8.0 fastavro==1.9.7 -filelock==3.16.0 -frozenlist==1.4.1 -fsspec==2024.9.0 +filelock==3.16.1 +frozenlist==1.5.0 +fsspec==2024.10.0 greenlet==3.0.3 h11==0.14.0 -httpcore==1.0.5 +httpcore==1.0.7 httpx==0.27.2 httpx-sse==0.4.0 -huggingface-hub==0.24.6 +huggingface-hub==0.26.5 hypothesis==6.45.0 -idna==3.8 -importlib-metadata==8.4.0 +idna==3.10 +importlib-metadata==8.5.0 iniconfig==2.0.0 jmespath==1.0.1 jsonpatch==1.33 @@ -51,8 +51,8 @@ langchain-core==0.1.52 langchain-openai==0.1.6 langchain-pinecone==0.1.0 langchain-text-splitters==0.0.2 -langsmith==0.1.117 -marshmallow==3.22.0 +langsmith==0.1.147 +marshmallow==3.23.1 mock==5.1.0 multidict==6.1.0 mypy-extensions==1.0.0 @@ -60,39 +60,41 @@ numexpr==2.8.5 numpy==1.26.4 openai==1.30.3 opentracing==2.4.0 -orjson==3.10.7 +orjson==3.10.12 packaging==23.2 pinecone-client==3.2.2 pluggy==1.5.0 -psutil==6.0.0 -pydantic==2.9.1 -pydantic-core==2.23.3 -pytest==8.3.3 +propcache==0.2.1 +psutil==6.1.0 +pydantic==2.10.3 +pydantic-core==2.27.1 +pytest==8.3.4 pytest-asyncio==0.23.7 -pytest-cov==5.0.0 +pytest-cov==6.0.0 pytest-mock==3.14.0 pytest-randomly==3.10.1 python-dateutil==2.9.0.post0 pyyaml==6.0.2 -regex==2024.7.24 +regex==2024.11.6 requests==2.32.3 -s3transfer==0.10.2 
+requests-toolbelt==1.0.0 +s3transfer==0.10.4 sentencepiece==0.2.0 -six==1.16.0 +six==1.17.0 sniffio==1.3.1 sortedcontainers==2.4.0 -sqlalchemy==2.0.34 +sqlalchemy==2.0.36 tenacity==8.5.0 -tiktoken==0.7.0 +tiktoken==0.8.0 tokenizers==0.19.1 -tomli==2.0.1 -tqdm==4.66.5 +tomli==2.2.1 +tqdm==4.67.1 types-requests==2.31.0.6 types-urllib3==1.26.25.14 typing-extensions==4.12.2 typing-inspect==0.9.0 urllib3==1.26.20 vcrpy==5.1.0 -wrapt==1.16.0 -yarl==1.11.1 -zipp==3.20.1 +wrapt==1.17.0 +yarl==1.18.3 +zipp==3.21.0 diff --git a/.riot/requirements/d39d3de.txt b/.riot/requirements/d39d3de.txt index a1e88b6d11b..53ccfd3b257 100644 --- a/.riot/requirements/d39d3de.txt +++ b/.riot/requirements/d39d3de.txt @@ -4,33 +4,33 @@ # # pip-compile --allow-unsafe --no-annotate .riot/requirements/d39d3de.in # -ai21==2.14.1 +ai21==3.0.1 ai21-tokenizer==0.12.0 -aiohappyeyeballs==2.4.0 -aiohttp==3.10.5 +aiohappyeyeballs==2.4.4 +aiohttp==3.11.10 aiosignal==1.3.1 -anyio==4.4.0 +anyio==4.7.0 async-timeout==4.0.3 attrs==24.2.0 backoff==2.2.1 certifi==2024.8.30 -charset-normalizer==3.3.2 +charset-normalizer==3.4.0 cohere==4.57 -coverage[toml]==7.6.1 +coverage[toml]==7.6.9 dataclasses-json==0.5.14 -dnspython==2.6.1 +dnspython==2.7.0 exceptiongroup==1.2.2 fastavro==1.9.7 -filelock==3.16.0 -frozenlist==1.4.1 -fsspec==2024.9.0 +filelock==3.16.1 +frozenlist==1.5.0 +fsspec==2024.10.0 greenlet==3.0.3 h11==0.14.0 -httpcore==1.0.5 +httpcore==1.0.7 httpx==0.27.2 -huggingface-hub==0.24.6 +huggingface-hub==0.26.5 hypothesis==6.45.0 -idna==3.8 +idna==3.10 importlib-metadata==6.11.0 iniconfig==2.0.0 jsonpatch==1.33 @@ -40,8 +40,8 @@ langchain-community==0.0.14 langchain-core==0.1.23 langchainplus-sdk==0.0.4 langsmith==0.0.87 -loguru==0.7.2 -marshmallow==3.22.0 +loguru==0.7.3 +marshmallow==3.23.1 mock==5.1.0 multidict==6.1.0 mypy-extensions==1.0.0 @@ -53,31 +53,32 @@ opentracing==2.4.0 packaging==23.2 pinecone-client==2.2.4 pluggy==1.5.0 -psutil==6.0.0 -pydantic==1.10.18 -pytest==8.3.3 +propcache==0.2.1 
+psutil==6.1.0 +pydantic==1.10.19 +pytest==8.3.4 pytest-asyncio==0.23.7 -pytest-cov==5.0.0 +pytest-cov==6.0.0 pytest-mock==3.14.0 pytest-randomly==3.10.1 python-dateutil==2.9.0.post0 pyyaml==6.0.2 -regex==2024.7.24 +regex==2024.11.6 requests==2.32.3 sentencepiece==0.2.0 -six==1.16.0 +six==1.17.0 sniffio==1.3.1 sortedcontainers==2.4.0 -sqlalchemy==2.0.34 +sqlalchemy==2.0.36 tenacity==8.5.0 -tiktoken==0.7.0 -tokenizers==0.20.0 -tomli==2.0.1 -tqdm==4.66.5 +tiktoken==0.8.0 +tokenizers==0.21.0 +tomli==2.2.1 +tqdm==4.67.1 typing-extensions==4.12.2 typing-inspect==0.9.0 urllib3==1.26.20 vcrpy==6.0.1 -wrapt==1.16.0 -yarl==1.11.1 -zipp==3.20.1 +wrapt==1.17.0 +yarl==1.18.3 +zipp==3.21.0 diff --git a/.riot/requirements/d5098dd.txt b/.riot/requirements/d5098dd.txt new file mode 100644 index 00000000000..bb4ade61f8a --- /dev/null +++ b/.riot/requirements/d5098dd.txt @@ -0,0 +1,22 @@ +# +# This file is autogenerated by pip-compile with Python 3.13 +# by the following command: +# +# pip-compile --allow-unsafe --no-annotate .riot/requirements/d5098dd.in +# +attrs==24.2.0 +certifi==2024.8.30 +coverage[toml]==7.6.1 +elasticsearch7==7.17.12 +hypothesis==6.45.0 +iniconfig==2.0.0 +mock==5.1.0 +opentracing==2.4.0 +packaging==24.1 +pluggy==1.5.0 +pytest==8.3.3 +pytest-cov==5.0.0 +pytest-mock==3.14.0 +pytest-randomly==3.15.0 +sortedcontainers==2.4.0 +urllib3==1.26.20 diff --git a/.riot/requirements/d7dfbc2.txt b/.riot/requirements/d7dfbc2.txt new file mode 100644 index 00000000000..2bee6eee691 --- /dev/null +++ b/.riot/requirements/d7dfbc2.txt @@ -0,0 +1,22 @@ +# +# This file is autogenerated by pip-compile with Python 3.13 +# by the following command: +# +# pip-compile --allow-unsafe --no-annotate .riot/requirements/d7dfbc2.in +# +attrs==24.2.0 +coverage[toml]==7.6.1 +dnspython==2.7.0 +hypothesis==6.45.0 +iniconfig==2.0.0 +mock==5.1.0 +mongoengine==0.29.1 +opentracing==2.4.0 +packaging==24.1 +pluggy==1.5.0 +pymongo==4.10.1 +pytest==8.3.3 +pytest-cov==5.0.0 +pytest-mock==3.14.0 
+pytest-randomly==3.15.0 +sortedcontainers==2.4.0 diff --git a/.riot/requirements/d81ad99.txt b/.riot/requirements/d81ad99.txt new file mode 100644 index 00000000000..3efb0a138c2 --- /dev/null +++ b/.riot/requirements/d81ad99.txt @@ -0,0 +1,20 @@ +# +# This file is autogenerated by pip-compile with Python 3.13 +# by the following command: +# +# pip-compile --allow-unsafe --no-annotate .riot/requirements/d81ad99.in +# +attrs==24.2.0 +coverage[toml]==7.6.1 +hypothesis==6.45.0 +iniconfig==2.0.0 +mock==5.1.0 +opentracing==2.4.0 +packaging==24.1 +pluggy==1.5.0 +pytest==8.3.3 +pytest-asyncio==0.21.1 +pytest-cov==5.0.0 +pytest-mock==3.14.0 +pytest-randomly==3.15.0 +sortedcontainers==2.4.0 diff --git a/.riot/requirements/db78045.txt b/.riot/requirements/db78045.txt new file mode 100644 index 00000000000..7a92cc52123 --- /dev/null +++ b/.riot/requirements/db78045.txt @@ -0,0 +1,21 @@ +# +# This file is autogenerated by pip-compile with Python 3.13 +# by the following command: +# +# pip-compile --allow-unsafe --no-annotate .riot/requirements/db78045.in +# +attrs==24.2.0 +coverage[toml]==7.6.1 +elasticsearch2==2.5.1 +hypothesis==6.45.0 +iniconfig==2.0.0 +mock==5.1.0 +opentracing==2.4.0 +packaging==24.1 +pluggy==1.5.0 +pytest==8.3.3 +pytest-cov==5.0.0 +pytest-mock==3.14.0 +pytest-randomly==3.15.0 +sortedcontainers==2.4.0 +urllib3==1.26.20 diff --git a/.riot/requirements/dbc6a48.txt b/.riot/requirements/dbc6a48.txt new file mode 100644 index 00000000000..e29a7f2eeee --- /dev/null +++ b/.riot/requirements/dbc6a48.txt @@ -0,0 +1,35 @@ +# +# This file is autogenerated by pip-compile with Python 3.13 +# by the following command: +# +# pip-compile --no-annotate .riot/requirements/dbc6a48.in +# +amqp==5.3.1 +attrs==24.2.0 +billiard==4.2.1 +celery[redis]==5.4.0 +click==8.1.7 +click-didyoumean==0.3.1 +click-plugins==1.1.1 +click-repl==0.3.0 +coverage[toml]==7.6.9 +hypothesis==6.45.0 +iniconfig==2.0.0 +kombu==5.4.2 +mock==5.1.0 +more-itertools==8.10.0 +opentracing==2.4.0 
+packaging==24.2 +pluggy==1.5.0 +prompt-toolkit==3.0.48 +pytest==8.3.4 +pytest-cov==6.0.0 +pytest-mock==3.14.0 +pytest-randomly==3.16.0 +python-dateutil==2.9.0.post0 +redis==5.2.1 +six==1.17.0 +sortedcontainers==2.4.0 +tzdata==2024.2 +vine==5.1.0 +wcwidth==0.2.13 diff --git a/.riot/requirements/dbeb1d7.txt b/.riot/requirements/dbeb1d7.txt new file mode 100644 index 00000000000..bbde6777f1c --- /dev/null +++ b/.riot/requirements/dbeb1d7.txt @@ -0,0 +1,22 @@ +# +# This file is autogenerated by pip-compile with Python 3.13 +# by the following command: +# +# pip-compile --allow-unsafe --no-annotate .riot/requirements/dbeb1d7.in +# +attrs==24.2.0 +coverage[toml]==7.6.1 +hypothesis==6.45.0 +iniconfig==2.0.0 +mock==5.1.0 +opentracing==2.4.0 +packaging==24.1 +pluggy==1.5.0 +pytest==8.3.3 +pytest-cov==5.0.0 +pytest-mock==3.14.0 +pytest-randomly==3.15.0 +sortedcontainers==2.4.0 + +# The following packages are considered to be unsafe in a requirements file: +setuptools==75.1.0 diff --git a/.riot/requirements/ddd8721.txt b/.riot/requirements/ddd8721.txt new file mode 100644 index 00000000000..baa4f15e9af --- /dev/null +++ b/.riot/requirements/ddd8721.txt @@ -0,0 +1,20 @@ +# +# This file is autogenerated by pip-compile with Python 3.13 +# by the following command: +# +# pip-compile --allow-unsafe --no-annotate .riot/requirements/ddd8721.in +# +attrs==24.2.0 +coverage[toml]==7.6.1 +hypothesis==6.45.0 +iniconfig==2.0.0 +mock==5.1.0 +msgpack==1.1.0 +opentracing==2.4.0 +packaging==24.1 +pluggy==1.5.0 +pytest==8.3.3 +pytest-cov==5.0.0 +pytest-mock==3.14.0 +pytest-randomly==3.15.0 +sortedcontainers==2.4.0 diff --git a/.riot/requirements/dedea98.txt b/.riot/requirements/dedea98.txt new file mode 100644 index 00000000000..dca66df78da --- /dev/null +++ b/.riot/requirements/dedea98.txt @@ -0,0 +1,20 @@ +# +# This file is autogenerated by pip-compile with Python 3.13 +# by the following command: +# +# pip-compile --allow-unsafe --no-annotate .riot/requirements/dedea98.in +# +attrs==24.2.0 
+coverage[toml]==7.6.1 +hypothesis==6.45.0 +iniconfig==2.0.0 +mock==5.1.0 +opentracing==2.4.0 +packaging==24.1 +pluggy==1.5.0 +pytest==8.3.3 +pytest-cov==5.0.0 +pytest-mock==3.14.0 +pytest-randomly==3.15.0 +sortedcontainers==2.4.0 +structlog==24.4.0 diff --git a/.riot/requirements/df7a937.txt b/.riot/requirements/df7a937.txt new file mode 100644 index 00000000000..35a49fc7ae3 --- /dev/null +++ b/.riot/requirements/df7a937.txt @@ -0,0 +1,20 @@ +# +# This file is autogenerated by pip-compile with Python 3.13 +# by the following command: +# +# pip-compile --allow-unsafe --no-annotate .riot/requirements/df7a937.in +# +attrs==24.2.0 +coverage[toml]==7.6.1 +hypothesis==6.45.0 +iniconfig==2.0.0 +mock==5.1.0 +msgpack==1.1.0 +opentracing==2.4.0 +packaging==24.1 +pluggy==1.5.0 +pytest==8.3.3 +pytest-cov==5.0.0 +pytest-mock==3.14.0 +pytest-randomly==3.15.0 +sortedcontainers==2.4.0 diff --git a/.riot/requirements/e06abee.txt b/.riot/requirements/e06abee.txt new file mode 100644 index 00000000000..e7be89f2738 --- /dev/null +++ b/.riot/requirements/e06abee.txt @@ -0,0 +1,38 @@ +# +# This file is autogenerated by pip-compile with Python 3.13 +# by the following command: +# +# pip-compile --allow-unsafe --no-annotate .riot/requirements/e06abee.in +# +annotated-types==0.7.0 +attrs==24.2.0 +blinker==1.8.2 +certifi==2024.8.30 +charset-normalizer==3.3.2 +click==8.1.7 +coverage[toml]==7.6.1 +flask==3.0.3 +flask-openapi3==4.0.1 +hypothesis==6.45.0 +idna==3.10 +importlib-metadata==8.5.0 +iniconfig==2.0.0 +itsdangerous==2.2.0 +jinja2==3.1.4 +markupsafe==2.1.5 +mock==5.1.0 +opentracing==2.4.0 +packaging==24.1 +pluggy==1.5.0 +pydantic==2.9.2 +pydantic-core==2.23.4 +pytest==8.3.3 +pytest-cov==5.0.0 +pytest-mock==3.14.0 +pytest-randomly==3.15.0 +requests==2.32.3 +sortedcontainers==2.4.0 +typing-extensions==4.12.2 +urllib3==1.26.20 +werkzeug==3.0.4 +zipp==3.20.2 diff --git a/.riot/requirements/e20152c.txt b/.riot/requirements/e20152c.txt new file mode 100644 index 00000000000..3aeacecfdcd --- 
/dev/null +++ b/.riot/requirements/e20152c.txt @@ -0,0 +1,20 @@ +# +# This file is autogenerated by pip-compile with Python 3.13 +# by the following command: +# +# pip-compile --allow-unsafe --no-annotate .riot/requirements/e20152c.in +# +attrs==24.2.0 +coverage[toml]==7.6.1 +hypothesis==6.45.0 +iniconfig==2.0.0 +mock==5.1.0 +msgpack==1.1.0 +opentracing==2.4.0 +packaging==24.1 +pluggy==1.5.0 +pytest==8.3.3 +pytest-cov==5.0.0 +pytest-mock==3.14.0 +pytest-randomly==3.15.0 +sortedcontainers==2.4.0 diff --git a/.riot/requirements/e2bf559.txt b/.riot/requirements/e2bf559.txt new file mode 100644 index 00000000000..cef46e50c2d --- /dev/null +++ b/.riot/requirements/e2bf559.txt @@ -0,0 +1,23 @@ +# +# This file is autogenerated by pip-compile with Python 3.13 +# by the following command: +# +# pip-compile --allow-unsafe --no-annotate .riot/requirements/e2bf559.in +# +attrs==24.2.0 +certifi==2024.8.30 +coverage[toml]==7.6.1 +elastic-transport==8.15.0 +elasticsearch==8.15.1 +hypothesis==6.45.0 +iniconfig==2.0.0 +mock==5.1.0 +opentracing==2.4.0 +packaging==24.1 +pluggy==1.5.0 +pytest==8.3.3 +pytest-cov==5.0.0 +pytest-mock==3.14.0 +pytest-randomly==3.15.0 +sortedcontainers==2.4.0 +urllib3==2.2.3 diff --git a/.riot/requirements/e5cd460.txt b/.riot/requirements/e5cd460.txt new file mode 100644 index 00000000000..d14867cc689 --- /dev/null +++ b/.riot/requirements/e5cd460.txt @@ -0,0 +1,22 @@ +# +# This file is autogenerated by pip-compile with Python 3.12 +# by the following command: +# +# pip-compile --allow-unsafe --no-annotate .riot/requirements/e5cd460.in +# +attrs==24.3.0 +coverage[toml]==7.6.9 +hypothesis==6.45.0 +iniconfig==2.0.0 +mock==5.1.0 +msgpack==1.1.0 +opentracing==2.4.0 +packaging==24.2 +pluggy==1.5.0 +py-cpuinfo==9.0.0 +pytest==8.3.4 +pytest-benchmark==4.0.0 +pytest-cov==6.0.0 +pytest-mock==3.14.0 +pytest-randomly==3.16.0 +sortedcontainers==2.4.0 diff --git a/.riot/requirements/e75aea6.txt b/.riot/requirements/e75aea6.txt index 4d191b83adc..704e2fecf98 100644 --- 
a/.riot/requirements/e75aea6.txt +++ b/.riot/requirements/e75aea6.txt @@ -5,20 +5,20 @@ # pip-compile --allow-unsafe --no-annotate .riot/requirements/e75aea6.in # attrs==24.2.0 -coverage[toml]==7.6.1 +coverage[toml]==7.6.9 exceptiongroup==1.2.2 hypothesis==6.45.0 -importlib-metadata==8.4.0 +importlib-metadata==8.5.0 iniconfig==2.0.0 mariadb==1.0.11 mock==5.1.0 opentracing==2.4.0 -packaging==24.1 +packaging==24.2 pluggy==1.5.0 -pytest==8.3.2 -pytest-cov==5.0.0 +pytest==8.3.4 +pytest-cov==6.0.0 pytest-mock==3.14.0 -pytest-randomly==3.15.0 +pytest-randomly==3.16.0 sortedcontainers==2.4.0 -tomli==2.0.1 -zipp==3.20.0 +tomli==2.2.1 +zipp==3.21.0 diff --git a/.riot/requirements/ed78a8f.txt b/.riot/requirements/ed78a8f.txt index 95a78831a4d..d9c1cb0b78f 100644 --- a/.riot/requirements/ed78a8f.txt +++ b/.riot/requirements/ed78a8f.txt @@ -5,16 +5,16 @@ # pip-compile --allow-unsafe --no-annotate .riot/requirements/ed78a8f.in # attrs==24.2.0 -coverage[toml]==7.6.1 +coverage[toml]==7.6.9 hypothesis==6.45.0 iniconfig==2.0.0 mock==5.1.0 opentracing==2.4.0 -packaging==24.1 +packaging==24.2 pluggy==1.5.0 -pyodbc==5.1.0 -pytest==8.3.2 -pytest-cov==5.0.0 +pyodbc==5.2.0 +pytest==8.3.4 +pytest-cov==6.0.0 pytest-mock==3.14.0 -pytest-randomly==3.15.0 +pytest-randomly==3.16.0 sortedcontainers==2.4.0 diff --git a/.riot/requirements/ee48b16.txt b/.riot/requirements/ee48b16.txt new file mode 100644 index 00000000000..116921f222d --- /dev/null +++ b/.riot/requirements/ee48b16.txt @@ -0,0 +1,22 @@ +# +# This file is autogenerated by pip-compile with Python 3.13 +# by the following command: +# +# pip-compile --allow-unsafe --no-annotate .riot/requirements/ee48b16.in +# +attrs==24.2.0 +certifi==2024.8.30 +coverage[toml]==7.6.1 +elasticsearch==7.13.4 +hypothesis==6.45.0 +iniconfig==2.0.0 +mock==5.1.0 +opentracing==2.4.0 +packaging==24.1 +pluggy==1.5.0 +pytest==8.3.3 +pytest-cov==5.0.0 +pytest-mock==3.14.0 +pytest-randomly==3.15.0 +sortedcontainers==2.4.0 +urllib3==1.26.20 diff --git 
a/.riot/requirements/f20c964.txt b/.riot/requirements/f20c964.txt new file mode 100644 index 00000000000..ab4cf486d17 --- /dev/null +++ b/.riot/requirements/f20c964.txt @@ -0,0 +1,30 @@ +# +# This file is autogenerated by pip-compile with Python 3.13 +# by the following command: +# +# pip-compile --allow-unsafe --no-annotate .riot/requirements/f20c964.in +# +attrs==24.2.0 +blinker==1.8.2 +cachelib==0.9.0 +click==8.1.7 +coverage[toml]==7.6.1 +flask==3.0.3 +flask-caching==2.3.0 +hypothesis==6.45.0 +iniconfig==2.0.0 +itsdangerous==2.2.0 +jinja2==3.1.4 +markupsafe==2.1.5 +mock==5.1.0 +opentracing==2.4.0 +packaging==24.1 +pluggy==1.5.0 +pytest==8.3.3 +pytest-cov==5.0.0 +pytest-mock==3.14.0 +pytest-randomly==3.15.0 +python-memcached==1.62 +redis==5.1.1 +sortedcontainers==2.4.0 +werkzeug==3.0.4 diff --git a/.riot/requirements/f339e99.txt b/.riot/requirements/f339e99.txt new file mode 100644 index 00000000000..b300c0bc5b4 --- /dev/null +++ b/.riot/requirements/f339e99.txt @@ -0,0 +1,19 @@ +# +# This file is autogenerated by pip-compile with Python 3.13 +# by the following command: +# +# pip-compile --allow-unsafe --no-annotate .riot/requirements/f339e99.in +# +attrs==24.2.0 +coverage[toml]==7.6.1 +hypothesis==6.45.0 +iniconfig==2.0.0 +mock==5.1.0 +opentracing==2.4.0 +packaging==24.1 +pluggy==1.5.0 +pytest==8.3.3 +pytest-cov==5.0.0 +pytest-mock==3.14.0 +pytest-randomly==3.15.0 +sortedcontainers==2.4.0 diff --git a/.riot/requirements/f33b994.txt b/.riot/requirements/f33b994.txt new file mode 100644 index 00000000000..28facac819d --- /dev/null +++ b/.riot/requirements/f33b994.txt @@ -0,0 +1,23 @@ +# +# This file is autogenerated by pip-compile with Python 3.13 +# by the following command: +# +# pip-compile --allow-unsafe --no-annotate .riot/requirements/f33b994.in +# +attrs==24.2.0 +click==8.1.7 +coverage[toml]==7.6.1 +hypothesis==6.45.0 +iniconfig==2.0.0 +mock==5.1.0 +opentracing==2.4.0 +packaging==24.1 +pluggy==1.5.0 +pytest==8.3.3 +pytest-asyncio==0.21.1 +pytest-cov==5.0.0 
+pytest-mock==3.14.0 +pytest-randomly==3.15.0 +redis==5.1.1 +rq==1.16.2 +sortedcontainers==2.4.0 diff --git a/.riot/requirements/f46a802.txt b/.riot/requirements/f46a802.txt new file mode 100644 index 00000000000..46033d5a506 --- /dev/null +++ b/.riot/requirements/f46a802.txt @@ -0,0 +1,20 @@ +# +# This file is autogenerated by pip-compile with Python 3.13 +# by the following command: +# +# pip-compile --allow-unsafe --no-annotate .riot/requirements/f46a802.in +# +attrs==24.2.0 +coverage[toml]==7.6.1 +hypothesis==6.45.0 +iniconfig==2.0.0 +mock==5.1.0 +msgpack==1.1.0 +opentracing==2.4.0 +packaging==24.1 +pluggy==1.5.0 +pytest==8.3.3 +pytest-cov==5.0.0 +pytest-mock==3.14.0 +pytest-randomly==3.15.0 +sortedcontainers==2.4.0 diff --git a/.riot/requirements/f4fafb3.txt b/.riot/requirements/f4fafb3.txt new file mode 100644 index 00000000000..09db801e27b --- /dev/null +++ b/.riot/requirements/f4fafb3.txt @@ -0,0 +1,48 @@ +# +# This file is autogenerated by pip-compile with Python 3.13 +# by the following command: +# +# pip-compile --allow-unsafe --no-annotate .riot/requirements/f4fafb3.in +# +attrs==24.2.0 +certifi==2024.8.30 +charset-normalizer==2.1.1 +click==8.1.7 +coverage[toml]==7.6.1 +deprecated==1.2.14 +flask==2.1.3 +gevent==24.2.1 +greenlet==3.1.1 +hypothesis==6.45.0 +idna==3.10 +importlib-metadata==8.0.0 +iniconfig==2.0.0 +itsdangerous==2.2.0 +jinja2==3.1.4 +markupsafe==2.0.1 +mock==5.1.0 +opentelemetry-api==1.26.0 +opentelemetry-instrumentation==0.47b0 +opentelemetry-instrumentation-flask==0.47b0 +opentelemetry-instrumentation-wsgi==0.47b0 +opentelemetry-semantic-conventions==0.47b0 +opentelemetry-util-http==0.47b0 +opentracing==2.4.0 +packaging==24.1 +pluggy==1.5.0 +pytest==8.3.3 +pytest-asyncio==0.21.1 +pytest-cov==5.0.0 +pytest-mock==3.14.0 +pytest-randomly==3.15.0 +requests==2.28.1 +sortedcontainers==2.4.0 +urllib3==1.26.20 +werkzeug==2.1.2 +wrapt==1.16.0 +zipp==3.20.2 +zope-event==5.0 +zope-interface==7.0.3 + +# The following packages are considered to be 
unsafe in a requirements file: +setuptools==75.1.0 diff --git a/.riot/requirements/f7ca81b.txt b/.riot/requirements/f7ca81b.txt index 8a63c4f0856..14de8077cae 100644 --- a/.riot/requirements/f7ca81b.txt +++ b/.riot/requirements/f7ca81b.txt @@ -2,21 +2,21 @@ # This file is autogenerated by pip-compile with Python 3.7 # by the following command: # -# pip-compile --config=pyproject.toml --no-annotate --resolver=backtracking .riot/requirements/f7ca81b.in +# pip-compile --allow-unsafe --config=pyproject.toml --no-annotate --resolver=backtracking .riot/requirements/f7ca81b.in # asyncpg==0.28.0 -attrs==23.2.0 +attrs==24.2.0 coverage[toml]==7.2.7 -exceptiongroup==1.2.0 +exceptiongroup==1.2.2 hypothesis==6.45.0 importlib-metadata==6.7.0 iniconfig==2.0.0 mock==5.1.0 opentracing==2.4.0 -packaging==23.2 +packaging==24.0 pluggy==1.2.0 pytest==7.4.4 -pytest-asyncio==0.21.1 +pytest-asyncio==0.21.2 pytest-cov==4.1.0 pytest-mock==3.11.1 pytest-randomly==3.12.0 diff --git a/.riot/requirements/fa9267f.txt b/.riot/requirements/fa9267f.txt index 80c862d47e3..675a460f3bc 100644 --- a/.riot/requirements/fa9267f.txt +++ b/.riot/requirements/fa9267f.txt @@ -2,20 +2,20 @@ # This file is autogenerated by pip-compile with Python 3.12 # by the following command: # -# pip-compile --no-annotate .riot/requirements/fa9267f.in +# pip-compile --allow-unsafe --no-annotate .riot/requirements/fa9267f.in # -asyncpg==0.29.0 -attrs==23.2.0 -coverage[toml]==7.5.4 +asyncpg==0.30.0 +attrs==24.2.0 +coverage[toml]==7.6.9 hypothesis==6.45.0 iniconfig==2.0.0 mock==5.1.0 opentracing==2.4.0 -packaging==24.1 +packaging==24.2 pluggy==1.5.0 -pytest==8.2.2 +pytest==8.3.4 pytest-asyncio==0.21.2 -pytest-cov==5.0.0 +pytest-cov==6.0.0 pytest-mock==3.14.0 -pytest-randomly==3.15.0 +pytest-randomly==3.16.0 sortedcontainers==2.4.0 diff --git a/.riot/requirements/fb50881.txt b/.riot/requirements/fb50881.txt index f39cd8b01b4..6ec4fc75d44 100644 --- a/.riot/requirements/fb50881.txt +++ b/.riot/requirements/fb50881.txt @@ -5,18 
+5,18 @@ # pip-compile --allow-unsafe --no-annotate .riot/requirements/fb50881.in # attrs==24.2.0 -coverage[toml]==7.6.1 +coverage[toml]==7.6.9 exceptiongroup==1.2.2 hypothesis==6.45.0 iniconfig==2.0.0 -mariadb==1.1.10 +mariadb==1.1.11 mock==5.1.0 opentracing==2.4.0 -packaging==24.1 +packaging==24.2 pluggy==1.5.0 -pytest==8.3.2 -pytest-cov==5.0.0 +pytest==8.3.4 +pytest-cov==6.0.0 pytest-mock==3.14.0 -pytest-randomly==3.15.0 +pytest-randomly==3.16.0 sortedcontainers==2.4.0 -tomli==2.0.1 +tomli==2.2.1 diff --git a/.riot/requirements/fbee8ab.txt b/.riot/requirements/fbee8ab.txt new file mode 100644 index 00000000000..df12821215c --- /dev/null +++ b/.riot/requirements/fbee8ab.txt @@ -0,0 +1,25 @@ +# +# This file is autogenerated by pip-compile with Python 3.13 +# by the following command: +# +# pip-compile --allow-unsafe --no-annotate .riot/requirements/fbee8ab.in +# +attrs==24.2.0 +certifi==2024.8.30 +charset-normalizer==3.3.2 +coverage[toml]==7.6.1 +hypothesis==6.45.0 +idna==3.10 +iniconfig==2.0.0 +mock==5.1.0 +opensearch-py[requests]==2.0.1 +opentracing==2.4.0 +packaging==24.1 +pluggy==1.5.0 +pytest==8.3.3 +pytest-cov==5.0.0 +pytest-mock==3.14.0 +pytest-randomly==3.15.0 +requests==2.32.3 +sortedcontainers==2.4.0 +urllib3==1.26.20 diff --git a/ddtrace/_monkey.py b/ddtrace/_monkey.py index b0c17213130..488211e46b1 100644 --- a/ddtrace/_monkey.py +++ b/ddtrace/_monkey.py @@ -94,6 +94,7 @@ "yaaredis": True, "asyncpg": True, "aws_lambda": True, # patch only in AWS Lambda environments + "azure_functions": True, "tornado": False, "openai": True, "langchain": True, @@ -143,6 +144,7 @@ "futures": ("concurrent.futures.thread",), "vertica": ("vertica_python",), "aws_lambda": ("datadog_lambda",), + "azure_functions": ("azure.functions",), "httplib": ("http.client",), "kafka": ("confluent_kafka",), "google_generativeai": ("google.generativeai",), @@ -171,17 +173,22 @@ def on_import(hook): path = "%s.%s" % (prefix, module) try: imported_module = importlib.import_module(path) + 
imported_module.patch() + if hasattr(imported_module, "patch_submodules"): + imported_module.patch_submodules(patch_indicator) except Exception as e: if raise_errors: raise - error_msg = "failed to import ddtrace module %r when patching on import" % (path,) - log.error(error_msg, exc_info=True) - telemetry.telemetry_writer.add_integration(module, False, PATCH_MODULES.get(module) is True, error_msg) + log.error( + "failed to enable ddtrace support for %s: %s", + module, + str(e), + ) + telemetry.telemetry_writer.add_integration(module, False, PATCH_MODULES.get(module) is True, str(e)) telemetry.telemetry_writer.add_count_metric( "tracers", "integration_errors", 1, (("integration_name", module), ("error_type", type(e).__name__)) ) else: - imported_module.patch() if hasattr(imported_module, "get_versions"): versions = imported_module.get_versions() for name, v in versions.items(): @@ -194,9 +201,6 @@ def on_import(hook): module, True, PATCH_MODULES.get(module) is True, "", version=version ) - if hasattr(imported_module, "patch_submodules"): - imported_module.patch_submodules(patch_indicator) - return on_import diff --git a/ddtrace/_trace/trace_handlers.py b/ddtrace/_trace/trace_handlers.py index 1807ae220f6..7c2ba02d6b4 100644 --- a/ddtrace/_trace/trace_handlers.py +++ b/ddtrace/_trace/trace_handlers.py @@ -28,6 +28,7 @@ from ddtrace.ext import http from ddtrace.internal import core from ddtrace.internal.compat import maybe_stringify +from ddtrace.internal.compat import parse from ddtrace.internal.constants import COMPONENT from ddtrace.internal.constants import FLASK_ENDPOINT from ddtrace.internal.constants import FLASK_URL_RULE @@ -675,6 +676,40 @@ def _set_span_pointer(span: "Span", span_pointer_description: _SpanPointerDescri ) +def _set_azure_function_tags(span, azure_functions_config, function_name, trigger): + span.set_tag_str(COMPONENT, azure_functions_config.integration_name) + span.set_tag_str(SPAN_KIND, SpanKind.SERVER) + 
span.set_tag_str("aas.function.name", function_name) # codespell:ignore + span.set_tag_str("aas.function.trigger", trigger) # codespell:ignore + + +def _on_azure_functions_request_span_modifier(ctx, azure_functions_config, req): + span = ctx.get_item("req_span") + parsed_url = parse.urlparse(req.url) + path = parsed_url.path + span.resource = f"{req.method} {path}" + trace_utils.set_http_meta( + span, + azure_functions_config, + method=req.method, + url=req.url, + request_headers=req.headers, + request_body=req.get_body(), + route=path, + ) + + +def _on_azure_functions_start_response(ctx, azure_functions_config, res, function_name, trigger): + span = ctx.get_item("req_span") + _set_azure_function_tags(span, azure_functions_config, function_name, trigger) + trace_utils.set_http_meta( + span, + azure_functions_config, + status_code=res.status_code if res else None, + response_headers=res.headers if res else None, + ) + + def listen(): core.on("wsgi.request.prepare", _on_request_prepare) core.on("wsgi.request.prepared", _on_request_prepared) @@ -723,6 +758,8 @@ def listen(): core.on("botocore.kinesis.GetRecords.post", _on_botocore_kinesis_getrecords_post) core.on("redis.async_command.post", _on_redis_command_post) core.on("redis.command.post", _on_redis_command_post) + core.on("azure.functions.request_call_modifier", _on_azure_functions_request_span_modifier) + core.on("azure.functions.start_response", _on_azure_functions_start_response) core.on("test_visibility.enable", _on_test_visibility_enable) core.on("test_visibility.disable", _on_test_visibility_disable) @@ -754,6 +791,7 @@ def listen(): "rq.worker.perform_job", "rq.job.perform", "rq.job.fetch_many", + "azure.functions.patched_route_request", ): core.on(f"context.started.start_span.{context_name}", _start_span) diff --git a/ddtrace/_trace/tracer.py b/ddtrace/_trace/tracer.py index 8c82efbdf37..6027976d6dc 100644 --- a/ddtrace/_trace/tracer.py +++ b/ddtrace/_trace/tracer.py @@ -236,7 +236,9 @@ def __init__( 
self._iast_enabled = asm_config._iast_enabled self._appsec_standalone_enabled = asm_config._appsec_standalone_enabled self._dogstatsd_url = agent.get_stats_url() if dogstatsd_url is None else dogstatsd_url - self._apm_opt_out = (self._asm_enabled or self._iast_enabled) and self._appsec_standalone_enabled + self._apm_opt_out = self._appsec_standalone_enabled and ( + self._asm_enabled or self._iast_enabled or config._sca_enabled + ) if self._apm_opt_out: self.enabled = False # Disable compute stats (neither agent or tracer should compute them) @@ -498,7 +500,7 @@ def configure( if appsec_standalone_enabled is not None: self._appsec_standalone_enabled = asm_config._appsec_standalone_enabled = appsec_standalone_enabled - if self._appsec_standalone_enabled and (self._asm_enabled or self._iast_enabled): + if self._appsec_standalone_enabled and (self._asm_enabled or self._iast_enabled or config._sca_enabled): self._apm_opt_out = True self.enabled = False # Disable compute stats (neither agent or tracer should compute them) diff --git a/ddtrace/appsec/_common_module_patches.py b/ddtrace/appsec/_common_module_patches.py index a5ab2d1533d..e7ce12d13e9 100644 --- a/ddtrace/appsec/_common_module_patches.py +++ b/ddtrace/appsec/_common_module_patches.py @@ -60,8 +60,10 @@ def wrapped_read_F3E51D71B4EC16EF(original_read_callable, instance, args, kwargs """ wrapper for _io.BytesIO and _io.StringIO read function """ + from ddtrace.appsec._iast._iast_request_context import is_iast_request_enabled + result = original_read_callable(*args, **kwargs) - if asm_config._iast_enabled: + if asm_config._iast_enabled and is_iast_request_enabled(): from ddtrace.appsec._iast._taint_tracking import OriginType from ddtrace.appsec._iast._taint_tracking import Source from ddtrace.appsec._iast._taint_tracking import get_tainted_ranges @@ -87,7 +89,9 @@ def wrapped_open_CFDDB7ABBA9081B6(original_open_callable, instance, args, kwargs """ wrapper for open file function """ - if 
asm_config._iast_enabled: + from ddtrace.appsec._iast._iast_request_context import is_iast_request_enabled + + if asm_config._iast_enabled and is_iast_request_enabled(): try: from ddtrace.appsec._iast.taint_sinks.path_traversal import check_and_report_path_traversal @@ -176,7 +180,9 @@ def wrapped_request_D8CB81E472AF98A2(original_request_callable, instance, args, wrapper for third party requests.request function https://requests.readthedocs.io """ - if asm_config._iast_enabled: + from ddtrace.appsec._iast._iast_request_context import is_iast_request_enabled + + if asm_config._iast_enabled and is_iast_request_enabled(): from ddtrace.appsec._iast.taint_sinks.ssrf import _iast_report_ssrf _iast_report_ssrf(original_request_callable, *args, **kwargs) @@ -216,7 +222,9 @@ def wrapped_system_5542593D237084A7(original_command_callable, instance, args, k """ command = args[0] if args else kwargs.get("command", None) if command is not None: - if asm_config._iast_enabled: + from ddtrace.appsec._iast._iast_request_context import is_iast_request_enabled + + if asm_config._iast_enabled and is_iast_request_enabled(): from ddtrace.appsec._iast.taint_sinks.command_injection import _iast_report_cmdi _iast_report_cmdi(command) diff --git a/ddtrace/appsec/_constants.py b/ddtrace/appsec/_constants.py index a127ebb6615..83cb53e78ff 100644 --- a/ddtrace/appsec/_constants.py +++ b/ddtrace/appsec/_constants.py @@ -129,6 +129,10 @@ class IAST(metaclass=Constant_Class): ENV_DEBUG: Literal["DD_IAST_DEBUG"] = "DD_IAST_DEBUG" ENV_PROPAGATION_DEBUG: Literal["DD_IAST_PROPAGATION_DEBUG"] = "DD_IAST_PROPAGATION_DEBUG" ENV_REQUEST_SAMPLING: Literal["DD_IAST_REQUEST_SAMPLING"] = "DD_IAST_REQUEST_SAMPLING" + DD_IAST_VULNERABILITIES_PER_REQUEST: Literal[ + "DD_IAST_VULNERABILITIES_PER_REQUEST" + ] = "DD_IAST_VULNERABILITIES_PER_REQUEST" + DD_IAST_MAX_CONCURRENT_REQUESTS: Literal["DD_IAST_MAX_CONCURRENT_REQUESTS"] = "DD_IAST_MAX_CONCURRENT_REQUESTS" ENV_TELEMETRY_REPORT_LVL: 
Literal["DD_IAST_TELEMETRY_VERBOSITY"] = "DD_IAST_TELEMETRY_VERBOSITY" LAZY_TAINT: Literal["_DD_IAST_LAZY_TAINT"] = "_DD_IAST_LAZY_TAINT" JSON: Literal["_dd.iast.json"] = "_dd.iast.json" @@ -324,7 +328,6 @@ class DEFAULT(metaclass=Constant_Class): class EXPLOIT_PREVENTION(metaclass=Constant_Class): - STACK_TRACES: Literal["_dd.stack"] = "_dd.stack" STACK_TRACE_ID: Literal["stack_id"] = "stack_id" EP_ENABLED: Literal["DD_APPSEC_RASP_ENABLED"] = "DD_APPSEC_RASP_ENABLED" STACK_TRACE_ENABLED: Literal["DD_APPSEC_STACK_TRACE_ENABLED"] = "DD_APPSEC_STACK_TRACE_ENABLED" @@ -354,3 +357,9 @@ class FINGERPRINTING(metaclass=Constant_Class): HEADER = PREFIX + "http.header" NETWORK = PREFIX + "http.network" SESSION = PREFIX + "session" + + +class STACK_TRACE(metaclass=Constant_Class): + RASP = "exploit" + IAST = "vulnerability" + TAG: Literal["_dd.stack"] = "_dd.stack" diff --git a/ddtrace/appsec/_exploit_prevention/stack_traces.py b/ddtrace/appsec/_exploit_prevention/stack_traces.py index 8276d0c51bc..8d32a028ab9 100644 --- a/ddtrace/appsec/_exploit_prevention/stack_traces.py +++ b/ddtrace/appsec/_exploit_prevention/stack_traces.py @@ -3,36 +3,48 @@ from typing import Any from typing import Dict from typing import Iterable -from typing import List from typing import Optional from ddtrace._trace.span import Span -from ddtrace.appsec._constants import EXPLOIT_PREVENTION +from ddtrace.appsec._constants import STACK_TRACE from ddtrace.settings.asm import config as asm_config import ddtrace.tracer def report_stack( - message: str, span: Optional[Span] = None, crop_stack: Optional[str] = None, stack_id: Optional[str] = None -): + message: Optional[str] = None, + span: Optional[Span] = None, + crop_stack: Optional[str] = None, + stack_id: Optional[str] = None, + namespace: str = STACK_TRACE.RASP, +) -> bool: """ Report a stack trace to the current span. This is used to report stack traces for exploit prevention. Return the stack id for the reported stack trace to link it in triggers. 
""" - if not asm_config._ep_enabled or not asm_config._ep_stack_trace_enabled: - return None + if not asm_config._ep_stack_trace_enabled: + # stack trace report disabled + return False + if namespace == STACK_TRACE.RASP and not (asm_config._asm_enabled and asm_config._ep_enabled): + # exploit prevention stack trace with ep disabled + return False + if namespace == STACK_TRACE.IAST and not (asm_config._iast_enabled): + # iast stack trace with iast disabled + return False + if span is None: span = ddtrace.tracer.current_span() if span is None or stack_id is None: - return None + return False root_span = span._local_root or span - appsec_traces = root_span.get_struct_tag(EXPLOIT_PREVENTION.STACK_TRACES) or {} - exploit: List[Any] = appsec_traces.get("exploit", []) + appsec_traces = root_span.get_struct_tag(STACK_TRACE.TAG) or {} + current_list = appsec_traces.get(namespace, []) + total_length = len(current_list) # Do not report more than the maximum number of stack traces - if asm_config._ep_max_stack_traces and len(exploit) >= asm_config._ep_max_stack_traces: - return None + if asm_config._ep_max_stack_traces and total_length >= asm_config._ep_max_stack_traces: + return False stack = inspect.stack() if crop_stack is not None: @@ -43,8 +55,9 @@ def report_stack( res: Dict[str, Any] = { "language": "python", "id": stack_id, - "message": message, } + if message is not None: + res["message"] = message if len(stack) > asm_config._ep_max_stack_trace_depth > 0: top_stack = int(asm_config._ep_max_stack_trace_depth * asm_config._ep_stack_top_percent / 100) bottom_stack = asm_config._ep_max_stack_trace_depth - top_stack @@ -61,6 +74,7 @@ def report_stack( for i in iterator ] res["frames"] = frames - exploit.append(res) - appsec_traces["exploit"] = exploit - root_span.set_struct_tag(EXPLOIT_PREVENTION.STACK_TRACES, appsec_traces) + current_list.append(res) + appsec_traces[namespace] = current_list + root_span.set_struct_tag(STACK_TRACE.TAG, appsec_traces) + return True diff 
--git a/ddtrace/appsec/_iast/__init__.py b/ddtrace/appsec/_iast/__init__.py index 5aab86cf783..724819b17df 100644 --- a/ddtrace/appsec/_iast/__init__.py +++ b/ddtrace/appsec/_iast/__init__.py @@ -1,4 +1,4 @@ -"""IAST (interactive application security testing) analyzes code for security vulnerabilities. +"""IAST (Interactive Application Security Testing) analyzes code for security vulnerabilities. To add new vulnerabilities analyzers (Taint sink) we should update `IAST_PATCH` in `ddtrace/appsec/iast/_patch_modules.py` @@ -29,10 +29,13 @@ def wrapped_function(wrapped, instance, args, kwargs): """ # noqa: RST201, RST213, RST210 import inspect +import os import sys +import types from ddtrace.internal.logger import get_logger from ddtrace.internal.module import ModuleWatchdog +from ddtrace.settings.asm import config as asm_config from ._overhead_control_engine import OverheadControl from ._utils import _is_iast_enabled @@ -59,7 +62,7 @@ def ddtrace_iast_flask_patch(): module_name = inspect.currentframe().f_back.f_globals["__name__"] module = sys.modules[module_name] try: - module_path, patched_ast = astpatch_module(module, remove_flask_run=True) + module_path, patched_ast = astpatch_module(module) except Exception: log.debug("Unexpected exception while AST patching", exc_info=True) return @@ -69,8 +72,12 @@ def ddtrace_iast_flask_patch(): return compiled_code = compile(patched_ast, module_path, "exec") + # creating a new module environment to execute the patched code from scratch + new_module = types.ModuleType(module_name) + module.__dict__.clear() + module.__dict__.update(new_module.__dict__) + # executing the compiled code in the new module environment exec(compiled_code, module.__dict__) # nosec B102 - sys.modules[module_name] = compiled_code _iast_propagation_enabled = False @@ -91,6 +98,26 @@ def enable_iast_propagation(): _iast_propagation_enabled = True +def _iast_pytest_activation(): + global _iast_propagation_enabled + global oce + if 
_iast_propagation_enabled: + return + os.environ["DD_IAST_ENABLED"] = os.environ.get("DD_IAST_ENABLED") or "1" + os.environ["_DD_IAST_USE_ROOT_SPAN"] = os.environ.get("_DD_IAST_USE_ROOT_SPAN") or "true" + os.environ["DD_IAST_REQUEST_SAMPLING"] = os.environ.get("DD_IAST_REQUEST_SAMPLING") or "100.0" + os.environ["_DD_APPSEC_DEDUPLICATION_ENABLED"] = os.environ.get("_DD_APPSEC_DEDUPLICATION_ENABLED") or "false" + os.environ["DD_IAST_VULNERABILITIES_PER_REQUEST"] = os.environ.get("DD_IAST_VULNERABILITIES_PER_REQUEST") or "1000" + os.environ["DD_IAST_MAX_CONCURRENT_REQUESTS"] = os.environ.get("DD_IAST_MAX_CONCURRENT_REQUESTS") or "1000" + + asm_config._iast_request_sampling = 100.0 + asm_config._deduplication_enabled = False + asm_config._iast_max_vulnerabilities_per_requests = 1000 + asm_config._iast_max_concurrent_requests = 1000 + enable_iast_propagation() + oce.reconfigure() + + def disable_iast_propagation(): """Remove IAST AST patching from the ModuleWatchdog. Only for testing proposes""" # DEV: These imports are here to avoid _ast.ast_patching import in the top level diff --git a/ddtrace/appsec/_iast/_ast/ast_patching.py b/ddtrace/appsec/_iast/_ast/ast_patching.py index 6a1e4c2d3b6..7e2258bd556 100644 --- a/ddtrace/appsec/_iast/_ast/ast_patching.py +++ b/ddtrace/appsec/_iast/_ast/ast_patching.py @@ -3,7 +3,6 @@ import ast import codecs import os -import re from sys import builtin_module_names from sys import version_info import textwrap @@ -388,27 +387,6 @@ def visit_ast( return modified_ast -_FLASK_INSTANCE_REGEXP = re.compile(r"(\S*)\s*=.*Flask\(.*") - - -def _remove_flask_run(text: Text) -> Text: - """ - Find and remove flask app.run() call. This is used for patching - the app.py file and exec'ing to replace the module without creating - a new instance. 
- """ - flask_instance_name = re.search(_FLASK_INSTANCE_REGEXP, text) - if not flask_instance_name: - return text - groups = flask_instance_name.groups() - if not groups: - return text - - instance_name = groups[-1] - new_text = re.sub(instance_name + r"\.run\(.*\)", "pass", text) - return new_text - - _DIR_WRAPPER = textwrap.dedent( f""" @@ -442,7 +420,7 @@ def {_PREFIX}set_dir_filter(): ) -def astpatch_module(module: ModuleType, remove_flask_run: bool = False) -> Tuple[str, Optional[ast.Module]]: +def astpatch_module(module: ModuleType) -> Tuple[str, Optional[ast.Module]]: module_name = module.__name__ module_origin = origin(module) @@ -482,9 +460,6 @@ def astpatch_module(module: ModuleType, remove_flask_run: bool = False) -> Tuple log.debug("empty file: %s", module_path) return "", None - if remove_flask_run: - source_text = _remove_flask_run(source_text) - if not asbool(os.environ.get(IAST.ENV_NO_DIR_PATCH, "false")) and version_info > (3, 7): # Add the dir filter so __ddtrace stuff is not returned by dir(module) # does not work in 3.7 because it enters into infinite recursion diff --git a/ddtrace/appsec/_iast/_iast_request_context.py b/ddtrace/appsec/_iast/_iast_request_context.py index f49d2bc59bd..a28c2d3ff0d 100644 --- a/ddtrace/appsec/_iast/_iast_request_context.py +++ b/ddtrace/appsec/_iast/_iast_request_context.py @@ -1,3 +1,4 @@ +import os import sys from typing import Dict from typing import Optional @@ -21,6 +22,7 @@ from ddtrace.constants import ORIGIN_KEY from ddtrace.internal import core from ddtrace.internal.logger import get_logger +from ddtrace.internal.utils.formats import asbool log = get_logger(__name__) @@ -48,6 +50,7 @@ def __init__(self, span: Optional[Span] = None): self.request_enabled: bool = False self.iast_reporter: Optional[IastSpanReporter] = None self.iast_span_metrics: Dict[str, int] = {} + self.iast_stack_trace_id: int = 0 def _get_iast_context() -> Optional[IASTEnvironment]: @@ -94,6 +97,14 @@ def get_iast_reporter() -> 
Optional[IastSpanReporter]: return None +def get_iast_stacktrace_id() -> int: + env = _get_iast_context() + if env: + env.iast_stack_trace_id += 1 + return env.iast_stack_trace_id + return 0 + + def set_iast_request_enabled(request_enabled) -> None: env = _get_iast_context() if env: @@ -109,39 +120,61 @@ def is_iast_request_enabled(): return False +def _move_iast_data_to_root_span(): + return asbool(os.getenv("_DD_IAST_USE_ROOT_SPAN")) + + +def _create_and_attach_iast_report_to_span(req_span: Span, existing_data: Optional[str], merge: bool = False): + report_data: Optional[IastSpanReporter] = get_iast_reporter() + if merge and existing_data is not None and report_data is not None: + previous_data = IastSpanReporter() + previous_data._from_json(existing_data) + + report_data._merge(previous_data) + + if report_data is not None: + report_data.build_and_scrub_value_parts() + req_span.set_tag_str(IAST.JSON, report_data._to_str()) + _set_metric_iast_request_tainted() + _set_span_tag_iast_request_tainted(req_span) + _set_span_tag_iast_executed_sink(req_span) + + set_iast_request_enabled(False) + end_iast_context(req_span) + + if req_span.get_tag(ORIGIN_KEY) is None: + req_span.set_tag_str(ORIGIN_KEY, APPSEC.ORIGIN_VALUE) + + oce.release_request() + + def _iast_end_request(ctx=None, span=None, *args, **kwargs): try: - if span: - req_span = span + move_to_root = _move_iast_data_to_root_span() + if move_to_root: + req_span = core.get_root_span() else: - req_span = ctx.get_item("req_span") + if span: + req_span = span + else: + req_span = ctx.get_item("req_span") if _is_iast_enabled(): - exist_data = req_span.get_tag(IAST.JSON) - if exist_data is None and req_span.get_metric(IAST.ENABLED) is None: - if not is_iast_request_enabled(): - req_span.set_metric(IAST.ENABLED, 0.0) - end_iast_context(req_span) - oce.release_request() - return - - req_span.set_metric(IAST.ENABLED, 1.0) - report_data: Optional[IastSpanReporter] = get_iast_reporter() - - if report_data: - 
report_data.build_and_scrub_value_parts() - req_span.set_tag_str(IAST.JSON, report_data._to_str()) - _set_metric_iast_request_tainted() - _set_span_tag_iast_request_tainted(req_span) - _set_span_tag_iast_executed_sink(req_span) - - set_iast_request_enabled(False) - end_iast_context(req_span) - - if req_span.get_tag(ORIGIN_KEY) is None: - req_span.set_tag_str(ORIGIN_KEY, APPSEC.ORIGIN_VALUE) - - oce.release_request() + existing_data = req_span.get_tag(IAST.JSON) + if existing_data is None: + if req_span.get_metric(IAST.ENABLED) is None: + if not is_iast_request_enabled(): + req_span.set_metric(IAST.ENABLED, 0.0) + end_iast_context(req_span) + oce.release_request() + return + + req_span.set_metric(IAST.ENABLED, 1.0) + _create_and_attach_iast_report_to_span(req_span, existing_data, merge=False) + + elif move_to_root: + # Data exists from a previous request, we will merge both reports + _create_and_attach_iast_report_to_span(req_span, existing_data, merge=True) except Exception: log.debug("[IAST] Error finishing IAST context", exc_info=True) diff --git a/ddtrace/appsec/_iast/_overhead_control_engine.py b/ddtrace/appsec/_iast/_overhead_control_engine.py index 036e4d3cbfd..b1f490b14ef 100644 --- a/ddtrace/appsec/_iast/_overhead_control_engine.py +++ b/ddtrace/appsec/_iast/_overhead_control_engine.py @@ -3,7 +3,6 @@ limit. It will measure operations being executed in a request and it will deactivate detection (and therefore reduce the overhead to nearly 0) if a certain threshold is reached. """ -import os from typing import Set from typing import Text from typing import Tuple @@ -25,22 +24,18 @@ def get_request_sampling_value() -> float: return float(asm_config._iast_request_sampling) -MAX_REQUESTS = int(os.environ.get("DD_IAST_MAX_CONCURRENT_REQUESTS", 2)) -MAX_VULNERABILITIES_PER_REQUEST = int(os.environ.get("DD_IAST_VULNERABILITIES_PER_REQUEST", 2)) - - class Operation(object): """Common operation related to Overhead Control Engine (OCE). 
Every vulnerabilities/taint_sinks should inherit from this class. OCE instance calls these methods to control the overhead produced in each request. """ _lock = threading.Lock() - _vulnerability_quota = MAX_VULNERABILITIES_PER_REQUEST + _vulnerability_quota = asm_config._iast_max_vulnerabilities_per_requests _reported_vulnerabilities: Set[Tuple[str, int]] = set() @classmethod def reset(cls): - cls._vulnerability_quota = MAX_VULNERABILITIES_PER_REQUEST + cls._vulnerability_quota = asm_config._iast_max_vulnerabilities_per_requests cls._reported_vulnerabilities = set() @classmethod @@ -57,7 +52,7 @@ def acquire_quota(cls) -> bool: def increment_quota(cls) -> bool: cls._lock.acquire() result = False - if cls._vulnerability_quota < MAX_VULNERABILITIES_PER_REQUEST: + if cls._vulnerability_quota < asm_config._iast_max_vulnerabilities_per_requests: cls._vulnerability_quota += 1 result = True cls._lock.release() @@ -86,12 +81,13 @@ class OverheadControl(object): """ _lock = threading.Lock() - _request_quota = MAX_REQUESTS + _request_quota = asm_config._iast_max_concurrent_requests _vulnerabilities: Set[Type[Operation]] = set() _sampler = RateSampler(sample_rate=get_request_sampling_value() / 100.0) def reconfigure(self): self._sampler = RateSampler(sample_rate=get_request_sampling_value() / 100.0) + self._request_quota = asm_config._iast_max_concurrent_requests def acquire_request(self, span: Span) -> bool: """Decide whether if IAST analysis will be done for this request. 
diff --git a/ddtrace/appsec/_iast/_pytest_plugin.py b/ddtrace/appsec/_iast/_pytest_plugin.py new file mode 100644 index 00000000000..672acc4a031 --- /dev/null +++ b/ddtrace/appsec/_iast/_pytest_plugin.py @@ -0,0 +1,110 @@ +#!/usr/bin/env python3 +import dataclasses +import json +from typing import List + +from ddtrace.appsec._constants import IAST +from ddtrace.appsec._iast._utils import _is_iast_enabled +from ddtrace.appsec._iast.reporter import Vulnerability +from ddtrace.internal.logger import get_logger + + +log = get_logger(__name__) + + +@dataclasses.dataclass(unsafe_hash=True) +class VulnerabilityFoundInTest(Vulnerability): + test: str + + +try: + import pytest + + @pytest.fixture(autouse=_is_iast_enabled()) + def ddtrace_iast(request, ddspan): + """ + Extract the vulnerabilities discovered in tests. + Optionally output the test as failed if vulnerabilities are found. + """ + yield + data = ddspan.get_tag(IAST.JSON) + if not data: + return + + json_data = json.loads(data) + + if json_data["vulnerabilities"]: + for vuln in json_data["vulnerabilities"]: + vuln_data.append( + VulnerabilityFoundInTest( + test=request.node.nodeid, + type=vuln["type"], + evidence=vuln["evidence"], + location=vuln["location"], + ) + ) + + if request.config.getoption("ddtrace-iast-fail-tests"): + vulns = ", ".join([vuln["type"] for vuln in json_data["vulnerabilities"]]) + pytest.fail(f"There are vulnerabilities in the code: {vulns}") + +except ImportError: + log.debug("pytest not imported") + + +vuln_data: List[VulnerabilityFoundInTest] = [] + + +def extract_code_snippet(filepath, line_number, context=3): + """Extracts code snippet around the given line number.""" + try: + with open(filepath, "r") as file: + lines = file.readlines() + start = max(0, line_number - context - 1) + end = min(len(lines), line_number + context) + code = lines[start:end] + return code, start # Return lines and starting line number + except Exception: + log.debug("Error reading file %s", filepath, 
exc_info=True) + return "", 0 + + +def print_iast_report(terminalreporter): + if not _is_iast_enabled(): + return + + if not vuln_data: + terminalreporter.write_sep("=", "Datadog Code Security Report", purple=True, bold=True) + terminalreporter.write_line("No vulnerabilities found.") + return + + terminalreporter.write_sep("=", "Datadog Code Security Report", purple=True, bold=True) + + for entry in vuln_data: + terminalreporter.write_line(f"Test: {entry.test}", bold=True) + high_severity = entry.type.endswith("INJECTION") + terminalreporter.write_line( + f"Vulnerability: {entry.type}", + # TODO(@gnufede): Add remediation links, where remediation is a dict with the vulnerability as key + # f" - \033]8;;{remediation[entry.type]}\033\\Remediation\033]8;;\033\\ \n", + bold=True, + red=high_severity, + yellow=not high_severity, + ) + terminalreporter.write_line(f"Location: {entry.location['path']}:{entry.location['line']}") + code_snippet, start_line = extract_code_snippet(entry.location["path"], entry.location["line"]) + + if code_snippet: + terminalreporter.write_line("Code:") + + if start_line is not None: + for i, line in enumerate(code_snippet, start=start_line + 1): + if i == entry.location["line"]: + terminalreporter.write(f"{i:4d}: {line}", bold=True, purple=True) + else: + terminalreporter.write(f"{i:4d}: {line}") + else: + # If there's an error extracting the code snippet + terminalreporter.write_line(code_snippet[0], bold=True) + + terminalreporter.write_sep("=") diff --git a/ddtrace/appsec/_iast/_taint_tracking/Aspects/AspectModulo.cpp b/ddtrace/appsec/_iast/_taint_tracking/Aspects/AspectModulo.cpp index a08f76d9f3d..b7454de26f8 100644 --- a/ddtrace/appsec/_iast/_taint_tracking/Aspects/AspectModulo.cpp +++ b/ddtrace/appsec/_iast/_taint_tracking/Aspects/AspectModulo.cpp @@ -2,7 +2,7 @@ #include "Helpers.h" static PyObject* -do_modulo(PyObject* text, PyObject* insert_tuple_or_obj) +do_modulo(PyObject* text, PyObject* insert_tuple_or_obj, py::object 
py_candidate_text, py::object py_candidate_tuple) { PyObject* result = nullptr; @@ -13,18 +13,22 @@ do_modulo(PyObject* text, PyObject* insert_tuple_or_obj) Py_INCREF(insert_tuple); } else { insert_tuple = PyTuple_Pack(1, insert_tuple_or_obj); - if (insert_tuple == nullptr) { - return nullptr; - } } - if (PyUnicode_Check(text)) { + if (PyUnicode_Check(text) && insert_tuple != nullptr) { result = PyUnicode_Format(text, insert_tuple); - } else if (PyBytes_Check(text) or PyByteArray_Check(text)) { - auto method_name = PyUnicode_FromString("__mod__"); - result = PyObject_CallMethodObjArgs(text, method_name, insert_tuple, nullptr); - Py_DECREF(method_name); } else { + try { + py::object res_py = py_candidate_text.attr("__mod__")(py_candidate_tuple); + PyObject* res_pyo = res_py.ptr(); + if (res_pyo != nullptr) { + Py_INCREF(res_pyo); + } + return res_pyo; + } catch (py::error_already_set& e) { + e.restore(); + return nullptr; + } } Py_DECREF(insert_tuple); if (has_pyerr()) { @@ -49,21 +53,7 @@ api_modulo_aspect(PyObject* self, PyObject* const* args, const Py_ssize_t nargs) // Lambda to get the result of the modulo operation auto get_result = [&]() -> PyObject* { - PyObject* res = do_modulo(candidate_text, candidate_tuple); - if (res == nullptr) { - try { - py::object res_py = py_candidate_text.attr("__mod__")(py_candidate_tuple); - PyObject* res_pyo = res_py.ptr(); - if (res_pyo != nullptr) { - Py_INCREF(res_pyo); - } - return res_pyo; - } catch (py::error_already_set& e) { - e.restore(); - return nullptr; - } - } - return res; + return do_modulo(candidate_text, candidate_tuple, py_candidate_text, py_candidate_tuple); }; TRY_CATCH_ASPECT("modulo_aspect", return get_result(), , { @@ -107,7 +97,10 @@ api_modulo_aspect(PyObject* self, PyObject* const* args, const Py_ssize_t nargs) } py::tuple formatted_parameters(list_formatted_parameters); - PyObject* applied_params = do_modulo(StringToPyObject(fmttext, py_str_type).ptr(), formatted_parameters.ptr()); + PyObject* 
applied_params = do_modulo(StringToPyObject(fmttext, py_str_type).ptr(), + formatted_parameters.ptr(), + StringToPyObject(fmttext, py_str_type), + formatted_parameters); if (applied_params == nullptr) { return get_result(); } diff --git a/ddtrace/appsec/_iast/_taint_tracking/__init__.py b/ddtrace/appsec/_iast/_taint_tracking/__init__.py index a6bad81f64c..839f4b3537f 100644 --- a/ddtrace/appsec/_iast/_taint_tracking/__init__.py +++ b/ddtrace/appsec/_iast/_taint_tracking/__init__.py @@ -1,10 +1,14 @@ from io import BytesIO from io import StringIO import itertools +from typing import TYPE_CHECKING # noqa:F401 from typing import Any -from typing import Sequence from typing import Tuple + +if TYPE_CHECKING: # pragma: no cover + from typing import Sequence # noqa:F401 + from ddtrace.internal._unpatched import _threading as threading from ddtrace.internal.logger import get_logger @@ -263,7 +267,9 @@ def trace_calls_and_returns(frame, event, arg): threading.settrace(trace_calls_and_returns) -def copy_ranges_to_string(pyobject: str, ranges: Sequence[TaintRange]) -> str: +def copy_ranges_to_string(pyobject, ranges): + # type: (str, Sequence[TaintRange]) -> str + # NB this function uses comment-based type annotation because TaintRange is conditionally imported if not isinstance(pyobject, IAST.TAINTEABLE_TYPES): # type: ignore[misc] return pyobject @@ -297,7 +303,9 @@ def copy_ranges_to_string(pyobject: str, ranges: Sequence[TaintRange]) -> str: # Given a list of ranges, try to match them with the iterable and return a new iterable with a new range applied that # matched the original one Source. If no range matches, take the Source from the first one. 
-def copy_ranges_to_iterable_with_strings(iterable: Sequence[str], ranges: Sequence[TaintRange]) -> Sequence[str]: +def copy_ranges_to_iterable_with_strings(iterable, ranges): + # type: (Sequence[str], Sequence[TaintRange]) -> Sequence[str] + # NB this function uses comment-based type annotation because TaintRange is conditionally imported iterable_type = type(iterable) new_result = [] diff --git a/ddtrace/appsec/_iast/reporter.py b/ddtrace/appsec/_iast/reporter.py index 249d8e21278..62cc2ee8d65 100644 --- a/ddtrace/appsec/_iast/reporter.py +++ b/ddtrace/appsec/_iast/reporter.py @@ -11,6 +11,8 @@ from typing import Tuple import zlib +from ddtrace.appsec._constants import STACK_TRACE +from ddtrace.appsec._exploit_prevention.stack_traces import report_stack from ddtrace.appsec._iast._evidence_redaction import sensitive_handler from ddtrace.appsec._iast._utils import _get_source_index from ddtrace.appsec._iast.constants import VULN_INSECURE_HASHING_TYPE @@ -75,9 +77,19 @@ class Vulnerability(NotNoneDictable): evidence: Evidence location: Location hash: int = dataclasses.field(init=False, compare=False, hash=("PYTEST_CURRENT_TEST" in os.environ), repr=False) + stackId: Optional[str] = dataclasses.field(init=False, compare=False) def __post_init__(self): + # avoid circular import + from ddtrace.appsec._iast._iast_request_context import get_iast_stacktrace_id + self.hash = zlib.crc32(repr(self).encode()) + stacktrace_id = get_iast_stacktrace_id() + self.stackId = None + if stacktrace_id: + str_id = str(stacktrace_id) + if report_stack(stack_id=str_id, namespace=STACK_TRACE.IAST): + self.stackId = str_id def __repr__(self): return f"Vulnerability(type='{self.type}', location={self.location})" @@ -121,6 +133,74 @@ def __hash__(self) -> int: """ return reduce(operator.xor, (hash(obj) for obj in set(self.sources) | self.vulnerabilities)) + def _merge(self, other: "IastSpanReporter") -> None: + """ + Merges the current IAST span reporter with another IAST span reporter. 
+ + Args: + - other (IastSpanReporter): IAST span reporter to merge. + """ + len_previous_sources = len(self.sources) + self.sources = self.sources + other.sources + self._update_vulnerabilities(other, len_previous_sources) + + def _update_vulnerabilities(self, other: "IastSpanReporter", offset: int): + for vuln in other.vulnerabilities: + if ( + hasattr(vuln, "evidence") + and hasattr(vuln.evidence, "valueParts") + and vuln.evidence.valueParts is not None + ): + for part in vuln.evidence.valueParts: + if "source" in part: + part["source"] = part["source"] + offset + self.vulnerabilities.add(vuln) + + def _from_json(self, json_str: str): + """ + Initializes the IAST span reporter from a JSON string. + + Args: + - json_str (str): JSON string. + """ + from ._taint_tracking import str_to_origin + + data = json.loads(json_str) + self.sources = [] + for i in data["sources"]: + source = Source( + origin=str_to_origin(i["origin"]), + name=i["name"], + ) + if "value" in i: + source.value = i["value"] + if "redacted" in i: + source.redacted = i["redacted"] + if "pattern" in i: + source.pattern = i["pattern"] + self.sources.append(source) + + self.vulnerabilities = set() + for i in data["vulnerabilities"]: + evidence = Evidence() + if "ranges" in i["evidence"]: + evidence._ranges = i["evidence"]["ranges"] + if "value" in i["evidence"]: + evidence.value = i["evidence"]["value"] + if "valueParts" in i["evidence"]: + evidence.valueParts = i["evidence"]["valueParts"] + if "dialect" in i["evidence"]: + evidence.dialect = i["evidence"]["dialect"] + self.vulnerabilities.add( + Vulnerability( + type=i["type"], + evidence=evidence, + location=Location( + spanId=i["location"]["spanId"], path=i["location"]["path"], line=i["location"]["line"] + ), + ) + ) + def _to_dict(self): return { "sources": [i._to_dict() for i in self.sources], diff --git a/ddtrace/appsec/_iast/taint_sinks/_base.py b/ddtrace/appsec/_iast/taint_sinks/_base.py index 7db79d33fd8..16eaac2452c 100644 --- 
a/ddtrace/appsec/_iast/taint_sinks/_base.py +++ b/ddtrace/appsec/_iast/taint_sinks/_base.py @@ -61,9 +61,11 @@ def wrapper(wrapped: Callable, instance: Any, args: Any, kwargs: Any) -> Any: vulnerability and update the context with the report information. """ if not is_iast_request_enabled(): - log.debug( - "[IAST] VulnerabilityBase.wrapper. No request quota or this vulnerability is outside the context" - ) + if _is_iast_debug_enabled(): + log.debug( + "[IAST] VulnerabilityBase.wrapper. No request quota or this vulnerability " + "is outside the context" + ) return wrapped(*args, **kwargs) elif cls.has_quota(): return func(wrapped, instance, args, kwargs) diff --git a/ddtrace/appsec/_processor.py b/ddtrace/appsec/_processor.py index 4ba8222c89a..06328d1201a 100644 --- a/ddtrace/appsec/_processor.py +++ b/ddtrace/appsec/_processor.py @@ -21,10 +21,12 @@ from ddtrace.appsec._constants import EXPLOIT_PREVENTION from ddtrace.appsec._constants import FINGERPRINTING from ddtrace.appsec._constants import SPAN_DATA_NAMES +from ddtrace.appsec._constants import STACK_TRACE from ddtrace.appsec._constants import WAF_ACTIONS from ddtrace.appsec._constants import WAF_DATA_NAMES from ddtrace.appsec._ddwaf import DDWaf_result from ddtrace.appsec._ddwaf.ddwaf_types import ddwaf_context_capsule +from ddtrace.appsec._exploit_prevention.stack_traces import report_stack from ddtrace.appsec._metrics import _set_waf_init_metric from ddtrace.appsec._metrics import _set_waf_request_metrics from ddtrace.appsec._metrics import _set_waf_updates_metric @@ -325,10 +327,8 @@ def _waf_action( blocked = parameters blocked[WAF_ACTIONS.TYPE] = "none" elif action == WAF_ACTIONS.STACK_ACTION: - from ddtrace.appsec._exploit_prevention.stack_traces import report_stack - stack_trace_id = parameters["stack_id"] - report_stack("exploit detected", span, crop_trace, stack_id=stack_trace_id) + report_stack("exploit detected", span, crop_trace, stack_id=stack_trace_id, namespace=STACK_TRACE.RASP) for rule in 
waf_results.data: rule[EXPLOIT_PREVENTION.STACK_TRACE_ID] = stack_trace_id diff --git a/ddtrace/appsec/_utils.py b/ddtrace/appsec/_utils.py index e2d46fe098e..bb8739654c5 100644 --- a/ddtrace/appsec/_utils.py +++ b/ddtrace/appsec/_utils.py @@ -5,6 +5,7 @@ from ddtrace.appsec._constants import API_SECURITY from ddtrace.appsec._constants import APPSEC +from ddtrace.internal._unpatched import unpatched_json_loads from ddtrace.internal.compat import to_unicode from ddtrace.internal.logger import get_logger from ddtrace.internal.utils.http import _get_blocked_template # noqa:F401 @@ -17,8 +18,6 @@ def parse_response_body(raw_body): - import json - import xmltodict from ddtrace.appsec import _asm_request_context @@ -54,7 +53,7 @@ def access_body(bd): try: # TODO handle charset if "json" in content_type: - req_body = json.loads(access_body(raw_body)) + req_body = unpatched_json_loads(access_body(raw_body)) elif "xml" in content_type: req_body = xmltodict.parse(access_body(raw_body)) else: diff --git a/ddtrace/appsec/iast/__init__.py b/ddtrace/appsec/iast/__init__.py index c72c2be9167..ece53d092cb 100644 --- a/ddtrace/appsec/iast/__init__.py +++ b/ddtrace/appsec/iast/__init__.py @@ -1,2 +1,3 @@ +from ddtrace.appsec._iast import _iast_pytest_activation # noqa: F401 from ddtrace.appsec._iast import ddtrace_iast_flask_patch # noqa: F401 from ddtrace.appsec._iast import enable_iast_propagation # noqa: F401 diff --git a/ddtrace/contrib/azure_functions/__init__.py b/ddtrace/contrib/azure_functions/__init__.py new file mode 100644 index 00000000000..208b971efaa --- /dev/null +++ b/ddtrace/contrib/azure_functions/__init__.py @@ -0,0 +1,46 @@ +""" +The azure_functions integration traces all http requests to your Azure Function app. + +Enabling +~~~~~~~~ + +Use :func:`patch()` to manually enable the integration:: + + from ddtrace import patch + patch(azure_functions=True) + + +Global Configuration +~~~~~~~~~~~~~~~~~~~~ + +.. 
py:data:: ddtrace.config.azure_functions["service"] + + The service name reported by default for azure_functions instances. + + This option can also be set with the ``DD_SERVICE`` environment + variable. + + Default: ``"azure_functions"`` + +""" + +from ddtrace.internal.utils.importlib import require_modules + + +required_modules = ["azure.functions"] + +with require_modules(required_modules) as missing_modules: + if not missing_modules: + # Required to allow users to import from `ddtrace.contrib.azure_functions.patch` directly + import warnings as _w + + with _w.catch_warnings(): + _w.simplefilter("ignore", DeprecationWarning) + from . import patch as _ # noqa: F401, I001 + + # Expose public methods + from ddtrace.contrib.internal.azure_functions.patch import get_version + from ddtrace.contrib.internal.azure_functions.patch import patch + from ddtrace.contrib.internal.azure_functions.patch import unpatch + + __all__ = ["patch", "unpatch", "get_version"] diff --git a/ddtrace/contrib/azure_functions/patch.py b/ddtrace/contrib/azure_functions/patch.py new file mode 100644 index 00000000000..1a23613972d --- /dev/null +++ b/ddtrace/contrib/azure_functions/patch.py @@ -0,0 +1,14 @@ +from ddtrace.contrib.internal.azure_functions.patch import * # noqa: F403 +from ddtrace.internal.utils.deprecations import DDTraceDeprecationWarning +from ddtrace.vendor.debtcollector import deprecate + + +def __getattr__(name): + deprecate( + ("%s.%s is deprecated" % (__name__, name)), + category=DDTraceDeprecationWarning, + ) + + if name in globals(): + return globals()[name] + raise AttributeError("%s has no attribute %s", __name__, name) diff --git a/ddtrace/contrib/internal/azure_functions/patch.py b/ddtrace/contrib/internal/azure_functions/patch.py new file mode 100644 index 00000000000..15089a2e733 --- /dev/null +++ b/ddtrace/contrib/internal/azure_functions/patch.py @@ -0,0 +1,85 @@ +import azure.functions as azure_functions +from wrapt import wrap_function_wrapper as _w + +from 
ddtrace import config +from ddtrace.contrib.trace_utils import int_service +from ddtrace.contrib.trace_utils import unwrap as _u +from ddtrace.ext import SpanTypes +from ddtrace.internal import core +from ddtrace.internal.schema import schematize_cloud_faas_operation +from ddtrace.internal.schema import schematize_service_name +from ddtrace.pin import Pin + + +config._add( + "azure_functions", + { + "_default_service": schematize_service_name("azure_functions"), + }, +) + + +def get_version(): + # type: () -> str + return getattr(azure_functions, "__version__", "") + + +def patch(): + """ + Patch `azure.functions` module for tracing + """ + # Check to see if we have patched azure.functions yet or not + if getattr(azure_functions, "_datadog_patch", False): + return + azure_functions._datadog_patch = True + + Pin().onto(azure_functions.FunctionApp) + _w("azure.functions", "FunctionApp.route", _patched_route) + + +def _patched_route(wrapped, instance, args, kwargs): + trigger = "Http" + + pin = Pin.get_from(instance) + if not pin or not pin.enabled(): + return wrapped(*args, **kwargs) + + def _wrapper(func): + function_name = func.__name__ + + def wrap_function(req: azure_functions.HttpRequest, context: azure_functions.Context): + operation_name = schematize_cloud_faas_operation( + "azure.functions.invoke", cloud_provider="azure", cloud_service="functions" + ) + with core.context_with_data( + "azure.functions.patched_route_request", + span_name=operation_name, + pin=pin, + service=int_service(pin, config.azure_functions), + span_type=SpanTypes.SERVERLESS, + ) as ctx, ctx.span: + ctx.set_item("req_span", ctx.span) + core.dispatch("azure.functions.request_call_modifier", (ctx, config.azure_functions, req)) + res = None + try: + res = func(req) + return res + finally: + core.dispatch( + "azure.functions.start_response", (ctx, config.azure_functions, res, function_name, trigger) + ) + + # Needed to correctly display function name when running 'func start' locally + 
wrap_function.__name__ = function_name + + return wrapped(*args, **kwargs)(wrap_function) + + return _wrapper + + +def unpatch(): + if not getattr(azure_functions, "_datadog_patch", False): + return + azure_functions._datadog_patch = False + + _u(azure_functions.FunctionApp, "route") diff --git a/ddtrace/contrib/internal/celery/app.py b/ddtrace/contrib/internal/celery/app.py index b61585097a7..42eed2cb468 100644 --- a/ddtrace/contrib/internal/celery/app.py +++ b/ddtrace/contrib/internal/celery/app.py @@ -133,10 +133,6 @@ def _traced_apply_async_inner(func, instance, args, kwargs): if task_span: task_span.set_exc_info(*sys.exc_info()) - prerun_span = core.get_item("prerun_span") - if prerun_span: - prerun_span.set_exc_info(*sys.exc_info()) - raise finally: task_span = core.get_item("task_span") @@ -147,11 +143,4 @@ def _traced_apply_async_inner(func, instance, args, kwargs): ) task_span.finish() - prerun_span = core.get_item("prerun_span") - if prerun_span: - log.debug( - "The task_postrun signal was not called, so manually closing span: %s", prerun_span._pprint() - ) - prerun_span.finish() - return _traced_apply_async_inner diff --git a/ddtrace/contrib/internal/celery/signals.py b/ddtrace/contrib/internal/celery/signals.py index 76f07ee7524..8f27fcc53b0 100644 --- a/ddtrace/contrib/internal/celery/signals.py +++ b/ddtrace/contrib/internal/celery/signals.py @@ -54,9 +54,6 @@ def trace_prerun(*args, **kwargs): service = config.celery["worker_service_name"] span = pin.tracer.trace(c.WORKER_ROOT_SPAN, service=service, resource=task.name, span_type=SpanTypes.WORKER) - # Store an item called "prerun span" in case task_postrun doesn't get called - core.set_item("prerun_span", span) - # set span.kind to the type of request being performed span.set_tag_str(SPAN_KIND, SpanKind.CONSUMER) diff --git a/ddtrace/contrib/internal/kafka/patch.py b/ddtrace/contrib/internal/kafka/patch.py index 225c0f82877..b8e8fce007d 100644 --- a/ddtrace/contrib/internal/kafka/patch.py +++ 
b/ddtrace/contrib/internal/kafka/patch.py @@ -247,7 +247,7 @@ def _instrument_message(messages, pin, start_ns, instance, err): name=schematize_messaging_operation(kafkax.CONSUME, provider="kafka", direction=SpanDirection.PROCESSING), service=trace_utils.ext_service(pin, config.kafka), span_type=SpanTypes.WORKER, - child_of=ctx if ctx is not None else pin.tracer.context_provider.active(), + child_of=ctx if ctx is not None and ctx.trace_id is not None else pin.tracer.context_provider.active(), activate=True, ) as span: # reset span start time to before function call diff --git a/ddtrace/contrib/internal/langchain/patch.py b/ddtrace/contrib/internal/langchain/patch.py index ce72e1affff..fa2332d70f2 100644 --- a/ddtrace/contrib/internal/langchain/patch.py +++ b/ddtrace/contrib/internal/langchain/patch.py @@ -1,4 +1,3 @@ -import json import os import sys from typing import Any @@ -954,17 +953,22 @@ def _on_span_started(span: Span): span.set_tag_str("langchain.request.inputs.%d.%s" % (idx, k), integration.trunc(str(v))) def _on_span_finished(span: Span, streamed_chunks): + maybe_parser = instance.steps[-1] if instance.steps else None if ( streamed_chunks and langchain_core - and isinstance(instance.steps[-1], langchain_core.output_parsers.JsonOutputParser) + and isinstance(maybe_parser, langchain_core.output_parsers.JsonOutputParser) ): - # it's possible that the chain has a json output parser - # this will have already concatenated the chunks into a json object + # it's possible that the chain has a json output parser type + # this will have already concatenated the chunks into an object - # it's also possible the json output parser isn't the last step, + # it's also possible the this parser type isn't the last step, # but one of the last steps, in which case we won't act on it here - content = json.dumps(streamed_chunks[-1]) + result = streamed_chunks[-1] + if maybe_parser.__class__.__name__ == "JsonOutputParser": + content = safe_json(result) + else: + content = 
str(result) else: # best effort to join chunks together content = "".join([str(chunk) for chunk in streamed_chunks]) diff --git a/ddtrace/contrib/pytest/_benchmark_utils.py b/ddtrace/contrib/pytest/_benchmark_utils.py new file mode 100644 index 00000000000..77dd6061b13 --- /dev/null +++ b/ddtrace/contrib/pytest/_benchmark_utils.py @@ -0,0 +1,35 @@ +import pytest + +from ddtrace.contrib.pytest._utils import _get_test_id_from_item +from ddtrace.contrib.pytest_benchmark.constants import PLUGIN_METRICS_V2 +from ddtrace.internal.logger import get_logger +from ddtrace.internal.test_visibility._benchmark_mixin import BenchmarkDurationData +from ddtrace.internal.test_visibility.api import InternalTest + + +log = get_logger(__name__) + + +def _set_benchmark_data_from_item(item: pytest.Item) -> None: + try: + fixture = hasattr(item, "funcargs") and item.funcargs.get("benchmark") + + if not fixture or not fixture.stats: + return + + stat_object = item.funcargs.get("benchmark").stats.stats + + data_kwargs = {} + + for data_attr, stats_attr in PLUGIN_METRICS_V2.items(): + if hasattr(stat_object, stats_attr): + data_kwargs[data_attr] = getattr(stat_object, stats_attr) + + test_id = _get_test_id_from_item(item) + benchmark_data = BenchmarkDurationData(**data_kwargs) + + InternalTest.set_benchmark_data(test_id, benchmark_data, is_benchmark=True) + + except Exception: # noqa: E722 + log.debug("Unable to set benchmark data for item %s", item, exc_info=True) + return None diff --git a/ddtrace/contrib/pytest/_plugin_v2.py b/ddtrace/contrib/pytest/_plugin_v2.py index b9cf7daf564..f1da8d2db11 100644 --- a/ddtrace/contrib/pytest/_plugin_v2.py +++ b/ddtrace/contrib/pytest/_plugin_v2.py @@ -13,6 +13,7 @@ from ddtrace.contrib.internal.coverage.patch import run_coverage_report from ddtrace.contrib.internal.coverage.utils import _is_coverage_invoked_by_coverage_run from ddtrace.contrib.internal.coverage.utils import _is_coverage_patched +from ddtrace.contrib.pytest._benchmark_utils import 
_set_benchmark_data_from_item from ddtrace.contrib.pytest._plugin_v1 import _extract_reason from ddtrace.contrib.pytest._plugin_v1 import _is_pytest_cov_enabled from ddtrace.contrib.pytest._types import _pytest_report_teststatus_return_type @@ -195,6 +196,12 @@ def pytest_configure(config: pytest_Config) -> None: enable_test_visibility(config=dd_config.pytest) if _is_pytest_cov_enabled(config): patch_coverage() + + # pytest-bdd plugin support + if config.pluginmanager.hasplugin("pytest-bdd"): + from ddtrace.contrib.pytest._pytest_bdd_subplugin import _PytestBddSubPlugin + + config.pluginmanager.register(_PytestBddSubPlugin(), "_datadog-pytest-bdd") else: # If the pytest ddtrace plugin is not enabled, we should disable CI Visibility, as it was enabled during # pytest_load_initial_conftests @@ -256,8 +263,16 @@ def _pytest_collection_finish(session) -> None: InternalTestSuite.discover(suite_id) item_path = Path(item.path if hasattr(item, "path") else item.fspath).absolute() + workspace_path = InternalTestSession.get_workspace_path() + if workspace_path: + try: + repo_relative_path = item_path.relative_to(workspace_path) + except ValueError: + repo_relative_path = item_path + else: + repo_relative_path = item_path - item_codeowners = InternalTestSession.get_path_codeowners(item_path) + item_codeowners = InternalTestSession.get_path_codeowners(repo_relative_path) if repo_relative_path else None source_file_info = _get_source_file_info(item, item_path) @@ -449,6 +464,10 @@ def _pytest_runtest_makereport(item: pytest.Item, call: pytest_CallInfo, outcome if test_outcome.status is None and call.when != "teardown": return + # Support for pytest-benchmark plugin + if item.config.pluginmanager.hasplugin("benchmark"): + _set_benchmark_data_from_item(item) + # Record a result if we haven't already recorded it: if not InternalTest.is_finished(test_id): InternalTest.finish(test_id, test_outcome.status, test_outcome.skip_reason, test_outcome.exc_info) @@ -525,6 +544,13 @@ def 
_pytest_terminal_summary_post_yield(terminalreporter, failed_reports_initial @pytest.hookimpl(hookwrapper=True, tryfirst=True) def pytest_terminal_summary(terminalreporter, exitstatus, config): """Report flaky or failed tests""" + try: + from ddtrace.appsec._iast._pytest_plugin import print_iast_report + + print_iast_report(terminalreporter) + except Exception: # noqa: E722 + log.debug("Encountered error during code security summary", exc_info=True) + if not is_test_visibility_enabled(): yield return diff --git a/ddtrace/contrib/pytest/_pytest_bdd_subplugin.py b/ddtrace/contrib/pytest/_pytest_bdd_subplugin.py new file mode 100644 index 00000000000..7c964af3d59 --- /dev/null +++ b/ddtrace/contrib/pytest/_pytest_bdd_subplugin.py @@ -0,0 +1,110 @@ +"""Provides functionality to support the pytest-bdd plugin as part of the ddtrace integration + +NOTE: This replaces the previous ddtrace.pytest_bdd plugin. + +This plugin mainly modifies the names of the test, its suite, and parameters. It does not, however modify the tests' +suite from the perspective of Test Visibility data. + +The plugin is only instantiated and added if the pytest-bdd plugin itself is installed and enabled, because the hook +implementations will cause errors unless the hookspecs are added by the original plugin. 
+""" +from pathlib import Path +import sys + +import pytest + +from ddtrace.contrib.pytest._utils import _get_test_id_from_item +from ddtrace.contrib.pytest_bdd import get_version +from ddtrace.contrib.pytest_bdd._plugin import _extract_span +from ddtrace.contrib.pytest_bdd._plugin import _get_step_func_args_json +from ddtrace.contrib.pytest_bdd._plugin import _store_span +from ddtrace.contrib.pytest_bdd.constants import FRAMEWORK +from ddtrace.contrib.pytest_bdd.constants import STEP_KIND +from ddtrace.ext import test +from ddtrace.internal.logger import get_logger +from ddtrace.internal.test_visibility.api import InternalTest +from ddtrace.internal.test_visibility.api import InternalTestSession + + +log = get_logger(__name__) + + +def _get_workspace_relative_path(feature_path_str: str) -> Path: + feature_path = Path(feature_path_str).resolve() + workspace_path = InternalTestSession.get_workspace_path() + if workspace_path: + try: + return feature_path.relative_to(workspace_path) + except ValueError: # noqa: E722 + log.debug("Feature path %s is not relative to workspace path %s", feature_path, workspace_path) + return feature_path + + +class _PytestBddSubPlugin: + def __init__(self): + self.framework_version = get_version() + + @staticmethod + @pytest.hookimpl(tryfirst=True) + def pytest_bdd_before_scenario(request, feature, scenario): + test_id = _get_test_id_from_item(request.node) + feature_path = _get_workspace_relative_path(scenario.feature.filename) + codeowners = InternalTestSession.get_path_codeowners(feature_path) + + InternalTest.overwrite_attributes( + test_id, name=scenario.name, suite_name=str(feature_path), codeowners=codeowners + ) + + @pytest.hookimpl(tryfirst=True) + def pytest_bdd_before_step(self, request, feature, scenario, step, step_func): + feature_test_id = _get_test_id_from_item(request.node) + + feature_span = InternalTest.get_span(feature_test_id) + + tracer = InternalTestSession.get_tracer() + if tracer is None: + return + + span = 
tracer.start_span( + step.type, + resource=step.name, + span_type=STEP_KIND, + child_of=feature_span, + activate=True, + ) + span.set_tag_str("component", "pytest_bdd") + + span.set_tag(test.FRAMEWORK, FRAMEWORK) + span.set_tag(test.FRAMEWORK_VERSION, self.framework_version) + + feature_path = _get_workspace_relative_path(scenario.feature.filename) + + span.set_tag(test.FILE, str(feature_path)) + span.set_tag(test.CODEOWNERS, InternalTestSession.get_path_codeowners(feature_path)) + + _store_span(step_func, span) + + @staticmethod + @pytest.hookimpl(trylast=True) + def pytest_bdd_after_step(request, feature, scenario, step, step_func, step_func_args): + span = _extract_span(step_func) + if span is not None: + step_func_args_json = _get_step_func_args_json(step, step_func, step_func_args) + if step_func_args: + span.set_tag(test.PARAMETERS, step_func_args_json) + span.finish() + + @staticmethod + def pytest_bdd_step_error(request, feature, scenario, step, step_func, step_func_args, exception): + span = _extract_span(step_func) + if span is not None: + if hasattr(exception, "__traceback__"): + tb = exception.__traceback__ + else: + # PY2 compatibility workaround + _, _, tb = sys.exc_info() + step_func_args_json = _get_step_func_args_json(step, step_func, step_func_args) + if step_func_args: + span.set_tag(test.PARAMETERS, step_func_args_json) + span.set_exc_info(type(exception), exception, tb) + span.finish() diff --git a/ddtrace/contrib/pytest/plugin.py b/ddtrace/contrib/pytest/plugin.py index a8da8c3a5ca..a09a81be49a 100644 --- a/ddtrace/contrib/pytest/plugin.py +++ b/ddtrace/contrib/pytest/plugin.py @@ -15,6 +15,8 @@ import pytest +from ddtrace.appsec._iast._pytest_plugin import ddtrace_iast # noqa:F401 +from ddtrace.appsec._iast._utils import _is_iast_enabled from ddtrace.contrib.pytest._utils import _USE_PLUGIN_V2 from ddtrace.contrib.pytest._utils import _extract_span from ddtrace.contrib.pytest._utils import _pytest_version_supports_itr @@ -67,10 +69,22 @@ def 
pytest_addoption(parser): help=DDTRACE_INCLUDE_CLASS_HELP_MSG, ) + group._addoption( + "--ddtrace-iast-fail-tests", + action="store_true", + dest="ddtrace-iast-fail-tests", + default=False, + help=DDTRACE_INCLUDE_CLASS_HELP_MSG, + ) + parser.addini("ddtrace", DDTRACE_HELP_MSG, type="bool") parser.addini("no-ddtrace", DDTRACE_HELP_MSG, type="bool") parser.addini("ddtrace-patch-all", PATCH_ALL_HELP_MSG, type="bool") parser.addini("ddtrace-include-class-name", DDTRACE_INCLUDE_CLASS_HELP_MSG, type="bool") + if _is_iast_enabled(): + from ddtrace.appsec._iast import _iast_pytest_activation + + _iast_pytest_activation() # Version-specific pytest hooks diff --git a/ddtrace/contrib/pytest_bdd/plugin.py b/ddtrace/contrib/pytest_bdd/plugin.py index 68da4a3a3c5..1dc714c89c5 100644 --- a/ddtrace/contrib/pytest_bdd/plugin.py +++ b/ddtrace/contrib/pytest_bdd/plugin.py @@ -1,9 +1,20 @@ +from ddtrace import DDTraceDeprecationWarning +from ddtrace.contrib.pytest._utils import _USE_PLUGIN_V2 from ddtrace.contrib.pytest.plugin import is_enabled as is_ddtrace_enabled +from ddtrace.vendor.debtcollector import deprecate def pytest_configure(config): if config.pluginmanager.hasplugin("pytest-bdd") and config.pluginmanager.hasplugin("ddtrace"): - if is_ddtrace_enabled(config): - from ._plugin import _PytestBddPlugin + if not _USE_PLUGIN_V2: + if is_ddtrace_enabled(config): + from ._plugin import _PytestBddPlugin - config.pluginmanager.register(_PytestBddPlugin(), "_datadog-pytest-bdd") + deprecate( + "the ddtrace.pytest_bdd plugin is deprecated", + message="it will be integrated with the main pytest ddtrace plugin", + removal_version="3.0.0", + category=DDTraceDeprecationWarning, + ) + + config.pluginmanager.register(_PytestBddPlugin(), "_datadog-pytest-bdd") diff --git a/ddtrace/contrib/pytest_benchmark/constants.py b/ddtrace/contrib/pytest_benchmark/constants.py index 974208509f7..b4c4f7f5b27 100644 --- a/ddtrace/contrib/pytest_benchmark/constants.py +++ 
b/ddtrace/contrib/pytest_benchmark/constants.py @@ -56,3 +56,24 @@ STATISTICS_STDDEV_OUTLIERS: PLUGIN_STDDEV_OUTLIERS, STATISTICS_TOTAL: PLUGIN_TOTAL, } + +PLUGIN_METRICS_V2 = { + "duration_mean": PLUGIN_MEAN, + "duration_runs": PLUGIN_ROUNDS, + "statistics_hd15iqr": PLUGIN_HD15IQR, + "statistics_iqr": PLUGIN_IQR, + "statistics_iqr_outliers": PLUGIN_IQR_OUTLIERS, + "statistics_ld15iqr": PLUGIN_LD15IQR, + "statistics_max": PLUGIN_MAX, + "statistics_mean": PLUGIN_MEAN, + "statistics_median": PLUGIN_MEDIAN, + "statistics_min": PLUGIN_MIN, + "statistics_n": PLUGIN_ROUNDS, + "statistics_ops": PLUGIN_OPS, + "statistics_outliers": PLUGIN_OUTLIERS, + "statistics_q1": PLUGIN_Q1, + "statistics_q3": PLUGIN_Q3, + "statistics_std_dev": PLUGIN_STDDEV, + "statistics_std_dev_outliers": PLUGIN_STDDEV_OUTLIERS, + "statistics_total": PLUGIN_TOTAL, +} diff --git a/ddtrace/contrib/pytest_benchmark/plugin.py b/ddtrace/contrib/pytest_benchmark/plugin.py index 461b5f931ac..4cb76148dbc 100644 --- a/ddtrace/contrib/pytest_benchmark/plugin.py +++ b/ddtrace/contrib/pytest_benchmark/plugin.py @@ -1,9 +1,19 @@ +from ddtrace import DDTraceDeprecationWarning +from ddtrace.contrib.pytest._utils import _USE_PLUGIN_V2 from ddtrace.contrib.pytest.plugin import is_enabled as is_ddtrace_enabled +from ddtrace.vendor.debtcollector import deprecate def pytest_configure(config): if config.pluginmanager.hasplugin("benchmark") and config.pluginmanager.hasplugin("ddtrace"): if is_ddtrace_enabled(config): - from ._plugin import _PytestBenchmarkPlugin + deprecate( + "this version of the ddtrace.pytest_benchmark plugin is deprecated", + message="it will be integrated with the main pytest ddtrace plugin", + removal_version="3.0.0", + category=DDTraceDeprecationWarning, + ) + if not _USE_PLUGIN_V2: + from ._plugin import _PytestBenchmarkPlugin - config.pluginmanager.register(_PytestBenchmarkPlugin(), "_datadog-pytest-benchmark") + config.pluginmanager.register(_PytestBenchmarkPlugin(), "_datadog-pytest-benchmark") 
diff --git a/ddtrace/debugging/_debugger.py b/ddtrace/debugging/_debugger.py index 6d0edf3a224..1c2429ba569 100644 --- a/ddtrace/debugging/_debugger.py +++ b/ddtrace/debugging/_debugger.py @@ -6,6 +6,7 @@ from pathlib import Path import sys import threading +from types import CodeType from types import FunctionType from types import ModuleType from types import TracebackType @@ -24,33 +25,22 @@ from ddtrace._trace.tracer import Tracer from ddtrace.debugging._config import di_config from ddtrace.debugging._function.discovery import FunctionDiscovery -from ddtrace.debugging._function.store import FullyNamedWrappedFunction +from ddtrace.debugging._function.store import FullyNamedContextWrappedFunction from ddtrace.debugging._function.store import FunctionStore from ddtrace.debugging._metrics import metrics from ddtrace.debugging._probe.model import FunctionLocationMixin from ddtrace.debugging._probe.model import FunctionProbe from ddtrace.debugging._probe.model import LineLocationMixin from ddtrace.debugging._probe.model import LineProbe -from ddtrace.debugging._probe.model import LogFunctionProbe -from ddtrace.debugging._probe.model import LogLineProbe -from ddtrace.debugging._probe.model import MetricFunctionProbe -from ddtrace.debugging._probe.model import MetricLineProbe from ddtrace.debugging._probe.model import Probe -from ddtrace.debugging._probe.model import SpanDecorationFunctionProbe -from ddtrace.debugging._probe.model import SpanDecorationLineProbe -from ddtrace.debugging._probe.model import SpanFunctionProbe from ddtrace.debugging._probe.registry import ProbeRegistry from ddtrace.debugging._probe.remoteconfig import ProbePollerEvent from ddtrace.debugging._probe.remoteconfig import ProbePollerEventType from ddtrace.debugging._probe.remoteconfig import ProbeRCAdapter from ddtrace.debugging._probe.status import ProbeStatusLogger from ddtrace.debugging._signal.collector import SignalCollector -from ddtrace.debugging._signal.metric_sample import MetricSample 
from ddtrace.debugging._signal.model import Signal from ddtrace.debugging._signal.model import SignalState -from ddtrace.debugging._signal.snapshot import Snapshot -from ddtrace.debugging._signal.tracing import DynamicSpan -from ddtrace.debugging._signal.tracing import SpanDecoration from ddtrace.debugging._uploader import LogsIntakeUploaderV1 from ddtrace.debugging._uploader import UploaderProduct from ddtrace.internal import compat @@ -62,7 +52,6 @@ from ddtrace.internal.module import register_post_run_module_hook from ddtrace.internal.module import unregister_post_run_module_hook from ddtrace.internal.rate_limiter import BudgetRateLimiterWithJitter as RateLimiter -from ddtrace.internal.rate_limiter import RateLimitExceeded from ddtrace.internal.remoteconfig.worker import remoteconfig_poller from ddtrace.internal.service import Service from ddtrace.internal.wrapping.context import WrappingContext @@ -85,6 +74,9 @@ class DebuggerError(Exception): class DebuggerModuleWatchdog(ModuleWatchdog): _locations: Set[str] = set() + def transform(self, code: CodeType, module: ModuleType) -> CodeType: + return FunctionDiscovery.transformer(code, module) + @classmethod def register_origin_hook(cls, origin: Path, hook: ModuleHookType) -> None: if origin in cls._locations: @@ -190,35 +182,15 @@ def _open_signals(self) -> None: # for each probe. 
trace_context = self._tracer.current_trace_context() - if isinstance(probe, MetricFunctionProbe): - signal = MetricSample( - probe=probe, + try: + signal = Signal.from_probe( + probe, frame=frame, thread=thread, trace_context=trace_context, meter=self._probe_meter, ) - elif isinstance(probe, LogFunctionProbe): - signal = Snapshot( - probe=probe, - frame=frame, - thread=thread, - trace_context=trace_context, - ) - elif isinstance(probe, SpanFunctionProbe): - signal = DynamicSpan( - probe=probe, - frame=frame, - thread=thread, - trace_context=trace_context, - ) - elif isinstance(probe, SpanDecorationFunctionProbe): - signal = SpanDecoration( - probe=probe, - frame=frame, - thread=thread, - ) - else: + except TypeError: log.error("Unsupported probe type: %s", type(probe)) continue @@ -385,39 +357,19 @@ def _dd_debugger_hook(self, probe: Probe) -> None: instrumented code is running. """ try: - actual_frame = sys._getframe(1) - signal: Optional[Signal] = None - if isinstance(probe, MetricLineProbe): - signal = MetricSample( - probe=probe, - frame=actual_frame, + try: + signal = Signal.from_probe( + probe, + frame=sys._getframe(1), thread=threading.current_thread(), trace_context=self._tracer.current_trace_context(), meter=self._probe_meter, ) - elif isinstance(probe, LogLineProbe): - if probe.take_snapshot: - # TODO: Global limit evaluated before probe conditions - if self._global_rate_limiter.limit() is RateLimitExceeded: - return - - signal = Snapshot( - probe=probe, - frame=actual_frame, - thread=threading.current_thread(), - trace_context=self._tracer.current_trace_context(), - ) - elif isinstance(probe, SpanDecorationLineProbe): - signal = SpanDecoration( - probe=probe, - frame=actual_frame, - thread=threading.current_thread(), - ) - else: - log.error("Unsupported probe type: %r", type(probe)) + except TypeError: + log.error("Unsupported probe type: %r", type(probe), exc_info=True) return - signal.do_line() + signal.do_line(self._global_rate_limiter if 
probe.is_global_rate_limited() else None) if signal.state is SignalState.DONE: self._probe_registry.set_emitting(probe) @@ -434,7 +386,7 @@ def _probe_injection_hook(self, module: ModuleType) -> None: # Group probes by function so that we decompile each function once and # bulk-inject the probes. - probes_for_function: Dict[FullyNamedWrappedFunction, List[Probe]] = defaultdict(list) + probes_for_function: Dict[FullyNamedContextWrappedFunction, List[Probe]] = defaultdict(list) for probe in self._probe_registry.get_pending(str(origin(module))): if not isinstance(probe, LineLocationMixin): continue @@ -458,7 +410,7 @@ def _probe_injection_hook(self, module: ModuleType) -> None: log.error(message) self._probe_registry.set_error(probe, "NoFunctionsAtLine", message) continue - for function in (cast(FullyNamedWrappedFunction, _) for _ in functions): + for function in (cast(FullyNamedContextWrappedFunction, _) for _ in functions): probes_for_function[function].append(cast(LineProbe, probe)) for function, probes in probes_for_function.items(): @@ -533,14 +485,14 @@ def _eject_probes(self, probes_to_eject: List[LineProbe]) -> None: module = self.__watchdog__.get_by_origin(resolved_source) if module is not None: # The module is still loaded, so we can try to eject the hooks - probes_for_function: Dict[FullyNamedWrappedFunction, List[LineProbe]] = defaultdict(list) + probes_for_function: Dict[FullyNamedContextWrappedFunction, List[LineProbe]] = defaultdict(list) for probe in probes: if not isinstance(probe, LineLocationMixin): continue line = probe.line assert line is not None, probe # nosec functions = FunctionDiscovery.from_module(module).at_line(line) - for function in (cast(FullyNamedWrappedFunction, _) for _ in functions): + for function in (cast(FullyNamedContextWrappedFunction, _) for _ in functions): probes_for_function[function].append(probe) for function, ps in probes_for_function.items(): @@ -647,7 +599,7 @@ def _unwrap_functions(self, probes: List[FunctionProbe]) 
-> None: context = cast(DebuggerWrappingContext, DebuggerWrappingContext.extract(function)) context.remove_probe(probe) if not context.has_probes(): - self._function_store.unwrap(cast(FullyNamedWrappedFunction, function)) + self._function_store.unwrap(cast(FullyNamedContextWrappedFunction, function)) log.debug("Unwrapped %r", registered_probe) else: log.error("Attempted to unwrap %r, but no wrapper found", registered_probe) diff --git a/ddtrace/debugging/_encoding.py b/ddtrace/debugging/_encoding.py index aa54add676a..b5f6458f4e2 100644 --- a/ddtrace/debugging/_encoding.py +++ b/ddtrace/debugging/_encoding.py @@ -15,7 +15,7 @@ from typing import Union from ddtrace.debugging._config import di_config -from ddtrace.debugging._signal.model import LogSignal +from ddtrace.debugging._signal.log import LogSignal from ddtrace.debugging._signal.snapshot import Snapshot from ddtrace.internal import forksafe from ddtrace.internal._encoding import BufferFull diff --git a/ddtrace/debugging/_exception/replay.py b/ddtrace/debugging/_exception/replay.py index 3a54bce6f51..080b4cbfc61 100644 --- a/ddtrace/debugging/_exception/replay.py +++ b/ddtrace/debugging/_exception/replay.py @@ -40,7 +40,8 @@ CAPTURE_TRACE_TAG = "_dd.debug.error.trace_captured" # unique exception id -EXCEPTION_ID_TAG = "_dd.debug.error.exception_id" +EXCEPTION_HASH_TAG = "_dd.debug.error.exception_hash" +EXCEPTION_ID_TAG = "_dd.debug.error.exception_capture_id" # link to matching snapshot for every frame in the traceback FRAME_SNAPSHOT_ID_TAG = "_dd.debug.error.%d.snapshot_id" @@ -80,9 +81,8 @@ def exception_chain_ident(chain: ExceptionChain) -> int: return h -def limit_exception(chain: ExceptionChain) -> bool: +def limit_exception(exc_ident: int) -> bool: try: - exc_ident = exception_chain_ident(chain) hg = EXCEPTION_IDENT_LIMITER.get(exc_ident) if hg is None: # We haven't seen this exception yet, or it's been evicted @@ -170,7 +170,7 @@ class SpanExceptionSnapshot(Snapshot): @property def data(self) -> 
t.Dict[str, t.Any]: data = super().data - data.update({"exception-id": str(self.exc_id)}) + data.update({"exceptionId": str(self.exc_id)}) return data @@ -218,7 +218,8 @@ def on_span_exception( # No exceptions to capture return - if limit_exception(chain): + exc_ident = exception_chain_ident(chain) + if limit_exception(exc_ident): # We have seen this exception recently return @@ -272,6 +273,7 @@ def on_span_exception( _tb = _tb.tb_next span.set_tag_str(DEBUG_INFO_TAG, "true") + span.set_tag_str(EXCEPTION_HASH_TAG, str(exc_ident)) span.set_tag_str(EXCEPTION_ID_TAG, str(exc_id)) @classmethod diff --git a/ddtrace/debugging/_expressions.py b/ddtrace/debugging/_expressions.py index 50028b9c6d2..32b87017cdf 100644 --- a/ddtrace/debugging/_expressions.py +++ b/ddtrace/debugging/_expressions.py @@ -23,6 +23,7 @@ arg_operation => {"": []} arg_op_type => filter | substring | getmember | index """ # noqa + from dataclasses import dataclass from itertools import chain import re @@ -62,7 +63,9 @@ def _is_identifier(name: str) -> bool: def short_circuit_instrs(op: str, label: Label) -> List[Instr]: value = "FALSE" if op == "and" else "TRUE" - if PY >= (3, 12): + if PY >= (3, 13): + return [Instr("COPY", 1), Instr("TO_BOOL"), Instr(f"POP_JUMP_IF_{value}", label), Instr("POP_TOP")] + elif PY >= (3, 12): return [Instr("COPY", 1), Instr(f"POP_JUMP_IF_{value}", label), Instr("POP_TOP")] return [Instr(f"JUMP_IF_{value}_OR_POP", label)] @@ -144,6 +147,9 @@ def _compile_direct_predicate(self, ast: DDASTType) -> Optional[List[Instr]]: value.append(Instr("LOAD_FAST", "_locals")) value.append(IN_OPERATOR_INSTR) else: + if PY >= (3, 13): + # UNARY_NOT requires a boolean value + value.append(Instr("TO_BOOL")) value.append(Instr("UNARY_NOT")) return value @@ -249,17 +255,18 @@ def _compile_direct_operation(self, ast: DDASTType) -> Optional[List[Instr]]: return None def _call_function(self, func: Callable, *args: List[Instr]) -> List[Instr]: - if PY < (3, 11): - return [Instr("LOAD_CONST", 
func)] + list(chain(*args)) + [Instr("CALL_FUNCTION", len(args))] - elif PY >= (3, 12): + if PY >= (3, 13): + return [Instr("LOAD_CONST", func), Instr("PUSH_NULL")] + list(chain(*args)) + [Instr("CALL", len(args))] + if PY >= (3, 12): return [Instr("PUSH_NULL"), Instr("LOAD_CONST", func)] + list(chain(*args)) + [Instr("CALL", len(args))] + if PY >= (3, 11): + return ( + [Instr("PUSH_NULL"), Instr("LOAD_CONST", func)] + + list(chain(*args)) + + [Instr("PRECALL", len(args)), Instr("CALL", len(args))] + ) - # Python 3.11 - return ( - [Instr("PUSH_NULL"), Instr("LOAD_CONST", func)] - + list(chain(*args)) - + [Instr("PRECALL", len(args)), Instr("CALL", len(args))] - ) + return [Instr("LOAD_CONST", func)] + list(chain(*args)) + [Instr("CALL_FUNCTION", len(args))] def _compile_arg_operation(self, ast: DDASTType) -> Optional[List[Instr]]: # arg_operation => {"": []} diff --git a/ddtrace/debugging/_function/discovery.py b/ddtrace/debugging/_function/discovery.py index 9cabb4b3a04..6a259f0f93c 100644 --- a/ddtrace/debugging/_function/discovery.py +++ b/ddtrace/debugging/_function/discovery.py @@ -4,6 +4,7 @@ from wrapt import FunctionWrapper +from ddtrace.internal.compat import PYTHON_VERSION_INFO from ddtrace.internal.utils.inspection import undecorated @@ -12,6 +13,7 @@ except ImportError: from typing_extensions import Protocol # type: ignore[assignment] +from types import CodeType from types import FunctionType from types import ModuleType from typing import Any @@ -27,6 +29,7 @@ from ddtrace.internal.logger import get_logger from ddtrace.internal.module import origin from ddtrace.internal.safety import _isinstance +from ddtrace.internal.utils.inspection import collect_code_objects from ddtrace.internal.utils.inspection import linenos @@ -49,6 +52,8 @@ class FullyNamed(Protocol): class FullyNamedFunction(FullyNamed): """A fully named function object.""" + __qualname__: str + def __call__(self, *args, **kwargs): pass @@ -119,7 +124,49 @@ def _local_name(name: str, f: 
FunctionType) -> str: return func_name -def _collect_functions(module: ModuleType) -> Dict[str, FullyNamedFunction]: +class _FunctionCodePair: + """Function-Code Pair + + This class allows us to resolve a code object to a function object by + querying the GC on-demand. + """ + + __slots__ = ("function", "code") + + def __init__(self, code: Optional[CodeType] = None, function: Optional[FunctionType] = None) -> None: + if code is not None and function is not None and function.__code__ is not code: + raise ValueError("Function and code objects do not match") + + self.function = function + self.code = function.__code__ if function is not None else code + + def resolve(self) -> FullyNamedFunction: + import gc + + if self.function is not None: + return cast(FullyNamedFunction, self.function) + + code = self.code + functions = [_ for _ in gc.get_referrers(code) if isinstance(_, FunctionType) and _.__code__ is code] + n = len(functions) + if n == 0: + msg = f"Cannot resolve code object to function: {code}" + raise ValueError(msg) + if n > 1: + # This can happen for functions that are created at runtime rather + # than compile time. We do not support this case deliberately for + # now. + msg = f"Multiple functions found for code object {code}" + raise ValueError(msg) + + self.function = _f = functions[0] + f = cast(FullyNamedFunction, _f) + f.__fullname__ = f"{f.__module__}.{f.__qualname__}" + + return f + + +def _collect_functions(module: ModuleType) -> Dict[str, _FunctionCodePair]: """Collect functions from a given module. All the collected functions are augmented with a ``__fullname__`` attribute @@ -160,7 +207,9 @@ def _collect_functions(module: ModuleType) -> Dict[str, FullyNamedFunction]: # try to retrieve any potentially decorated function so # that we don't end up returning the decorator function # instead of the original function. 
- functions[fullname] = undecorated(f, name, path) if name == k else o + functions[fullname] = _FunctionCodePair( + function=cast(FunctionType, undecorated(f, name, path) if name == k else o) + ) try: if f.__closure__: @@ -189,28 +238,48 @@ class FunctionDiscovery(defaultdict): def __init__(self, module: ModuleType) -> None: super().__init__(list) - self._module = module - self._fullname_index = {} - functions = _collect_functions(module) - seen_functions = set() module_path = origin(module) if module_path is None: # We are not going to collect anything because no code objects will # match the origin. return - for fname, function in functions.items(): - if ( - function not in seen_functions - and Path(cast(FunctionType, function).__code__.co_filename).resolve() == module_path - ): - # We only map line numbers for functions that actually belong to - # the module. - for lineno in linenos(cast(FunctionType, function)): - self[lineno].append(function) - self._fullname_index[fname] = function - seen_functions.add(function) + self._module = module + self._fullname_index = _collect_functions(module) + if PYTHON_VERSION_INFO < (3, 11): + self._name_index: Dict[str, List[_FunctionCodePair]] = defaultdict(list) + self._cached: Dict[int, List[FullyNamedFunction]] = {} + + # Create the line to function mapping + if hasattr(module, "__dd_code__"): + for code in module.__dd_code__: + fcp = _FunctionCodePair(code=code) + + if PYTHON_VERSION_INFO >= (3, 11): + # From this version of Python we can derive the qualified + # name of the function directly from the code object. 
+ fullname = f"{module.__name__}.{code.co_qualname}" + self._fullname_index[fullname] = fcp + else: + self._name_index[code.co_name].append(fcp) + + for lineno in linenos(code): + self[lineno].append(fcp) + else: + # If the module was already loaded we don't have its code object + seen_functions = set() + for _, fcp in self._fullname_index.items(): + function = fcp.resolve() + if ( + function not in seen_functions + and Path(cast(FunctionType, function).__code__.co_filename).resolve() == module_path + ): + # We only map line numbers for functions that actually belong to + # the module. + for lineno in linenos(cast(FunctionType, function)): + self[lineno].append(_FunctionCodePair(function=cast(FunctionType, function))) + seen_functions.add(function) def at_line(self, line: int) -> List[FullyNamedFunction]: """Get the functions at the given line. @@ -218,14 +287,55 @@ def at_line(self, line: int) -> List[FullyNamedFunction]: Note that, in general, there can be multiple copies of the same functions. This can happen as a result, e.g., of using decorators. """ - return self[line] + if line in self._cached: + return self._cached[line] + + if line in self: + functions = [] + for fcp in self[line]: + try: + functions.append(fcp.resolve()) + except ValueError: + pass + + if not functions: + del self[line] + else: + self._cached[line] = functions + + return functions + + return [] def by_name(self, qualname: str) -> FullyNamedFunction: """Get the function by its qualified name.""" - fullname = ".".join((self._module.__name__, qualname)) + fullname = f"{self._module.__name__}.{qualname}" try: - return self._fullname_index[fullname] + return self._fullname_index[fullname].resolve() except KeyError: + if PYTHON_VERSION_INFO < (3, 11): + # Check if any code objects whose names match the last part of + # the qualified name have a function with the same qualified + # name. 
+ for name, fcps in self._name_index.items(): + if qualname == name or qualname.endswith(f".{name}"): + for fcp in list(fcps): + try: + f = fcp.resolve() + + # We have resolved the function so we can now + # get its full name + self._fullname_index[f"{self._module.__name__}.{f.__qualname__}"] = fcp + + # We can remove the entry from the name index + fcps.pop(0) + + # If this is the function we are looking for, + # return it + if f.__qualname__ == qualname: + return f + except ValueError: + pass raise ValueError("Function '%s' not found" % fullname) @classmethod @@ -241,4 +351,12 @@ def from_module(cls, module: ModuleType) -> "FunctionDiscovery": return module.__function_discovery__ except AttributeError: fd = module.__function_discovery__ = cls(module) # type: ignore[attr-defined] + if hasattr(module, "__dd_code__"): + # We no longer need to keep this collection around + del module.__dd_code__ return fd + + @classmethod + def transformer(cls, code: CodeType, module: ModuleType) -> CodeType: + module.__dd_code__ = collect_code_objects(code) # type: ignore[attr-defined] # type: ignore[attr-defined] + return code diff --git a/ddtrace/debugging/_function/store.py b/ddtrace/debugging/_function/store.py index 9a17aae3b91..e11c75070a2 100644 --- a/ddtrace/debugging/_function/store.py +++ b/ddtrace/debugging/_function/store.py @@ -13,14 +13,14 @@ from ddtrace.internal.injection import HookType from ddtrace.internal.injection import eject_hooks from ddtrace.internal.injection import inject_hooks -from ddtrace.internal.wrapping import WrappedFunction +from ddtrace.internal.wrapping.context import ContextWrappedFunction from ddtrace.internal.wrapping.context import WrappingContext WrapperType = Callable[[FunctionType, Any, Any, Any], Any] -class FullyNamedWrappedFunction(FullyNamed, WrappedFunction): +class FullyNamedContextWrappedFunction(FullyNamed, ContextWrappedFunction): """A fully named wrapper function.""" @@ -54,17 +54,17 @@ def _store(self, function: FunctionType) -> 
None: if function not in self._code_map: self._code_map[function] = function.__code__ - def inject_hooks(self, function: FullyNamedWrappedFunction, hooks: List[HookInfoType]) -> Set[str]: + def inject_hooks(self, function: FullyNamedContextWrappedFunction, hooks: List[HookInfoType]) -> Set[str]: """Bulk-inject hooks into a function. Returns the set of probe IDs for those probes that failed to inject. """ try: - return self.inject_hooks(cast(FullyNamedWrappedFunction, function.__dd_wrapped__), hooks) + f = cast(FunctionType, cast(FullyNamedContextWrappedFunction, function.__dd_context_wrapped__.__wrapped__)) # type: ignore[union-attr] except AttributeError: f = cast(FunctionType, function) - self._store(f) - return {p.probe_id for _, _, p in inject_hooks(f, hooks)} + self._store(f) + return {p.probe_id for _, _, p in inject_hooks(f, hooks)} def eject_hooks(self, function: FunctionType, hooks: List[HookInfoType]) -> Set[str]: """Bulk-eject hooks from a function. @@ -72,15 +72,14 @@ def eject_hooks(self, function: FunctionType, hooks: List[HookInfoType]) -> Set[ Returns the set of probe IDs for those probes that failed to eject. """ try: - wrapped = cast(FullyNamedWrappedFunction, function).__dd_wrapped__ + f = cast(FullyNamedContextWrappedFunction, function).__dd_context_wrapped__.__wrapped__ # type: ignore[union-attr] except AttributeError: # Not a wrapped function so we can actually eject from it - return {p.probe_id for _, _, p in eject_hooks(function, hooks)} - else: - # Try on the wrapped function. 
- return self.eject_hooks(cast(FunctionType, wrapped), hooks) + f = function - def inject_hook(self, function: FullyNamedWrappedFunction, hook: HookType, line: int, arg: Any) -> bool: + return {p.probe_id for _, _, p in eject_hooks(cast(FunctionType, f), hooks)} + + def inject_hook(self, function: FullyNamedContextWrappedFunction, hook: HookType, line: int, arg: Any) -> bool: """Inject a hook into a function.""" return not not self.inject_hooks(function, [(hook, line, arg)]) @@ -94,7 +93,7 @@ def wrap(self, function: FunctionType, wrapping_context: WrappingContext) -> Non self._wrapper_map[function] = wrapping_context wrapping_context.wrap() - def unwrap(self, function: FullyNamedWrappedFunction) -> None: + def unwrap(self, function: FullyNamedContextWrappedFunction) -> None: """Unwrap a hook around a wrapped function.""" self._wrapper_map.pop(cast(FunctionType, function)).unwrap() diff --git a/ddtrace/debugging/_probe/model.py b/ddtrace/debugging/_probe/model.py index 6f989d627f4..f832484ac6d 100644 --- a/ddtrace/debugging/_probe/model.py +++ b/ddtrace/debugging/_probe/model.py @@ -85,6 +85,9 @@ def update(self, other: "Probe") -> None: for attrib in (f.name for f in fields(self) if f.compare): setattr(self, attrib, getattr(other, attrib)) + def is_global_rate_limited(self) -> bool: + return False + def __hash__(self): return hash(self.probe_id) @@ -245,12 +248,14 @@ class LogProbeMixin(AbstractProbeMixIn): @dataclass class LogLineProbe(Probe, LineLocationMixin, LogProbeMixin, ProbeConditionMixin, RateLimitMixin): - pass + def is_global_rate_limited(self) -> bool: + return self.take_snapshot @dataclass class LogFunctionProbe(Probe, FunctionLocationMixin, TimingMixin, LogProbeMixin, ProbeConditionMixin, RateLimitMixin): - pass + def is_global_rate_limited(self) -> bool: + return self.take_snapshot @dataclass diff --git a/ddtrace/debugging/_signal/__init__.py b/ddtrace/debugging/_signal/__init__.py index e69de29bb2d..6dea9afe965 100644 --- 
a/ddtrace/debugging/_signal/__init__.py +++ b/ddtrace/debugging/_signal/__init__.py @@ -0,0 +1,5 @@ +# DEV: Import these modules to allow registering the single dispatch functions +from ddtrace.debugging._signal.metric_sample import MetricSample # noqa +from ddtrace.debugging._signal.snapshot import Snapshot # noqa +from ddtrace.debugging._signal.tracing import DynamicSpan # noqa +from ddtrace.debugging._signal.tracing import SpanDecoration # noqa diff --git a/ddtrace/debugging/_signal/collector.py b/ddtrace/debugging/_signal/collector.py index 461e8ff1af6..57868b30485 100644 --- a/ddtrace/debugging/_signal/collector.py +++ b/ddtrace/debugging/_signal/collector.py @@ -6,7 +6,7 @@ from ddtrace.debugging._encoding import BufferedEncoder from ddtrace.debugging._metrics import metrics -from ddtrace.debugging._signal.model import LogSignal +from ddtrace.debugging._signal.log import LogSignal from ddtrace.debugging._signal.model import Signal from ddtrace.debugging._signal.model import SignalState from ddtrace.internal._encoding import BufferFull diff --git a/ddtrace/debugging/_signal/log.py b/ddtrace/debugging/_signal/log.py new file mode 100644 index 00000000000..23cde73f642 --- /dev/null +++ b/ddtrace/debugging/_signal/log.py @@ -0,0 +1,67 @@ +import abc +from dataclasses import dataclass +import typing as t + +from ddtrace.debugging._probe.model import FunctionLocationMixin +from ddtrace.debugging._probe.model import LineLocationMixin +from ddtrace.debugging._signal.model import Signal + + +@dataclass +class LogSignal(Signal): + """A signal that also emits a log message. + + Some signals might require sending a log message along with the base signal + data. For example, all the collected errors from expression evaluations + (e.g. conditions) might need to be reported. 
+ """ + + @property + @abc.abstractmethod + def message(self) -> t.Optional[str]: + """The log message to emit.""" + pass + + @abc.abstractmethod + def has_message(self) -> bool: + """Whether the signal has a log message to emit.""" + pass + + @property + def data(self) -> t.Dict[str, t.Any]: + """Extra data to include in the snapshot portion of the log message.""" + return {} + + def _probe_details(self) -> t.Dict[str, t.Any]: + probe = self.probe + if isinstance(probe, LineLocationMixin): + location = { + "file": str(probe.resolved_source_file), + "lines": [str(probe.line)], + } + elif isinstance(probe, FunctionLocationMixin): + location = { + "type": probe.module, + "method": probe.func_qname, + } + else: + return {} + + return { + "id": probe.probe_id, + "version": probe.version, + "location": location, + } + + @property + def snapshot(self) -> t.Dict[str, t.Any]: + full_data = { + "id": self.uuid, + "timestamp": int(self.timestamp * 1e3), # milliseconds + "evaluationErrors": [{"expr": e.expr, "message": e.message} for e in self.errors], + "probe": self._probe_details(), + "language": "python", + } + full_data.update(self.data) + + return full_data diff --git a/ddtrace/debugging/_signal/metric_sample.py b/ddtrace/debugging/_signal/metric_sample.py index f8bebc17d83..d92cdcec173 100644 --- a/ddtrace/debugging/_signal/metric_sample.py +++ b/ddtrace/debugging/_signal/metric_sample.py @@ -4,9 +4,12 @@ from typing import cast from ddtrace.debugging._metrics import probe_metrics +from ddtrace.debugging._probe.model import MetricFunctionProbe +from ddtrace.debugging._probe.model import MetricLineProbe from ddtrace.debugging._probe.model import MetricProbeKind from ddtrace.debugging._probe.model import MetricProbeMixin -from ddtrace.debugging._signal.model import LogSignal +from ddtrace.debugging._signal.log import LogSignal +from ddtrace.debugging._signal.model import probe_to_signal from ddtrace.internal.metrics import Metrics @@ -50,3 +53,13 @@ def message(self) -> 
Optional[str]: def has_message(self) -> bool: return bool(self.errors) + + +@probe_to_signal.register +def _(probe: MetricFunctionProbe, frame, thread, trace_context, meter): + return MetricSample(probe=probe, frame=frame, thread=thread, trace_context=trace_context, meter=meter) + + +@probe_to_signal.register +def _(probe: MetricLineProbe, frame, thread, trace_context, meter): + return MetricSample(probe=probe, frame=frame, thread=thread, trace_context=trace_context, meter=meter) diff --git a/ddtrace/debugging/_signal/model.py b/ddtrace/debugging/_signal/model.py index a03b157adde..9c9448677c0 100644 --- a/ddtrace/debugging/_signal/model.py +++ b/ddtrace/debugging/_signal/model.py @@ -3,6 +3,8 @@ from dataclasses import dataclass from dataclasses import field from enum import Enum +from functools import singledispatch +import threading from threading import Thread import time from types import FrameType @@ -19,8 +21,6 @@ from ddtrace._trace.context import Context from ddtrace._trace.span import Span from ddtrace.debugging._expressions import DDExpressionEvaluationError -from ddtrace.debugging._probe.model import FunctionLocationMixin -from ddtrace.debugging._probe.model import LineLocationMixin from ddtrace.debugging._probe.model import Probe from ddtrace.debugging._probe.model import ProbeConditionMixin from ddtrace.debugging._probe.model import ProbeEvalTiming @@ -28,6 +28,8 @@ from ddtrace.debugging._probe.model import TimingMixin from ddtrace.debugging._safety import get_args from ddtrace.internal.compat import ExcInfoType +from ddtrace.internal.metrics import Metrics +from ddtrace.internal.rate_limiter import BudgetRateLimiterWithJitter as RateLimiter from ddtrace.internal.rate_limiter import RateLimitExceeded @@ -183,13 +185,17 @@ def do_exit(self, retval: Any, exc_info: ExcInfoType, duration: int) -> None: self.state = SignalState.DONE - def do_line(self) -> None: + def do_line(self, global_limiter: Optional[RateLimiter] = None) -> None: frame = self.frame 
scope = ChainMap(frame.f_locals, frame.f_globals) if not self._eval_condition(scope): return + if global_limiter is not None and global_limiter.limit() is RateLimitExceeded: + self.state = SignalState.SKIP_RATE + return + if self._rate_limit_exceeded(): return @@ -197,62 +203,19 @@ def do_line(self) -> None: self.state = SignalState.DONE - -@dataclass -class LogSignal(Signal): - """A signal that also emits a log message. - - Some signals might require sending a log message along with the base signal - data. For example, all the collected errors from expression evaluations - (e.g. conditions) might need to be reported. - """ - - @property - @abc.abstractmethod - def message(self) -> Optional[str]: - """The log message to emit.""" - pass - - @abc.abstractmethod - def has_message(self) -> bool: - """Whether the signal has a log message to emit.""" - pass - - @property - def data(self) -> Dict[str, Any]: - """Extra data to include in the snapshot portion of the log message.""" - return {} - - def _probe_details(self) -> Dict[str, Any]: - probe = self.probe - if isinstance(probe, LineLocationMixin): - location = { - "file": str(probe.resolved_source_file), - "lines": [str(probe.line)], - } - elif isinstance(probe, FunctionLocationMixin): - location = { - "type": probe.module, - "method": probe.func_qname, - } - else: - return {} - - return { - "id": probe.probe_id, - "version": probe.version, - "location": location, - } - - @property - def snapshot(self) -> Dict[str, Any]: - full_data = { - "id": self.uuid, - "timestamp": int(self.timestamp * 1e3), # milliseconds - "evaluationErrors": [{"expr": e.expr, "message": e.message} for e in self.errors], - "probe": self._probe_details(), - "language": "python", - } - full_data.update(self.data) - - return full_data + @staticmethod + def from_probe( + probe: Probe, frame: FrameType, thread: Thread, trace_context: Optional[Any], meter: Metrics.Meter + ) -> "Signal": + return probe_to_signal(probe, frame, thread, trace_context, 
meter) + + +@singledispatch +def probe_to_signal( + probe: Probe, + frame: FrameType, + thread: threading.Thread, + trace_context: Optional[Any], + meter: Metrics.Meter, +) -> Signal: + raise TypeError(f"Unsupported probe type: {type(probe)}") diff --git a/ddtrace/debugging/_signal/snapshot.py b/ddtrace/debugging/_signal/snapshot.py index 9f42921a7a3..5bb02f16659 100644 --- a/ddtrace/debugging/_signal/snapshot.py +++ b/ddtrace/debugging/_signal/snapshot.py @@ -17,6 +17,8 @@ from ddtrace.debugging._probe.model import FunctionLocationMixin from ddtrace.debugging._probe.model import LineLocationMixin from ddtrace.debugging._probe.model import LiteralTemplateSegment +from ddtrace.debugging._probe.model import LogFunctionProbe +from ddtrace.debugging._probe.model import LogLineProbe from ddtrace.debugging._probe.model import LogProbeMixin from ddtrace.debugging._probe.model import TemplateSegment from ddtrace.debugging._redaction import REDACTED_PLACEHOLDER @@ -25,8 +27,9 @@ from ddtrace.debugging._safety import get_globals from ddtrace.debugging._safety import get_locals from ddtrace.debugging._signal import utils +from ddtrace.debugging._signal.log import LogSignal from ddtrace.debugging._signal.model import EvaluationError -from ddtrace.debugging._signal.model import LogSignal +from ddtrace.debugging._signal.model import probe_to_signal from ddtrace.debugging._signal.utils import serialize from ddtrace.internal.compat import ExcInfoType from ddtrace.internal.utils.time import HourGlass @@ -177,3 +180,13 @@ def data(self): "captures": captures, "duration": self.duration, } + + +@probe_to_signal.register +def _(probe: LogFunctionProbe, frame, thread, trace_context, meter): + return Snapshot(probe=probe, frame=frame, thread=thread, trace_context=trace_context) + + +@probe_to_signal.register +def _(probe: LogLineProbe, frame, thread, trace_context, meter): + return Snapshot(probe=probe, frame=frame, thread=thread, trace_context=trace_context) diff --git 
a/ddtrace/debugging/_signal/tracing.py b/ddtrace/debugging/_signal/tracing.py index 9d3712a963b..3c9eb3f447e 100644 --- a/ddtrace/debugging/_signal/tracing.py +++ b/ddtrace/debugging/_signal/tracing.py @@ -7,12 +7,15 @@ from ddtrace.constants import ORIGIN_KEY from ddtrace.debugging._expressions import DDExpressionEvaluationError from ddtrace.debugging._probe.model import Probe +from ddtrace.debugging._probe.model import SpanDecorationFunctionProbe +from ddtrace.debugging._probe.model import SpanDecorationLineProbe from ddtrace.debugging._probe.model import SpanDecorationMixin from ddtrace.debugging._probe.model import SpanDecorationTargetSpan from ddtrace.debugging._probe.model import SpanFunctionProbe +from ddtrace.debugging._signal.log import LogSignal from ddtrace.debugging._signal.model import EvaluationError -from ddtrace.debugging._signal.model import LogSignal from ddtrace.debugging._signal.model import Signal +from ddtrace.debugging._signal.model import probe_to_signal from ddtrace.debugging._signal.utils import serialize from ddtrace.internal.compat import ExcInfoType from ddtrace.internal.logger import get_logger @@ -112,3 +115,18 @@ def message(self): def has_message(self) -> bool: return bool(self.errors) + + +@probe_to_signal.register +def _(probe: SpanFunctionProbe, frame, thread, trace_context, meter): + return DynamicSpan(probe=probe, frame=frame, thread=thread, trace_context=trace_context) + + +@probe_to_signal.register +def _(probe: SpanDecorationFunctionProbe, frame, thread, trace_context, meter): + return SpanDecoration(probe=probe, frame=frame, thread=thread) + + +@probe_to_signal.register +def _(probe: SpanDecorationLineProbe, frame, thread, trace_context, meter): + return SpanDecoration(probe=probe, frame=frame, thread=thread) diff --git a/ddtrace/debugging/_signal/utils.py b/ddtrace/debugging/_signal/utils.py index b2e5d8e285b..09b319598ef 100644 --- a/ddtrace/debugging/_signal/utils.py +++ b/ddtrace/debugging/_signal/utils.py @@ -304,6 
+304,15 @@ def capture_value( } fields = get_fields(value) + + # Capture exception chain for exceptions + if _isinstance(value, BaseException): + for attr in ("args", "__cause__", "__context__", "__suppress_context__"): + try: + fields[attr] = object.__getattribute__(value, attr) + except AttributeError: + pass + captured_fields = { n: ( capture_value(v, level=level - 1, maxlen=maxlen, maxsize=maxsize, maxfields=maxfields, stopping_cond=cond) diff --git a/ddtrace/debugging/_uploader.py b/ddtrace/debugging/_uploader.py index c6ff84fc190..f8f1a22a9d2 100644 --- a/ddtrace/debugging/_uploader.py +++ b/ddtrace/debugging/_uploader.py @@ -12,7 +12,6 @@ from ddtrace.internal import compat from ddtrace.internal.logger import get_logger from ddtrace.internal.periodic import ForksafeAwakeablePeriodicService -from ddtrace.internal.runtime import container from ddtrace.internal.utils.http import connector from ddtrace.internal.utils.retry import fibonacci_backoff_with_jitter @@ -55,9 +54,6 @@ def __init__(self, interval: Optional[float] = None) -> None: "Accept": "text/plain", } - container.update_headers_with_container_info(self._headers, container.get_container_info()) - container.update_header_with_external_info(self._headers) - if di_config._tags_in_qs and di_config.tags: self.ENDPOINT += f"?ddtags={quote(di_config.tags)}" self._connect = connector(di_config._intake_url, timeout=di_config.upload_timeout) diff --git a/ddtrace/ext/__init__.py b/ddtrace/ext/__init__.py index 2387dbd63a4..965dd04f43f 100644 --- a/ddtrace/ext/__init__.py +++ b/ddtrace/ext/__init__.py @@ -7,6 +7,7 @@ class SpanTypes(object): HTTP = "http" MONGODB = "mongodb" REDIS = "redis" + SERVERLESS = "serverless" SQL = "sql" TEMPLATE = "template" TEST = "test" diff --git a/ddtrace/internal/_threads.cpp b/ddtrace/internal/_threads.cpp index 152b7b0da6c..d775544827b 100644 --- a/ddtrace/internal/_threads.cpp +++ b/ddtrace/internal/_threads.cpp @@ -20,8 +20,13 @@ class GILGuard public: inline GILGuard() { - if 
(!_Py_IsFinalizing()) +#if PY_VERSION_HEX >= 0x030d0000 + if (!Py_IsFinalizing()) { +#else + if (!_Py_IsFinalizing()) { +#endif _state = PyGILState_Ensure(); + } } inline ~GILGuard() { @@ -42,13 +47,23 @@ class AllowThreads public: inline AllowThreads() { - if (!_Py_IsFinalizing()) +#if PY_VERSION_HEX >= 0x30d0000 + if (!Py_IsFinalizing()) { +#else + if (!_Py_IsFinalizing()) { +#endif _state = PyEval_SaveThread(); + } } inline ~AllowThreads() { - if (!_Py_IsFinalizing()) +#if PY_VERSION_HEX >= 0x30d0000 + if (!Py_IsFinalizing()) { +#else + if (!_Py_IsFinalizing()) { +#endif PyEval_RestoreThread(_state); + } } private: @@ -266,8 +281,13 @@ PeriodicThread_start(PeriodicThread* self, PyObject* args) } } - if (_Py_IsFinalizing()) +#if PY_VERSION_HEX >= 0x30d0000 + if (Py_IsFinalizing()) { +#else + if (_Py_IsFinalizing()) { +#endif break; + } if (PeriodicThread__periodic(self)) { // Error @@ -278,8 +298,15 @@ PeriodicThread_start(PeriodicThread* self, PyObject* args) // Run the shutdown callback if there was no error and we are not // at Python shutdown. - if (!self->_atexit && !error && self->_on_shutdown != Py_None && !_Py_IsFinalizing()) - PeriodicThread__on_shutdown(self); + if (!self->_atexit && !error && self->_on_shutdown != Py_None) { +#if PY_VERSION_HEX >= 0x30d0000 + if (!Py_IsFinalizing()) { +#else + if (!_Py_IsFinalizing()) { +#endif + PeriodicThread__on_shutdown(self); + } + } // Notify the join method that the thread has stopped self->_stopped->set(); @@ -418,9 +445,14 @@ PeriodicThread_dealloc(PeriodicThread* self) // Since the native thread holds a strong reference to this object, we // can only get here if the thread has actually stopped. - if (_Py_IsFinalizing()) +#if PY_VERSION_HEX >= 0x30d0000 + if (Py_IsFinalizing()) { +#else + if (_Py_IsFinalizing()) { +#endif // Do nothing. We are about to terminate and release resources anyway. return; + } // If we are trying to stop from the same thread, then we are still running. 
// This should happen rarely, so we don't worry about the memory leak this diff --git a/ddtrace/internal/_unpatched.py b/ddtrace/internal/_unpatched.py index c226379f759..e209f30ff2a 100644 --- a/ddtrace/internal/_unpatched.py +++ b/ddtrace/internal/_unpatched.py @@ -1,6 +1,7 @@ # Acquire a reference to the open function from the builtins module. This is # necessary to ensure that the open function can be used unpatched when required. from builtins import open as unpatched_open # noqa +from json import loads as unpatched_json_loads # noqa # Acquire a reference to the threading module. Some parts of the library (e.g. # the profiler) might be enabled programmatically and therefore might end up diff --git a/ddtrace/internal/ci_visibility/api/_base.py b/ddtrace/internal/ci_visibility/api/_base.py index dbaa48d1af3..f1e2cd2b3b0 100644 --- a/ddtrace/internal/ci_visibility/api/_base.py +++ b/ddtrace/internal/ci_visibility/api/_base.py @@ -207,7 +207,8 @@ def _finish_span(self, override_finish_time: Optional[float] = None) -> None: if self._session_settings.atr_settings is not None and self._session_settings.atr_settings.enabled: self._set_atr_tags() - # Allow item-level _set_span_tags() to potentially overwrite default and hierarchy tags. + # Allow items to potentially overwrite default and hierarchy tags. 
+ self._set_item_tags() self._set_span_tags() self._add_all_tags_to_span() @@ -247,6 +248,10 @@ def _set_default_tags(self) -> None: if self._source_file_info.end_line is not None: self.set_tag(test.SOURCE_END, self._source_file_info.end_line) + def _set_item_tags(self) -> None: + """Overridable by subclasses to set tags specific to the item type""" + pass + def _set_itr_tags(self, itr_enabled: bool) -> None: """Note: some tags are also added in the parent class as well as some individual item classes""" if not itr_enabled: diff --git a/ddtrace/internal/ci_visibility/api/_session.py b/ddtrace/internal/ci_visibility/api/_session.py index b6407cc86be..5267a345c0a 100644 --- a/ddtrace/internal/ci_visibility/api/_session.py +++ b/ddtrace/internal/ci_visibility/api/_session.py @@ -119,8 +119,8 @@ def set_efd_abort_reason(self, abort_reason: str): self._efd_abort_reason = abort_reason def efd_is_faulty_session(self): - """A session is considered "EFD faulty" if percentage of tests considered new is greater than the given - threshold + """A session is considered "EFD faulty" if the percentage of tests considered new is greater than the + given threshold, and the total number of news tests exceeds the threshold. 
NOTE: this behavior is cached on the assumption that this method will only be called once """ @@ -130,16 +130,19 @@ def efd_is_faulty_session(self): if self._session_settings.efd_settings.enabled is False: return False - total_tests = 0 - new_tests = 0 + total_tests_count = 0 + new_tests_count = 0 for _module in self._children.values(): for _suite in _module._children.values(): for _test in _suite._children.values(): - total_tests += 1 + total_tests_count += 1 if _test.is_new(): - new_tests += 1 + new_tests_count += 1 - new_tests_pct = 100 * (new_tests / total_tests) + if new_tests_count <= self._session_settings.efd_settings.faulty_session_threshold: + return False + + new_tests_pct = 100 * (new_tests_count / total_tests_count) self._efd_is_faulty_session = new_tests_pct > self._session_settings.efd_settings.faulty_session_threshold diff --git a/ddtrace/internal/ci_visibility/api/_test.py b/ddtrace/internal/ci_visibility/api/_test.py index 73dc6397b63..c63d9753eb8 100644 --- a/ddtrace/internal/ci_visibility/api/_test.py +++ b/ddtrace/internal/ci_visibility/api/_test.py @@ -5,6 +5,7 @@ from typing import Optional from typing import Union +from ddtrace.contrib.pytest_benchmark.constants import BENCHMARK_INFO from ddtrace.ext import SpanTypes from ddtrace.ext import test from ddtrace.ext.test_visibility import ITR_SKIPPING_LEVEL @@ -17,6 +18,7 @@ from ddtrace.internal.ci_visibility.api._base import TestVisibilityItemBase from ddtrace.internal.ci_visibility.api._base import TestVisibilitySessionSettings from ddtrace.internal.ci_visibility.api._coverage_data import TestVisibilityCoverageData +from ddtrace.internal.ci_visibility.constants import BENCHMARK from ddtrace.internal.ci_visibility.constants import TEST from ddtrace.internal.ci_visibility.constants import TEST_EFD_ABORT_REASON from ddtrace.internal.ci_visibility.constants import TEST_IS_NEW @@ -25,6 +27,8 @@ from ddtrace.internal.ci_visibility.telemetry.events import record_event_created_test from 
ddtrace.internal.ci_visibility.telemetry.events import record_event_finished_test from ddtrace.internal.logger import get_logger +from ddtrace.internal.test_visibility._benchmark_mixin import BENCHMARK_TAG_MAP +from ddtrace.internal.test_visibility._benchmark_mixin import BenchmarkDurationData from ddtrace.internal.test_visibility._efd_mixins import EFDTestStatus from ddtrace.internal.test_visibility._internal_item_ids import InternalTestId from ddtrace.internal.test_visibility.coverage_lines import CoverageLines @@ -78,8 +82,11 @@ def __init__( self._atr_is_retry = is_atr_retry self._atr_retries: List[TestVisibilityTest] = [] - # Currently unsupported - self._is_benchmark = None + self._is_benchmark = False + self._benchmark_duration_data: Optional[BenchmarkDurationData] = None + + # Some parameters can be overwritten: + self._overwritten_suite_name: Optional[str] = None def __repr__(self) -> str: suite_name = self.parent.name if self.parent is not None else "none" @@ -93,6 +100,14 @@ def _get_hierarchy_tags(self) -> Dict[str, str]: test.NAME: self.name, } + def _set_item_tags(self) -> None: + """Overrides parent tags for cases where they need to be modified""" + if self._is_benchmark: + self.set_tag(test.TYPE, BENCHMARK) + + if self._overwritten_suite_name is not None: + self.set_tag(test.SUITE, self._overwritten_suite_name) + def _set_efd_tags(self) -> None: if self._efd_is_retry: self.set_tag(TEST_IS_RETRY, self._efd_is_retry) @@ -101,8 +116,10 @@ def _set_efd_tags(self) -> None: self.set_tag(TEST_EFD_ABORT_REASON, self._efd_abort_reason) # NOTE: The is_new tag is currently only being set in the context of EFD (since that is the only context in - # which unique tests are fetched). - if self.is_new(): + # which unique tests are fetched). Additionally, if a session is considered faulty, we do not want to tag the + # test as new. 
+ session = self.get_session() + if self.is_new() and session is not None and not session.efd_is_faulty_session(): self.set_tag(TEST_IS_NEW, self._is_new) def _set_atr_tags(self) -> None: @@ -191,6 +208,22 @@ def finish_itr_skipped(self) -> None: self.mark_itr_skipped() self.finish_test(TestStatus.SKIP) + def overwrite_attributes( + self, + name: Optional[str] = None, + suite_name: Optional[str] = None, + parameters: Optional[str] = None, + codeowners: Optional[List[str]] = None, + ) -> None: + if name is not None: + self.name = name + if suite_name is not None: + self._overwritten_suite_name = suite_name + if parameters is not None: + self.set_parameters(parameters) + if codeowners is not None: + self._codeowners = codeowners + def add_coverage_data(self, coverage_data: Dict[Path, CoverageLines]) -> None: self._coverage_data.add_covered_files(coverage_data) @@ -396,3 +429,18 @@ def _get_browser_driver(self): if self._span is None: return None return self._span.get_tag("test.browser.driver") + + # + # Benchmark test functionality + # + def set_benchmark_data(self, duration_data: Optional[BenchmarkDurationData], is_benchmark: bool = True): + self._benchmark_duration_data = duration_data + self._is_benchmark = is_benchmark + + if self._benchmark_duration_data is not None: + self.set_tag(BENCHMARK_INFO, "Time") + + for tag, attr in BENCHMARK_TAG_MAP.items(): + value = getattr(self._benchmark_duration_data, tag) + if value is not None: + self.set_tag(attr, value) diff --git a/ddtrace/internal/ci_visibility/constants.py b/ddtrace/internal/ci_visibility/constants.py index f30b6743f5a..7ace37b9424 100644 --- a/ddtrace/internal/ci_visibility/constants.py +++ b/ddtrace/internal/ci_visibility/constants.py @@ -4,6 +4,7 @@ SUITE = "suite" TEST = "test" +BENCHMARK = "benchmark" EVENT_TYPE = "type" diff --git a/ddtrace/internal/ci_visibility/recorder.py b/ddtrace/internal/ci_visibility/recorder.py index 12bb3688dad..0046c21be15 100644 --- 
a/ddtrace/internal/ci_visibility/recorder.py +++ b/ddtrace/internal/ci_visibility/recorder.py @@ -74,10 +74,12 @@ from ddtrace.internal.service import Service from ddtrace.internal.test_visibility._atr_mixins import ATRTestMixin from ddtrace.internal.test_visibility._atr_mixins import AutoTestRetriesSettings +from ddtrace.internal.test_visibility._benchmark_mixin import BenchmarkTestMixin from ddtrace.internal.test_visibility._efd_mixins import EFDTestMixin from ddtrace.internal.test_visibility._efd_mixins import EFDTestStatus from ddtrace.internal.test_visibility._internal_item_ids import InternalTestId from ddtrace.internal.test_visibility._itr_mixins import ITRMixin +from ddtrace.internal.test_visibility.api import InternalTest from ddtrace.internal.test_visibility.coverage_lines import CoverageLines from ddtrace.internal.utils.formats import asbool from ddtrace.internal.utils.http import verify_url @@ -1007,6 +1009,12 @@ def _on_session_get_codeowners() -> Optional[Codeowners]: return CIVisibility.get_codeowners() +@_requires_civisibility_enabled +def _on_session_get_tracer() -> Optional[Tracer]: + log.debug("Getting tracer") + return CIVisibility.get_tracer() + + @_requires_civisibility_enabled def _on_session_is_atr_enabled() -> bool: log.debug("Getting Auto Test Retries enabled") @@ -1031,7 +1039,7 @@ def _on_session_get_path_codeowners(path: Path) -> Optional[List[str]]: codeowners = CIVisibility.get_codeowners() if codeowners is None: return None - return codeowners.of(str(path.absolute())) + return codeowners.of(str(path)) def _register_session_handlers(): @@ -1040,6 +1048,7 @@ def _register_session_handlers(): core.on("test_visibility.session.start", _on_start_session) core.on("test_visibility.session.finish", _on_finish_session) core.on("test_visibility.session.get_codeowners", _on_session_get_codeowners, "codeowners") + core.on("test_visibility.session.get_tracer", _on_session_get_tracer, "tracer") core.on("test_visibility.session.get_path_codeowners", 
_on_session_get_path_codeowners, "path_codeowners") core.on("test_visibility.session.get_workspace_path", _on_session_get_workspace_path, "workspace_path") core.on("test_visibility.session.is_atr_enabled", _on_session_is_atr_enabled, "is_atr_enabled") @@ -1181,6 +1190,27 @@ def _on_set_test_parameters(item_id: TestId, parameters: str): CIVisibility.get_test_by_id(item_id).set_parameters(parameters) +@_requires_civisibility_enabled +def _on_set_benchmark_data(set_benchmark_data_args: BenchmarkTestMixin.SetBenchmarkDataArgs): + item_id = set_benchmark_data_args.test_id + data = set_benchmark_data_args.benchmark_data + is_benchmark = set_benchmark_data_args.is_benchmark + log.debug("Handling set benchmark data for test id %s, data %s, is_benchmark %s", item_id, data, is_benchmark) + CIVisibility.get_test_by_id(item_id).set_benchmark_data(data, is_benchmark) + + +@_requires_civisibility_enabled +def _on_test_overwrite_attributes(overwrite_attribute_args: InternalTest.OverwriteAttributesArgs): + item_id = overwrite_attribute_args.test_id + name = overwrite_attribute_args.name + suite_name = overwrite_attribute_args.suite_name + parameters = overwrite_attribute_args.parameters + codeowners = overwrite_attribute_args.codeowners + + log.debug("Handling overwrite attributes: %s", overwrite_attribute_args) + CIVisibility.get_test_by_id(item_id).overwrite_attributes(name, suite_name, parameters, codeowners) + + def _register_test_handlers(): log.debug("Registering test handlers") core.on("test_visibility.test.discover", _on_discover_test) @@ -1188,6 +1218,8 @@ def _register_test_handlers(): core.on("test_visibility.test.start", _on_start_test) core.on("test_visibility.test.finish", _on_finish_test) core.on("test_visibility.test.set_parameters", _on_set_test_parameters) + core.on("test_visibility.test.set_benchmark_data", _on_set_benchmark_data) + core.on("test_visibility.test.overwrite_attributes", _on_test_overwrite_attributes) @_requires_civisibility_enabled diff --git 
a/ddtrace/internal/datadog/profiling/build_standalone.sh b/ddtrace/internal/datadog/profiling/build_standalone.sh index 286f6d179a2..c7bc4c14af9 100755 --- a/ddtrace/internal/datadog/profiling/build_standalone.sh +++ b/ddtrace/internal/datadog/profiling/build_standalone.sh @@ -94,6 +94,9 @@ compiler_args["cppcheck"]="-DDO_CPPCHECK=ON" compiler_args["infer"]="-DDO_INFER=ON" compiler_args["clangtidy"]="-DDO_CLANGTIDY=ON" compiler_args["clangtidy_cmd"]="-DCLANGTIDY_CMD=${CLANGTIDY_CMD}" +compiler_args["valgrind"]="-DDO_VALGRIND=ON" + +ctest_args=() # Initial cmake args cmake_args=( @@ -103,8 +106,8 @@ cmake_args=( -DPython3_ROOT_DIR=$(python3 -c "import sysconfig; print(sysconfig.get_config_var('prefix'))") ) -# Initial build targets; no matter what, dd_wrapper is the base dependency, so it's always built -targets=("dd_wrapper") +# Initial build targets; start out empty +targets=() set_cc() { if [ -z "${CC:-}" ]; then @@ -169,7 +172,7 @@ run_cmake() { fi if [[ " ${cmake_args[*]} " =~ " -DBUILD_TESTING=ON " ]]; then echo "--------------------------------------------------------------------- Running Tests" - ctest --output-on-failure || { echo "tests failed!"; exit 1; } + ctest ${ctest_args[*]} --output-on-failure || { echo "tests failed!"; exit 1; } fi # OK, the build or whatever went fine I guess. 
@@ -223,6 +226,10 @@ print_cmake_args() { echo "Targets: ${targets[*]}" } +print_ctest_args() { + echo "CTest Args: ${ctest_args[*]}" +} + ### Check input # Check the first slot, options add_compiler_args() { @@ -263,6 +270,11 @@ add_compiler_args() { cmake_args+=(${compiler_args["memory"]}) set_clang ;; + --valgrind) + cmake_args+=(${compiler_args["valgrind"]}) + ctest_args+="-T memcheck" + set_clang + ;; -C|--cppcheck) cmake_args+=(${compiler_args["cppcheck"]}) set_clang @@ -333,7 +345,9 @@ add_target() { targets+=("crashtracker") ;; dd_wrapper) - # We always build dd_wrapper, so no need to add it to the list + # `dd_wrapper` is a dependency of other targets, but the overall structure is weird when it's given explicitly + # so we only include it when it's called explicitly + targets+=("dd_wrapper") ;; stack_v2) targets+=("stack_v2") @@ -367,6 +381,8 @@ add_target "$3" # Print cmake args print_cmake_args +print_ctest_args + # Run cmake for target in "${targets[@]}"; do run_cmake $target diff --git a/ddtrace/internal/datadog/profiling/cmake/AnalysisFunc.cmake b/ddtrace/internal/datadog/profiling/cmake/AnalysisFunc.cmake index b95a1205a62..2495a84ed29 100644 --- a/ddtrace/internal/datadog/profiling/cmake/AnalysisFunc.cmake +++ b/ddtrace/internal/datadog/profiling/cmake/AnalysisFunc.cmake @@ -1,36 +1,29 @@ include(CheckIPOSupported) function(add_ddup_config target) - # Profiling native extensions are built with C++17, even though underlying - # repo adheres to the manylinux 2014 standard. This isn't currently a - # problem, but if it becomes one, we may have to structure the library - # differently. + # Profiling native extensions are built with C++17, even though underlying repo adheres to the manylinux 2014 + # standard. This isn't currently a problem, but if it becomes one, we may have to structure the library differently. 
target_compile_features(${target} PUBLIC cxx_std_17) # Common compile options - target_compile_options(${target} PRIVATE "$<$:-Os>" -ffunction-sections - -Wall - -Werror - -Wextra - -Wshadow - -Wnon-virtual-dtor - -Wold-style-cast) + target_compile_options( + ${target} + PRIVATE "$<$:-Os>" + -ffunction-sections + -Wall + -Werror + -Wextra + -Wshadow + -Wnon-virtual-dtor + -Wold-style-cast) if(CMAKE_SYSTEM_NAME STREQUAL "Darwin") # macOS-specific options - target_compile_options( - ${target} - PRIVATE "$<$:-Og;-g>" - "$<$:-Os;-g>" - ) + target_compile_options(${target} PRIVATE "$<$:-Og;-g>" "$<$:-Os;-g>") else() # Non-macOS (e.g., Linux) options - target_compile_options( - ${target} - PRIVATE "$<$:-Og;-ggdb3>" - "$<$:-Os;-ggdb3>" - -fno-semantic-interposition - ) + target_compile_options(${target} PRIVATE "$<$:-Og;-ggdb3>" + "$<$:-Os;-ggdb3>" -fno-semantic-interposition) endif() # Common link options @@ -66,22 +59,21 @@ function(add_ddup_config target) target_compile_options(${target} PRIVATE -fsanitize=${SANITIZE_OPTIONS} -fno-omit-frame-pointer) target_link_options(${target} PRIVATE -fsanitize=${SANITIZE_OPTIONS} -shared-libsan) - # Locate all directories containing relevant `.so` files - execute_process( - COMMAND bash -c "find $(${CMAKE_CXX_COMPILER} -print-file-name=) -name '*.so' -exec dirname {} \; | uniq" - OUTPUT_VARIABLE LIBSAN_LIB_PATHS - OUTPUT_STRIP_TRAILING_WHITESPACE COMMAND_ERROR_IS_FATAL ANY) + # Locate all directories containing relevant `.so` files + execute_process( + COMMAND bash -c "find $(${CMAKE_CXX_COMPILER} -print-file-name=) -name '*.so' -exec dirname {} \; | uniq" + OUTPUT_VARIABLE LIBSAN_LIB_PATHS + OUTPUT_STRIP_TRAILING_WHITESPACE COMMAND_ERROR_IS_FATAL ANY) - # Print for debugging - message(STATUS "LIBSAN_LIB_PATHS: ${LIBSAN_LIB_PATHS}") + # Print for debugging + message(STATUS "LIBSAN_LIB_PATHS: ${LIBSAN_LIB_PATHS}") - # Split the paths into a semicolon-separated list for CMake - string(REPLACE "\n" ";" LIBSAN_LIB_PATHS_LIST 
"${LIBSAN_LIB_PATHS}") + # Split the paths into a semicolon-separated list for CMake + string(REPLACE "\n" ";" LIBSAN_LIB_PATHS_LIST "${LIBSAN_LIB_PATHS}") - # Set RPATH to include all identified paths - set_target_properties(${target} PROPERTIES - BUILD_RPATH "${LIBSAN_LIB_PATHS_LIST}" - INSTALL_RPATH "${LIBSAN_LIB_PATHS_LIST}") + # Set RPATH to include all identified paths + set_target_properties(${target} PROPERTIES BUILD_RPATH "${LIBSAN_LIB_PATHS_LIST}" INSTALL_RPATH + "${LIBSAN_LIB_PATHS_LIST}") endif() # If DO_FANALYZER is specified and we're using gcc, then we can use -fanalyzer @@ -89,8 +81,7 @@ function(add_ddup_config target) target_compile_options(${target} PRIVATE -fanalyzer) endif() - # The main targets, ddup, crashtracker, stack_v2, and dd_wrapper are built - # as dynamic libraries, so PIC is required. And setting this is also fine - # for tests as they're loading those dynamic libraries. + # The main targets, ddup, crashtracker, stack_v2, and dd_wrapper are built as dynamic libraries, so PIC is required. + # And setting this is also fine for tests as they're loading those dynamic libraries. 
set_target_properties(${target} PROPERTIES POSITION_INDEPENDENT_CODE ON) endfunction() diff --git a/ddtrace/internal/datadog/profiling/cmake/FindLibdatadog.cmake b/ddtrace/internal/datadog/profiling/cmake/FindLibdatadog.cmake index 6e103fe7d70..3a96fbeb353 100644 --- a/ddtrace/internal/datadog/profiling/cmake/FindLibdatadog.cmake +++ b/ddtrace/internal/datadog/profiling/cmake/FindLibdatadog.cmake @@ -1,27 +1,106 @@ -# Only add this project if Datadog::Profiling is not already defined +# Only proceed if Datadog::Profiling (provided by libdatadog) isn't already defined if(TARGET Datadog::Profiling) return() endif() -include(ExternalProject) -set(TAG_LIBDATADOG - "v14.3.1" - CACHE STRING "libdatadog github tag") +# Set the FetchContent paths early +set(FETCHCONTENT_BASE_DIR + "${CMAKE_CURRENT_BINARY_DIR}/_deps" + CACHE PATH "FetchContent base directory") +set(FETCHCONTENT_DOWNLOADS_DIR + "${FETCHCONTENT_BASE_DIR}/downloads" + CACHE PATH "FetchContent downloads directory") -set(Datadog_BUILD_DIR ${CMAKE_BINARY_DIR}/libdatadog) -set(Datadog_ROOT ${Datadog_BUILD_DIR}/libdatadog-${TAG_LIBDATADOG}) +include_guard(GLOBAL) +include(FetchContent) -message(STATUS "${CMAKE_CURRENT_LIST_DIR}/tools/fetch_libdatadog.sh ${TAG_LIBDATADOG} ${Datadog_ROOT}") -execute_process(COMMAND "${CMAKE_CURRENT_LIST_DIR}/tools/fetch_libdatadog.sh" ${TAG_LIBDATADOG} ${Datadog_ROOT} - WORKING_DIRECTORY ${CMAKE_CURRENT_LIST_DIR} COMMAND_ERROR_IS_FATAL ANY) +# Set version if not already set +if(NOT DEFINED TAG_LIBDATADOG) + set(TAG_LIBDATADOG + "v14.3.1" + CACHE STRING "libdatadog github tag") +endif() + +if(NOT DEFINED DD_CHECKSUMS) + set(DD_CHECKSUMS + "57f83aff275628bb1af89c22bb4bd696726daf2a9e09b6cd0d966b29e65a7ad6 libdatadog-aarch64-alpine-linux-musl.tar.gz" + "2be2efa98dfc32f109abdd79242a8e046a7a300c77634135eb293e000ecd4a4c libdatadog-aarch64-apple-darwin.tar.gz" + "36db8d50ccabb71571158ea13835c0f1d05d30b32135385f97c16343cfb6ddd4 libdatadog-aarch64-unknown-linux-gnu.tar.gz" + 
"2f61fd21cf2f8147743e414b4a8c77250a17be3aecc42a69ffe54f0a603d5c92 libdatadog-x86_64-alpine-linux-musl.tar.gz" + "f01f05600591063eba4faf388f54c155ab4e6302e5776c7855e3734955f7daf7 libdatadog-x86_64-unknown-linux-gnu.tar.gz") +endif() + +# Determine platform-specific tarball name in a way that conforms to the libdatadog naming scheme in Github releases +if(CMAKE_SYSTEM_PROCESSOR MATCHES "aarch64|arm64") + set(DD_ARCH "aarch64") +elseif(CMAKE_SYSTEM_PROCESSOR MATCHES "x86_64|amd64") + set(DD_ARCH "x86_64") +else() + message(FATAL_ERROR "Unsupported architecture: ${CMAKE_SYSTEM_PROCESSOR}") +endif() + +if(APPLE) + set(DD_PLATFORM "apple-darwin") +elseif(UNIX) + execute_process( + COMMAND ldd --version + OUTPUT_VARIABLE LDD_OUTPUT + ERROR_VARIABLE LDD_OUTPUT + OUTPUT_STRIP_TRAILING_WHITESPACE) + if(LDD_OUTPUT MATCHES "musl") + set(DD_PLATFORM "alpine-linux-musl") + else() + set(DD_PLATFORM "unknown-linux-gnu") + endif() +else() + message(FATAL_ERROR "Unsupported operating system") +endif() + +set(DD_TARBALL "libdatadog-${DD_ARCH}-${DD_PLATFORM}.tar.gz") + +# Make sure we can get the checksum for the tarball +foreach(ENTRY IN LISTS DD_CHECKSUMS) + if(ENTRY MATCHES "^([a-fA-F0-9]+) ${DD_TARBALL}$") + set(DD_HASH "${CMAKE_MATCH_1}") + break() + endif() +endforeach() + +if(NOT DEFINED DD_HASH) + message(FATAL_ERROR "Could not find checksum for ${DD_TARBALL}") +endif() + +# Clean up any existing downloads if they exist +set(TARBALL_PATH "${FETCHCONTENT_DOWNLOADS_DIR}/${DD_TARBALL}") +if(EXISTS "${TARBALL_PATH}") + file(SHA256 "${TARBALL_PATH}" EXISTING_HASH) + if(NOT EXISTING_HASH STREQUAL DD_HASH) + file(REMOVE "${TARBALL_PATH}") + # Also remove the subbuild directory to force a fresh download + file(REMOVE_RECURSE "${CMAKE_CURRENT_BINARY_DIR}/_deps/libdatadog-subbuild") + endif() +endif() + +# Use FetchContent to download and extract the library +FetchContent_Declare( + libdatadog + URL 
"https://github.com/DataDog/libdatadog/releases/download/${TAG_LIBDATADOG}/${DD_TARBALL}" + URL_HASH SHA256=${DD_HASH} + DOWNLOAD_DIR "${FETCHCONTENT_DOWNLOADS_DIR}" SOURCE_DIR "${FETCHCONTENT_BASE_DIR}/libdatadog-src") + +# Make the content available +FetchContent_MakeAvailable(libdatadog) +# Set up paths +get_filename_component(Datadog_ROOT "${libdatadog_SOURCE_DIR}" ABSOLUTE) set(Datadog_DIR "${Datadog_ROOT}/cmake") -# Prefer static library to shared library +# Configure library preferences (static over shared) set(CMAKE_FIND_LIBRARY_SUFFIXES_BACKUP ${CMAKE_FIND_LIBRARY_SUFFIXES}) set(CMAKE_FIND_LIBRARY_SUFFIXES .a) +# Find the package find_package(Datadog REQUIRED) -# Restore CMAKE_FIND_LIBRARY_SUFFIXES +# Restore library preferences set(CMAKE_FIND_LIBRARY_SUFFIXES ${CMAKE_FIND_LIBRARY_SUFFIXES_BACKUP}) diff --git a/ddtrace/internal/datadog/profiling/cmake/tools/fetch_libdatadog.sh b/ddtrace/internal/datadog/profiling/cmake/tools/fetch_libdatadog.sh deleted file mode 100755 index a1e55066089..00000000000 --- a/ddtrace/internal/datadog/profiling/cmake/tools/fetch_libdatadog.sh +++ /dev/null @@ -1,100 +0,0 @@ -#!/bin/bash -# http://redsymbol.net/articles/unofficial-bash-strict-mode/ -set -euox pipefail -IFS=$'\n\t' - -usage() { - echo "Usage :" - echo "$0 " - echo "" - echo "Example" - echo " $0 v0.7.0-rc.1 ./vendor" -} - -if [ $# != 2 ] || [ "$1" == "-h" ]; then - usage - exit 1 -fi - -SCRIPTPATH=$(readlink -f "$0") -SCRIPTDIR=$(dirname "$SCRIPTPATH") - -OS_NAME=$(uname -s) -MARCH=$(uname -m) - -TAG_LIBDATADOG=$1 -TARGET_EXTRACT=$2 - -CHECKSUM_FILE=${SCRIPTDIR}/libdatadog_checksums.txt - -# if os is darwin, set distribution to apple-darwin and march to aarch64 -if [[ "$OS_NAME" == "Darwin" ]]; then - DISTRIBUTION="apple-darwin" - # if march is arm64 set it to aarch64 - if [[ "$MARCH" == "arm64" ]]; then - MARCH="aarch64" - else - echo "Unsupported architecture $MARCH for $OS_NAME" - exit 1 - fi -elif [[ "$OS_NAME" == "Linux" ]]; then - # Test for musl - 
MUSL_LIBC=$(ldd /bin/ls | grep 'musl' | head -1 | cut -d ' ' -f1 || true) - if [[ -n ${MUSL_LIBC-""} ]]; then - DISTRIBUTION="alpine-linux-musl" - else - DISTRIBUTION="unknown-linux-gnu" - fi -else - echo "Unsupported OS $OS_NAME" - exit 1 -fi - -# https://github.com/DataDog/libdatadog/releases/download/v0.7.0-rc.1/libdatadog-aarch64-alpine-linux-musl.tar.gz -TAR_LIBDATADOG=libdatadog-${MARCH}-${DISTRIBUTION}.tar.gz -GITHUB_URL_LIBDATADOG=https://github.com/DataDog/libdatadog/releases/download/${TAG_LIBDATADOG}/${TAR_LIBDATADOG} - -SHA256_LIBDATADOG="blank" -while IFS=' ' read -r checksum filename; do - if [ "$filename" == "$TAR_LIBDATADOG" ]; then - SHA256_LIBDATADOG="$checksum $filename" - break - fi -done < "$CHECKSUM_FILE" - -if [ "$SHA256_LIBDATADOG" == "blank" ]; then - echo "Could not find checksum for ${TAR_LIBDATADOG} in ${CHECKSUM_FILE}" - exit 1 -else - echo "Using libdatadog sha256: ${SHA256_LIBDATADOG}" -fi - -mkdir -p "$TARGET_EXTRACT" || true -cd "$TARGET_EXTRACT" - -if [[ -e "${TAR_LIBDATADOG}" ]]; then - already_present=1 -else - already_present=0 - echo "Downloading libdatadog ${GITHUB_URL_LIBDATADOG}..." - if command -v curl > /dev/null 2>&1; then - curl -fsSLO "${GITHUB_URL_LIBDATADOG}" - elif command -v wget > /dev/null 2>&1; then - wget -q -O "${GITHUB_URL_LIBDATADOG##*/}" "${GITHUB_URL_LIBDATADOG}" - else - echo "Error: neither curl nor wget is available." >&2 - exit 1 - fi -fi - -echo "Checking libdatadog sha256" -if ! echo "${SHA256_LIBDATADOG}" | sha256sum -c -; then - echo "Error validating libdatadog SHA256" - echo "Please clear $TARGET_EXTRACT before restarting" - exit 1 -fi - -if [[ $already_present -eq 0 || ! 
-f "cmake/DatadogConfig.cmake" ]]; then - echo "Extracting ${TAR_LIBDATADOG}" - tar xf "${TAR_LIBDATADOG}" --strip-components=1 --no-same-owner -fi diff --git a/ddtrace/internal/datadog/profiling/cmake/tools/libdatadog_checksums.txt b/ddtrace/internal/datadog/profiling/cmake/tools/libdatadog_checksums.txt deleted file mode 100644 index ca856e996ae..00000000000 --- a/ddtrace/internal/datadog/profiling/cmake/tools/libdatadog_checksums.txt +++ /dev/null @@ -1,5 +0,0 @@ -57f83aff275628bb1af89c22bb4bd696726daf2a9e09b6cd0d966b29e65a7ad6 libdatadog-aarch64-alpine-linux-musl.tar.gz -2be2efa98dfc32f109abdd79242a8e046a7a300c77634135eb293e000ecd4a4c libdatadog-aarch64-apple-darwin.tar.gz -36db8d50ccabb71571158ea13835c0f1d05d30b32135385f97c16343cfb6ddd4 libdatadog-aarch64-unknown-linux-gnu.tar.gz -2f61fd21cf2f8147743e414b4a8c77250a17be3aecc42a69ffe54f0a603d5c92 libdatadog-x86_64-alpine-linux-musl.tar.gz -f01f05600591063eba4faf388f54c155ab4e6302e5776c7855e3734955f7daf7 libdatadog-x86_64-unknown-linux-gnu.tar.gz diff --git a/ddtrace/internal/datadog/profiling/crashtracker/CMakeLists.txt b/ddtrace/internal/datadog/profiling/crashtracker/CMakeLists.txt index a38b70fc224..c23a3e3ddce 100644 --- a/ddtrace/internal/datadog/profiling/crashtracker/CMakeLists.txt +++ b/ddtrace/internal/datadog/profiling/crashtracker/CMakeLists.txt @@ -10,12 +10,11 @@ message(STATUS "Building extension: ${EXTENSION_NAME}") # Get the cmake modules for this project list(APPEND CMAKE_MODULE_PATH "${CMAKE_CURRENT_LIST_DIR}/../cmake") -# Includes -include(FetchContent) -include(ExternalProject) -include(FindLibdatadog) - -add_subdirectory(../dd_wrapper ${CMAKE_CURRENT_BINARY_DIR}/../dd_wrapper_build) +# Having a common target in a subdirectory like this is a hack and a mistake, but it's fiddly to change it so we haven't +# been able to. Instead, make sure that the binary path set in the subdirectory is stable *as a string* in order to make +# sure the caches work. 
+get_filename_component(DD_WRAPPER_BUILD_DIR ${CMAKE_CURRENT_BINARY_DIR}/../dd_wrapper_build ABSOLUTE) +add_subdirectory(../dd_wrapper ${DD_WRAPPER_BUILD_DIR}) find_package(Python3 COMPONENTS Interpreter Development) @@ -45,12 +44,8 @@ add_custom_command( add_library(${EXTENSION_NAME} SHARED ${CRASHTRACKER_CPP_SRC}) add_ddup_config(${EXTENSION_NAME}) -# Cython generates code that produces errors for the following, so relax compile -# options -target_compile_options( - ${EXTENSION_NAME} - PRIVATE -Wno-old-style-cast -Wno-shadow -Wno-address -) +# Cython generates code that produces errors for the following, so relax compile options +target_compile_options(${EXTENSION_NAME} PRIVATE -Wno-old-style-cast -Wno-shadow -Wno-address) # cmake may mutate the name of the library (e.g., lib- and -.so for dynamic libraries). This suppresses that behavior, # which is required to ensure all paths can be inferred correctly by setup.py. @@ -61,7 +56,7 @@ set_target_properties(${EXTENSION_NAME} PROPERTIES SUFFIX "") # typical. set_target_properties(${EXTENSION_NAME} PROPERTIES INSTALL_RPATH "$ORIGIN/..") target_include_directories(${EXTENSION_NAME} PRIVATE ../dd_wrapper/include ${Datadog_INCLUDE_DIRS} - ${Python3_INCLUDE_DIRS}) + ${Python3_INCLUDE_DIRS}) if(Python3_LIBRARIES) target_link_libraries(${EXTENSION_NAME} PRIVATE dd_wrapper ${Python3_LIBRARIES}) @@ -89,7 +84,7 @@ if(NOT CRASHTRACKER_EXE_TARGET_NAME) endif() set_target_properties(crashtracker_exe PROPERTIES INSTALL_RPATH "$ORIGIN/.." 
OUTPUT_NAME - ${CRASHTRACKER_EXE_TARGET_NAME}) + ${CRASHTRACKER_EXE_TARGET_NAME}) # To let crashtracker find Python library at runtime set_target_properties(crashtracker_exe PROPERTIES INSTALL_RPATH_USE_LINK_PATH TRUE) diff --git a/ddtrace/internal/datadog/profiling/dd_wrapper/CMakeLists.txt b/ddtrace/internal/datadog/profiling/dd_wrapper/CMakeLists.txt index 809569d8493..c427abdcfbc 100644 --- a/ddtrace/internal/datadog/profiling/dd_wrapper/CMakeLists.txt +++ b/ddtrace/internal/datadog/profiling/dd_wrapper/CMakeLists.txt @@ -12,15 +12,24 @@ get_filename_component(dd_wrapper_BUILD_DIR "${CMAKE_CURRENT_SOURCE_DIR}/../ddtr list(APPEND CMAKE_MODULE_PATH "${CMAKE_CURRENT_SOURCE_DIR}/../cmake") # Includes -include(FetchContent) -include(ExternalProject) -include(FindLibdatadog) include(AnalysisFunc) include(FindClangtidy) include(FindCppcheck) include(FindInfer) include(CheckSymbolExists) +# Load libdatadog +include(FindLibdatadog) + +# Since this file is currently only loaded as a subdirectory, we need to propagate certain libdatadog variables up to +# the parent scope. +set(Datadog_INCLUDE_DIRS + ${Datadog_INCLUDE_DIRS} + PARENT_SCOPE) +set(Datadog_LIBRARIES + ${Datadog_LIBRARIES} + PARENT_SCOPE) + set(THREADS_PREFER_PTHREAD_FLAG ON) find_package(Threads REQUIRED) @@ -51,7 +60,7 @@ target_include_directories(dd_wrapper PRIVATE include ${Datadog_INCLUDE_DIRS}) target_link_libraries(dd_wrapper PRIVATE ${Datadog_LIBRARIES} Threads::Threads) -# Figure out the suffix. Try to approximate the cpython way of doing things. C library +# Figure out the suffix. Try to approximate the cpython way of doing things. 
check_symbol_exists(__GLIBC__ "features.h" HAVE_GLIBC) check_symbol_exists(__MUSL__ "features.h" HAVE_MUSL) diff --git a/ddtrace/internal/datadog/profiling/dd_wrapper/include/ddup_interface.hpp b/ddtrace/internal/datadog/profiling/dd_wrapper/include/ddup_interface.hpp index cd18ead1966..0eec6ad87bc 100644 --- a/ddtrace/internal/datadog/profiling/dd_wrapper/include/ddup_interface.hpp +++ b/ddtrace/internal/datadog/profiling/dd_wrapper/include/ddup_interface.hpp @@ -44,6 +44,9 @@ extern "C" void ddup_push_release(Datadog::Sample* sample, int64_t release_time, int64_t count); void ddup_push_alloc(Datadog::Sample* sample, int64_t size, int64_t count); void ddup_push_heap(Datadog::Sample* sample, int64_t size); + void ddup_push_gpu_gputime(Datadog::Sample* sample, int64_t time, int64_t count); + void ddup_push_gpu_memory(Datadog::Sample* sample, int64_t mem, int64_t count); + void ddup_push_gpu_flops(Datadog::Sample* sample, int64_t flops, int64_t count); void ddup_push_lock_name(Datadog::Sample* sample, std::string_view lock_name); void ddup_push_threadinfo(Datadog::Sample* sample, int64_t thread_id, @@ -56,11 +59,13 @@ extern "C" void ddup_push_trace_type(Datadog::Sample* sample, std::string_view trace_type); void ddup_push_exceptioninfo(Datadog::Sample* sample, std::string_view exception_type, int64_t count); void ddup_push_class_name(Datadog::Sample* sample, std::string_view class_name); + void ddup_push_gpu_device_name(Datadog::Sample*, std::string_view device_name); void ddup_push_frame(Datadog::Sample* sample, std::string_view _name, std::string_view _filename, uint64_t address, int64_t line); + void ddup_push_absolute_ns(Datadog::Sample* sample, int64_t timestamp_ns); void ddup_push_monotonic_ns(Datadog::Sample* sample, int64_t monotonic_ns); void ddup_flush_sample(Datadog::Sample* sample); // Stack v2 specific flush, which reverses the locations diff --git a/ddtrace/internal/datadog/profiling/dd_wrapper/include/libdatadog_helpers.hpp 
b/ddtrace/internal/datadog/profiling/dd_wrapper/include/libdatadog_helpers.hpp index 9952eab8e3f..03a302eb533 100644 --- a/ddtrace/internal/datadog/profiling/dd_wrapper/include/libdatadog_helpers.hpp +++ b/ddtrace/internal/datadog/profiling/dd_wrapper/include/libdatadog_helpers.hpp @@ -45,7 +45,8 @@ namespace Datadog { X(local_root_span_id, "local root span id") \ X(trace_type, "trace type") \ X(class_name, "class name") \ - X(lock_name, "lock name") + X(lock_name, "lock name") \ + X(gpu_device_name, "gpu device name") #define X_ENUM(a, b) a, #define X_STR(a, b) b, diff --git a/ddtrace/internal/datadog/profiling/dd_wrapper/include/sample.hpp b/ddtrace/internal/datadog/profiling/dd_wrapper/include/sample.hpp index 8ddf412bf89..38baf59aa97 100644 --- a/ddtrace/internal/datadog/profiling/dd_wrapper/include/sample.hpp +++ b/ddtrace/internal/datadog/profiling/dd_wrapper/include/sample.hpp @@ -100,6 +100,9 @@ class Sample bool push_release(int64_t lock_time, int64_t count); bool push_alloc(int64_t size, int64_t count); bool push_heap(int64_t size); + bool push_gpu_gputime(int64_t time, int64_t count); + bool push_gpu_memory(int64_t size, int64_t count); + bool push_gpu_flops(int64_t flops, int64_t count); // Adds metadata to sample bool push_lock_name(std::string_view lock_name); @@ -112,11 +115,15 @@ class Sample bool push_exceptioninfo(std::string_view exception_type, int64_t count); bool push_class_name(std::string_view class_name); bool push_monotonic_ns(int64_t monotonic_ns); + bool push_absolute_ns(int64_t timestamp_ns); // Interacts with static Sample state bool is_timeline_enabled() const; static void set_timeline(bool enabled); + // Pytorch GPU metadata + bool push_gpu_device_name(std::string_view device_name); + // Assumes frames are pushed in leaf-order void push_frame(std::string_view name, // for ddog_prof_Function std::string_view filename, // for ddog_prof_Function diff --git a/ddtrace/internal/datadog/profiling/dd_wrapper/include/sample_manager.hpp 
b/ddtrace/internal/datadog/profiling/dd_wrapper/include/sample_manager.hpp index baf6af2b33a..30c4048e967 100644 --- a/ddtrace/internal/datadog/profiling/dd_wrapper/include/sample_manager.hpp +++ b/ddtrace/internal/datadog/profiling/dd_wrapper/include/sample_manager.hpp @@ -19,7 +19,6 @@ class SampleManager private: static inline unsigned int max_nframes{ g_default_max_nframes }; static inline SampleType type_mask{ SampleType::All }; - static inline std::mutex init_mutex{}; static inline size_t sample_pool_capacity{ g_default_sample_pool_capacity }; static inline std::unique_ptr sample_pool{ nullptr }; diff --git a/ddtrace/internal/datadog/profiling/dd_wrapper/include/types.hpp b/ddtrace/internal/datadog/profiling/dd_wrapper/include/types.hpp index 51785be2c93..3c62fa5d62f 100644 --- a/ddtrace/internal/datadog/profiling/dd_wrapper/include/types.hpp +++ b/ddtrace/internal/datadog/profiling/dd_wrapper/include/types.hpp @@ -11,7 +11,10 @@ enum SampleType : unsigned int LockRelease = 1 << 4, Allocation = 1 << 5, Heap = 1 << 6, - All = CPU | Wall | Exception | LockAcquire | LockRelease | Allocation | Heap + GPUTime = 1 << 7, + GPUMemory = 1 << 8, + GPUFlops = 1 << 9, + All = CPU | Wall | Exception | LockAcquire | LockRelease | Allocation | Heap | GPUTime | GPUMemory | GPUFlops }; // Every Sample object has a corresponding `values` vector, since libdatadog expects contiguous values per sample. 
@@ -30,6 +33,12 @@ struct ValueIndex unsigned short alloc_space; unsigned short alloc_count; unsigned short heap_space; + unsigned short gpu_time; + unsigned short gpu_count; + unsigned short gpu_alloc_space; + unsigned short gpu_alloc_count; + unsigned short gpu_flops; + unsigned short gpu_flops_samples; // Should be "count," but flops is already a count }; } // namespace Datadog diff --git a/ddtrace/internal/datadog/profiling/dd_wrapper/include/uploader.hpp b/ddtrace/internal/datadog/profiling/dd_wrapper/include/uploader.hpp index 8a5394b0cb2..ed19f316fc3 100644 --- a/ddtrace/internal/datadog/profiling/dd_wrapper/include/uploader.hpp +++ b/ddtrace/internal/datadog/profiling/dd_wrapper/include/uploader.hpp @@ -24,9 +24,7 @@ class Uploader private: static inline std::mutex upload_lock{}; std::string errmsg; - static inline std::unique_ptr cancel{ - ddog_CancellationToken_new() - }; + static inline std::unique_ptr cancel; static inline std::atomic upload_seq{ 0 }; std::string output_filename; std::unique_ptr ddog_exporter; diff --git a/ddtrace/internal/datadog/profiling/dd_wrapper/include/uploader_builder.hpp b/ddtrace/internal/datadog/profiling/dd_wrapper/include/uploader_builder.hpp index 62ee6aad853..7077096c744 100644 --- a/ddtrace/internal/datadog/profiling/dd_wrapper/include/uploader_builder.hpp +++ b/ddtrace/internal/datadog/profiling/dd_wrapper/include/uploader_builder.hpp @@ -43,6 +43,8 @@ class UploaderBuilder static void set_output_filename(std::string_view _output_filename); static std::variant build(); + + static void postfork_child(); }; } // namespace Datadog diff --git a/ddtrace/internal/datadog/profiling/dd_wrapper/src/code_provenance.cpp b/ddtrace/internal/datadog/profiling/dd_wrapper/src/code_provenance.cpp index 0a4a49a4ce5..f3147cd2034 100644 --- a/ddtrace/internal/datadog/profiling/dd_wrapper/src/code_provenance.cpp +++ b/ddtrace/internal/datadog/profiling/dd_wrapper/src/code_provenance.cpp @@ -14,9 +14,8 @@ namespace Datadog { void 
Datadog::CodeProvenance::postfork_child() { - get_instance().mtx.~mutex(); // Destroy the mutex + // NB placement-new to re-init and leak the mutex because doing anything else is UB new (&get_instance().mtx) std::mutex(); // Recreate the mutex - get_instance().reset(); // Reset the state } void diff --git a/ddtrace/internal/datadog/profiling/dd_wrapper/src/ddup_interface.cpp b/ddtrace/internal/datadog/profiling/dd_wrapper/src/ddup_interface.cpp index baee51a7eda..9b52cbcaf6d 100644 --- a/ddtrace/internal/datadog/profiling/dd_wrapper/src/ddup_interface.cpp +++ b/ddtrace/internal/datadog/profiling/dd_wrapper/src/ddup_interface.cpp @@ -24,6 +24,7 @@ ddup_postfork_child() Datadog::Uploader::postfork_child(); Datadog::SampleManager::postfork_child(); Datadog::CodeProvenance::postfork_child(); + Datadog::UploaderBuilder::postfork_child(); } void @@ -193,6 +194,24 @@ ddup_push_heap(Datadog::Sample* sample, int64_t size) // cppcheck-suppress unuse sample->push_heap(size); } +void +ddup_push_gpu_gputime(Datadog::Sample* sample, int64_t time, int64_t count) // cppcheck-suppress unusedFunction +{ + sample->push_gpu_gputime(time, count); +} + +void +ddup_push_gpu_memory(Datadog::Sample* sample, int64_t size, int64_t count) // cppcheck-suppress unusedFunction +{ + sample->push_gpu_memory(size, count); +} + +void +ddup_push_gpu_flops(Datadog::Sample* sample, int64_t flops, int64_t count) // cppcheck-suppress unusedFunction +{ + sample->push_gpu_flops(flops, count); +} + void ddup_push_lock_name(Datadog::Sample* sample, std::string_view lock_name) // cppcheck-suppress unusedFunction { @@ -252,6 +271,12 @@ ddup_push_class_name(Datadog::Sample* sample, std::string_view class_name) // cp sample->push_class_name(class_name); } +void +ddup_push_gpu_device_name(Datadog::Sample* sample, std::string_view gpu_device_name) // cppcheck-suppress unusedFunction +{ + sample->push_gpu_device_name(gpu_device_name); +} + void ddup_push_frame(Datadog::Sample* sample, // cppcheck-suppress 
unusedFunction std::string_view _name, @@ -262,6 +287,12 @@ ddup_push_frame(Datadog::Sample* sample, // cppcheck-suppress unusedFunction sample->push_frame(_name, _filename, address, line); } +void +ddup_push_absolute_ns(Datadog::Sample* sample, int64_t timestamp_ns) // cppcheck-suppress unusedFunction +{ + sample->push_absolute_ns(timestamp_ns); +} + void ddup_push_monotonic_ns(Datadog::Sample* sample, int64_t monotonic_ns) // cppcheck-suppress unusedFunction { diff --git a/ddtrace/internal/datadog/profiling/dd_wrapper/src/profile.cpp b/ddtrace/internal/datadog/profiling/dd_wrapper/src/profile.cpp index f9f7a3e9585..860f9c7cd3e 100644 --- a/ddtrace/internal/datadog/profiling/dd_wrapper/src/profile.cpp +++ b/ddtrace/internal/datadog/profiling/dd_wrapper/src/profile.cpp @@ -89,6 +89,23 @@ Datadog::Profile::setup_samplers() if (0U != (type_mask & SampleType::Heap)) { val_idx.heap_space = get_value_idx("heap-space", "bytes"); } + if (0U != (type_mask & SampleType::GPUTime)) { + val_idx.gpu_time = get_value_idx("gpu-time", "nanoseconds"); + val_idx.gpu_count = get_value_idx("gpu-samples", "count"); + } + if (0U != (type_mask & SampleType::GPUMemory)) { + // In the backend the unit is called 'gpu-space', but maybe for consistency + // it should be gpu-alloc-space + // gpu-alloc-samples may be unused, but it's passed along for scaling purposes + val_idx.gpu_alloc_space = get_value_idx("gpu-space", "bytes"); + val_idx.gpu_alloc_count = get_value_idx("gpu-alloc-samples", "count"); + } + if (0U != (type_mask & SampleType::GPUFlops)) { + // Technically "FLOPS" is a unit, but we call it a 'count' because no + // other profiler uses it as a unit. + val_idx.gpu_flops = get_value_idx("gpu-flops", "count"); + val_idx.gpu_flops_samples = get_value_idx("gpu-flops-samples", "count"); + } // Whatever the first sampler happens to be is the default "period" for the profile // The value of 1 is a pointless default. 
@@ -186,6 +203,6 @@ Datadog::Profile::collect(const ddog_prof_Sample& sample, int64_t endtime_ns) void Datadog::Profile::postfork_child() { - profile_mtx.unlock(); + new (&profile_mtx) std::mutex(); cycle_buffers(); } diff --git a/ddtrace/internal/datadog/profiling/dd_wrapper/src/sample.cpp b/ddtrace/internal/datadog/profiling/dd_wrapper/src/sample.cpp index bc0a316bcc3..4483a021803 100644 --- a/ddtrace/internal/datadog/profiling/dd_wrapper/src/sample.cpp +++ b/ddtrace/internal/datadog/profiling/dd_wrapper/src/sample.cpp @@ -262,6 +262,42 @@ Datadog::Sample::push_heap(int64_t size) return false; } +bool +Datadog::Sample::push_gpu_gputime(int64_t time, int64_t count) +{ + if (0U != (type_mask & SampleType::GPUTime)) { + values[profile_state.val().gpu_time] += time * count; + values[profile_state.val().gpu_count] += count; + return true; + } + std::cout << "bad push gpu" << std::endl; + return false; +} + +bool +Datadog::Sample::push_gpu_memory(int64_t size, int64_t count) +{ + if (0U != (type_mask & SampleType::GPUMemory)) { + values[profile_state.val().gpu_alloc_space] += size * count; + values[profile_state.val().gpu_alloc_count] += count; + return true; + } + std::cout << "bad push gpu memory" << std::endl; + return false; +} + +bool +Datadog::Sample::push_gpu_flops(int64_t size, int64_t count) +{ + if (0U != (type_mask & SampleType::GPUFlops)) { + values[profile_state.val().gpu_flops] += size * count; + values[profile_state.val().gpu_flops_samples] += count; + return true; + } + std::cout << "bad push gpu flops" << std::endl; + return false; +} + bool Datadog::Sample::push_lock_name(std::string_view lock_name) { @@ -351,6 +387,27 @@ Datadog::Sample::push_class_name(std::string_view class_name) return true; } +bool +Datadog::Sample::push_gpu_device_name(std::string_view device_name) +{ + if (!push_label(ExportLabelKey::gpu_device_name, device_name)) { + std::cout << "bad push" << std::endl; + return false; + } + return true; +} + +bool 
+Datadog::Sample::push_absolute_ns(int64_t _timestamp_ns) +{ + // If timeline is not enabled, then this is a no-op + if (is_timeline_enabled()) { + endtime_ns = _timestamp_ns; + } + + return true; +} + bool Datadog::Sample::push_monotonic_ns(int64_t _monotonic_ns) { diff --git a/ddtrace/internal/datadog/profiling/dd_wrapper/src/uploader.cpp b/ddtrace/internal/datadog/profiling/dd_wrapper/src/uploader.cpp index 1e04a45fb41..325771946d8 100644 --- a/ddtrace/internal/datadog/profiling/dd_wrapper/src/uploader.cpp +++ b/ddtrace/internal/datadog/profiling/dd_wrapper/src/uploader.cpp @@ -109,20 +109,20 @@ Datadog::Uploader::upload(ddog_prof_Profile& profile) return false; } - // If we're here, we're about to create a new upload, so cancel any inflight ones - cancel_inflight(); - - // Create a new cancellation token. Maybe we can get away without doing this, but - // since we're recreating the uploader fresh every time anyway, we recreate one more thing. - // NB wrapping this in a unique_ptr to easily add RAII semantics; maybe should just wrap it in a - // class instead. - std::unique_ptr cancel_for_request( - ddog_CancellationToken_clone(cancel.get())); - // The upload operation sets up some global state in libdatadog (the tokio runtime), so // we ensure exclusivity here. { + // If we're here, we're about to create a new upload, so cancel any inflight ones const std::lock_guard lock_guard(upload_lock); + cancel_inflight(); + + // Create a new cancellation token. Maybe we can get away without doing this, but + // since we're recreating the uploader fresh every time anyway, we recreate one more thing. 
+ // NB wrapping this in a unique_ptr to easily add RAII semantics; maybe should just wrap it in a + // class instead + cancel.reset(ddog_CancellationToken_new()); + std::unique_ptr cancel_for_request; + cancel_for_request.reset(ddog_CancellationToken_clone(cancel.get())); // Build and check the response object ddog_prof_Exporter_Request* req = build_res.ok; // NOLINT (cppcoreguidelines-pro-type-union-access) @@ -156,7 +156,7 @@ Datadog::Uploader::unlock() void Datadog::Uploader::cancel_inflight() { - ddog_CancellationToken_cancel(cancel.get()); + cancel.reset(); } void @@ -175,5 +175,6 @@ Datadog::Uploader::postfork_parent() void Datadog::Uploader::postfork_child() { - unlock(); + // NB placement-new to re-init and leak the mutex because doing anything else is UB + new (&upload_lock) std::mutex(); } diff --git a/ddtrace/internal/datadog/profiling/dd_wrapper/src/uploader_builder.cpp b/ddtrace/internal/datadog/profiling/dd_wrapper/src/uploader_builder.cpp index 0661b7f217f..8ff5d45e7c2 100644 --- a/ddtrace/internal/datadog/profiling/dd_wrapper/src/uploader_builder.cpp +++ b/ddtrace/internal/datadog/profiling/dd_wrapper/src/uploader_builder.cpp @@ -186,3 +186,10 @@ Datadog::UploaderBuilder::build() return Datadog::Uploader{ output_filename, ddog_exporter }; } + +void +Datadog::UploaderBuilder::postfork_child() +{ + // NB placement-new to re-init and leak the mutex because doing anything else is UB + new (&tag_mutex) std::mutex(); +} diff --git a/ddtrace/internal/datadog/profiling/dd_wrapper/test/CMakeLists.txt b/ddtrace/internal/datadog/profiling/dd_wrapper/test/CMakeLists.txt index 0be6098cd2a..66dac6b6f0d 100644 --- a/ddtrace/internal/datadog/profiling/dd_wrapper/test/CMakeLists.txt +++ b/ddtrace/internal/datadog/profiling/dd_wrapper/test/CMakeLists.txt @@ -12,6 +12,24 @@ FetchContent_MakeAvailable(googletest) include(GoogleTest) include(AnalysisFunc) +if(DO_VALGRIND) + find_program( + VALGRIND_EXECUTABLE + NAMES valgrind + PATHS /usr/bin /usr/local/bin) + + 
if(VALGRIND_EXECUTABLE) + set(MEMORYCHECK_COMMAND "${VALGRIND_EXECUTABLE}") + set(MEMORYCHECK_COMMAND_OPTIONS + "--leak-check=full --show-leak-kinds=definite --errors-for-leak-kinds=definite --trace-children=yes --error-exitcode=1 --log-fd=1 --suppressions=${CMAKE_CURRENT_SOURCE_DIR}/valgrind.supp" + ) + else() + message(FATAL_ERROR "Valgrind not found") + endif() + + include(CTest) +endif() + FetchContent_Declare(json URL https://github.com/nlohmann/json/releases/download/v3.11.3/json.tar.xz) FetchContent_MakeAvailable(json) @@ -21,18 +39,16 @@ function(dd_wrapper_add_test name) target_link_libraries(${name} PRIVATE gmock gtest_main dd_wrapper nlohmann_json::nlohmann_json) add_ddup_config(${name}) - gtest_discover_tests(${name} - PROPERTIES - # We start new threads after fork(), and we want to continue - # running the tests after that instead of dying. - ENVIRONMENT "TSAN_OPTIONS=die_after_fork=0:suppressions=${CMAKE_CURRENT_SOURCE_DIR}/TSan.supp" - ) + gtest_discover_tests( + ${name} + PROPERTIES # We start new threads after fork(), and we want to continue running the tests after that instead of + # dying. + ENVIRONMENT "TSAN_OPTIONS=die_after_fork=0:suppressions=${CMAKE_CURRENT_SOURCE_DIR}/TSan.supp") set_target_properties(${name} PROPERTIES INSTALL_RPATH "$ORIGIN/..") if(LIB_INSTALL_DIR) - install(TARGETS ${name} - RUNTIME DESTINATION ${LIB_INSTALL_DIR}/../test) + install(TARGETS ${name} RUNTIME DESTINATION ${LIB_INSTALL_DIR}/../test) endif() endfunction() diff --git a/ddtrace/internal/datadog/profiling/dd_wrapper/test/valgrind.supp b/ddtrace/internal/datadog/profiling/dd_wrapper/test/valgrind.supp new file mode 100644 index 00000000000..d8534d2a228 --- /dev/null +++ b/ddtrace/internal/datadog/profiling/dd_wrapper/test/valgrind.supp @@ -0,0 +1,7 @@ +{ + ddcommon_uninitialized_value + Memcheck:Cond + fun:eq + ... 
+ fun:*ddcommon*entity_id*unix*container_id* +} diff --git a/ddtrace/internal/datadog/profiling/ddup/CMakeLists.txt b/ddtrace/internal/datadog/profiling/ddup/CMakeLists.txt index 8aeb0f1c23a..6a4cb4e8803 100644 --- a/ddtrace/internal/datadog/profiling/ddup/CMakeLists.txt +++ b/ddtrace/internal/datadog/profiling/ddup/CMakeLists.txt @@ -13,14 +13,11 @@ message(STATUS "Building extension: ${EXTENSION_NAME}") # Get the cmake modules for this project list(APPEND CMAKE_MODULE_PATH "${CMAKE_CURRENT_LIST_DIR}/../cmake") -# Includes -include(FetchContent) -include(ExternalProject) -include(FindLibdatadog) - -# Technically, this should be its own project which we `include()`, but I don't want to deal with that when so many -# things may yet be factored differently. -add_subdirectory(../dd_wrapper ${CMAKE_CURRENT_BINARY_DIR}/../dd_wrapper_build) +# Having a common target in a subdirectory like this is a hack and a mistake, but it's fiddly to change it so we haven't +# been able to. Instead, make sure that the binary path set in the subdirectory is stable *as a string* in order to make +# sure the caches work. +get_filename_component(DD_WRAPPER_BUILD_DIR ${CMAKE_CURRENT_BINARY_DIR}/../dd_wrapper_build ABSOLUTE) +add_subdirectory(../dd_wrapper ${DD_WRAPPER_BUILD_DIR}) find_package(Python3 COMPONENTS Interpreter Development) @@ -50,12 +47,8 @@ add_custom_command( add_library(${EXTENSION_NAME} SHARED ${DDUP_CPP_SRC}) add_ddup_config(${EXTENSION_NAME}) -# Cython generates code that produces errors for the following, so relax compile -# options -target_compile_options( - ${EXTENSION_NAME} - PRIVATE -Wno-old-style-cast -Wno-shadow -Wno-address -) +# Cython generates code that produces errors for the following, so relax compile options +target_compile_options(${EXTENSION_NAME} PRIVATE -Wno-old-style-cast -Wno-shadow -Wno-address) # cmake may mutate the name of the library (e.g., lib- and -.so for dynamic libraries). 
This suppresses that behavior, # which is required to ensure all paths can be inferred correctly by setup.py. @@ -66,7 +59,7 @@ set_target_properties(${EXTENSION_NAME} PROPERTIES SUFFIX "") # typical. set_target_properties(${EXTENSION_NAME} PROPERTIES INSTALL_RPATH "$ORIGIN/..") target_include_directories(${EXTENSION_NAME} PRIVATE ../dd_wrapper/include ${Datadog_INCLUDE_DIRS} - ${Python3_INCLUDE_DIRS}) + ${Python3_INCLUDE_DIRS}) if(Python3_LIBRARIES) target_link_libraries(${EXTENSION_NAME} PRIVATE dd_wrapper ${Python3_LIBRARIES}) diff --git a/ddtrace/internal/datadog/profiling/ddup/_ddup.pyi b/ddtrace/internal/datadog/profiling/ddup/_ddup.pyi index 552e377df0b..2f466b62af3 100644 --- a/ddtrace/internal/datadog/profiling/ddup/_ddup.pyi +++ b/ddtrace/internal/datadog/profiling/ddup/_ddup.pyi @@ -3,6 +3,7 @@ from typing import Optional from typing import Union from .._types import StringType from ddtrace._trace.span import Span +from ddtrace._trace.tracer import Tracer def config( env: StringType, @@ -16,22 +17,27 @@ def config( enable_code_provenance: Optional[bool], ) -> None: ... def start() -> None: ... -def upload() -> None: ... +def upload(tracer: Optional[Tracer]) -> None: ... class SampleHandle: - def push_cputime(self, value: int, count: int) -> None: ... - def push_walltime(self, value: int, count: int) -> None: ... + def flush_sample(self) -> None: ... + def push_absolute_ns(self, timestamp_ns: int) -> None: ... def push_acquire(self, value: int, count: int) -> None: ... - def push_release(self, value: int, count: int) -> None: ... def push_alloc(self, value: int, count: int) -> None: ... + def push_class_name(self, class_name: StringType) -> None: ... + def push_cputime(self, value: int, count: int) -> None: ... + def push_exceptioninfo(self, exc_type: Union[None, bytes, str, type], count: int) -> None: ... + def push_frame(self, name: StringType, filename: StringType, address: int, line: int) -> None: ... 
+ def push_gpu_device_name(self, device_name: StringType) -> None: ... + def push_gpu_flops(self, value: int, count: int) -> None: ... + def push_gpu_gputime(self, value: int, count: int) -> None: ... + def push_gpu_memory(self, value: int, count: int) -> None: ... def push_heap(self, value: int) -> None: ... def push_lock_name(self, lock_name: StringType) -> None: ... - def push_frame(self, name: StringType, filename: StringType, address: int, line: int) -> None: ... - def push_threadinfo(self, thread_id: int, thread_native_id: int, thread_name: StringType) -> None: ... + def push_monotonic_ns(self, monotonic_ns: int) -> None: ... + def push_release(self, value: int, count: int) -> None: ... + def push_span(self, span: Optional[Span]) -> None: ... def push_task_id(self, task_id: Optional[int]) -> None: ... def push_task_name(self, task_name: StringType) -> None: ... - def push_exceptioninfo(self, exc_type: Union[None, bytes, str, type], count: int) -> None: ... - def push_class_name(self, class_name: StringType) -> None: ... - def push_span(self, span: Optional[Span]) -> None: ... - def push_monotonic_ns(self, monotonic_ns: int) -> None: ... - def flush_sample(self) -> None: ... + def push_threadinfo(self, thread_id: int, thread_native_id: int, thread_name: StringType) -> None: ... + def push_walltime(self, value: int, count: int) -> None: ... 
diff --git a/ddtrace/internal/datadog/profiling/ddup/_ddup.pyx b/ddtrace/internal/datadog/profiling/ddup/_ddup.pyx index b3f9b264890..5b8b6add921 100644 --- a/ddtrace/internal/datadog/profiling/ddup/_ddup.pyx +++ b/ddtrace/internal/datadog/profiling/ddup/_ddup.pyx @@ -20,6 +20,7 @@ from ddtrace.internal.constants import DEFAULT_SERVICE_NAME from ddtrace.internal.packages import get_distributions from ddtrace.internal.runtime import get_runtime_id from ddtrace._trace.span import Span +from ddtrace._trace.tracer import Tracer ctypedef void (*func_ptr_t)(string_view) @@ -67,6 +68,9 @@ cdef extern from "ddup_interface.hpp": void ddup_push_release(Sample *sample, int64_t release_time, int64_t count) void ddup_push_alloc(Sample *sample, int64_t size, int64_t count) void ddup_push_heap(Sample *sample, int64_t size) + void ddup_push_gpu_gputime(Sample *sample, int64_t gputime, int64_t count) + void ddup_push_gpu_memory(Sample *sample, int64_t size, int64_t count) + void ddup_push_gpu_flops(Sample *sample, int64_t flops, int64_t count) void ddup_push_lock_name(Sample *sample, string_view lock_name) void ddup_push_threadinfo(Sample *sample, int64_t thread_id, int64_t thread_native_id, string_view thread_name) void ddup_push_task_id(Sample *sample, int64_t task_id) @@ -76,8 +80,10 @@ cdef extern from "ddup_interface.hpp": void ddup_push_trace_type(Sample *sample, string_view trace_type) void ddup_push_exceptioninfo(Sample *sample, string_view exception_type, int64_t count) void ddup_push_class_name(Sample *sample, string_view class_name) + void ddup_push_gpu_device_name(Sample *sample, string_view device_name) void ddup_push_frame(Sample *sample, string_view _name, string_view _filename, uint64_t address, int64_t line) void ddup_push_monotonic_ns(Sample *sample, int64_t monotonic_ns) + void ddup_push_absolute_ns(Sample *sample, int64_t monotonic_ns) void ddup_flush_sample(Sample *sample) void ddup_drop_sample(Sample *sample) @@ -301,6 +307,18 @@ cdef 
call_ddup_push_class_name(Sample* sample, class_name: StringType): if utf8_data != NULL: ddup_push_class_name(sample, string_view(utf8_data, utf8_size)) +cdef call_ddup_push_gpu_device_name(Sample* sample, device_name: StringType): + if not device_name: + return + if isinstance(device_name, bytes): + ddup_push_gpu_device_name(sample, string_view(device_name, len(device_name))) + return + cdef const char* utf8_data + cdef Py_ssize_t utf8_size + utf8_data = PyUnicode_AsUTF8AndSize(device_name, &utf8_size) + if utf8_data != NULL: + ddup_push_gpu_device_name(sample, string_view(utf8_data, utf8_size)) + cdef call_ddup_push_trace_type(Sample* sample, trace_type: StringType): if not trace_type: return @@ -396,16 +414,16 @@ def _get_endpoint(tracer)-> str: return endpoint -def upload() -> None: +def upload(tracer: Optional[Tracer] = ddtrace.tracer) -> None: call_func_with_str(ddup_set_runtime_id, get_runtime_id()) - processor = ddtrace.tracer._endpoint_call_counter_span_processor + processor = tracer._endpoint_call_counter_span_processor endpoint_counts, endpoint_to_span_ids = processor.reset() call_ddup_profile_set_endpoints(endpoint_to_span_ids) call_ddup_profile_add_endpoint_counts(endpoint_counts) - endpoint = _get_endpoint(ddtrace.tracer) + endpoint = _get_endpoint(tracer) call_func_with_str(ddup_config_url, endpoint) with nogil: @@ -447,6 +465,18 @@ cdef class SampleHandle: if self.ptr is not NULL: ddup_push_heap(self.ptr, clamp_to_int64_unsigned(value)) + def push_gpu_gputime(self, value: int, count: int) -> None: + if self.ptr is not NULL: + ddup_push_gpu_gputime(self.ptr, clamp_to_int64_unsigned(value), clamp_to_int64_unsigned(count)) + + def push_gpu_memory(self, value: int, count: int) -> None: + if self.ptr is not NULL: + ddup_push_gpu_memory(self.ptr, clamp_to_int64_unsigned(value), clamp_to_int64_unsigned(count)) + + def push_gpu_flops(self, value: int, count: int) -> None: + if self.ptr is not NULL: + ddup_push_gpu_flops(self.ptr, 
clamp_to_int64_unsigned(value), clamp_to_int64_unsigned(count)) + def push_lock_name(self, lock_name: StringType) -> None: if self.ptr is not NULL: call_ddup_push_lock_name(self.ptr, lock_name) @@ -493,6 +523,10 @@ cdef class SampleHandle: if self.ptr is not NULL: call_ddup_push_class_name(self.ptr, class_name) + def push_gpu_device_name(self, device_name: StringType) -> None: + if self.ptr is not NULL: + call_ddup_push_gpu_device_name(self.ptr, device_name) + def push_span(self, span: Optional[Span]) -> None: if self.ptr is NULL: return @@ -511,6 +545,10 @@ cdef class SampleHandle: if self.ptr is not NULL: ddup_push_monotonic_ns(self.ptr, monotonic_ns) + def push_absolute_ns(self, timestamp_ns: int) -> None: + if self.ptr is not NULL: + ddup_push_absolute_ns(self.ptr, timestamp_ns) + def flush_sample(self) -> None: # Flushing the sample consumes it. The user will no longer be able to use # this handle after flushing it. diff --git a/ddtrace/internal/datadog/profiling/stack_v2/CMakeLists.txt b/ddtrace/internal/datadog/profiling/stack_v2/CMakeLists.txt index 50c8d056c79..77952e09d41 100644 --- a/ddtrace/internal/datadog/profiling/stack_v2/CMakeLists.txt +++ b/ddtrace/internal/datadog/profiling/stack_v2/CMakeLists.txt @@ -11,16 +11,17 @@ message(STATUS "Building extension: ${EXTENSION_NAME}") # Custom cmake modules are in the parent directory list(APPEND CMAKE_MODULE_PATH "${CMAKE_CURRENT_SOURCE_DIR}/../cmake") +# Having a common target in a subdirectory like this is a hack and a mistake, but it's fiddly to change it so we haven't +# been able to. Instead, make sure that the binary path set in the subdirectory is stable *as a string* in order to make +# sure the caches work. 
+get_filename_component(DD_WRAPPER_BUILD_DIR ${CMAKE_CURRENT_BINARY_DIR}/../dd_wrapper_build ABSOLUTE) +add_subdirectory(../dd_wrapper ${DD_WRAPPER_BUILD_DIR}) + # Includes include(FetchContent) -include(ExternalProject) include(AnalysisFunc) include(FindCppcheck) -# dd_wrapper should be its own project at one point, if the current design is kept, but whether or not we keep that -# design is unknown. Hack it for now. -add_subdirectory(../dd_wrapper ${CMAKE_CURRENT_BINARY_DIR}/../dd_wrapper_build) - find_package(Python3 COMPONENTS Interpreter Development) # Make sure we have necessary Python variables @@ -74,9 +75,9 @@ target_compile_definitions(${EXTENSION_NAME} PRIVATE UNWIND_NATIVE_DISABLE) # warning(push, 0 then pop for the same effect. target_include_directories( ${EXTENSION_NAME} PRIVATE .. # include dd_wrapper from the root in order to make its paths transparent in the code - include) + include) target_include_directories(${EXTENSION_NAME} SYSTEM PRIVATE ${echion_SOURCE_DIR} ${Python3_INCLUDE_DIRS} - include/vendored include/util) + include/vendored include/util) # Echion sources need to be given the current platform if(WIN32) @@ -115,4 +116,3 @@ if(BUILD_TESTING) enable_testing() add_subdirectory(test) endif() - diff --git a/ddtrace/internal/datadog/profiling/stack_v2/src/sampler.cpp b/ddtrace/internal/datadog/profiling/stack_v2/src/sampler.cpp index c05ae45477e..7ad9ad692b2 100644 --- a/ddtrace/internal/datadog/profiling/stack_v2/src/sampler.cpp +++ b/ddtrace/internal/datadog/profiling/stack_v2/src/sampler.cpp @@ -67,6 +67,11 @@ _stack_v2_atfork_child() // so we don't even reveal this function to the user _set_pid(getpid()); ThreadSpanLinks::postfork_child(); + + // `thread_info_map_lock` and `task_link_map_lock` are global locks held in echion + // NB placement-new to re-init and leak the mutex because doing anything else is UB + new (&thread_info_map_lock) std::mutex; + new (&task_link_map_lock) std::mutex; } __attribute__((constructor)) void diff --git 
a/ddtrace/internal/datadog/profiling/stack_v2/src/thread_span_links.cpp b/ddtrace/internal/datadog/profiling/stack_v2/src/thread_span_links.cpp index c777ff8a510..6be43a04a42 100644 --- a/ddtrace/internal/datadog/profiling/stack_v2/src/thread_span_links.cpp +++ b/ddtrace/internal/datadog/profiling/stack_v2/src/thread_span_links.cpp @@ -53,10 +53,8 @@ ThreadSpanLinks::reset() void ThreadSpanLinks::postfork_child() { - // Explicitly destroy and reconstruct the mutex to avoid undefined behavior - get_instance().mtx.~mutex(); + // NB placement-new to re-init and leak the mutex because doing anything else is UB new (&get_instance().mtx) std::mutex(); - get_instance().reset(); } diff --git a/ddtrace/internal/datadog/profiling/stack_v2/test/CMakeLists.txt b/ddtrace/internal/datadog/profiling/stack_v2/test/CMakeLists.txt index 926f9b28af7..423f927d8f1 100644 --- a/ddtrace/internal/datadog/profiling/stack_v2/test/CMakeLists.txt +++ b/ddtrace/internal/datadog/profiling/stack_v2/test/CMakeLists.txt @@ -12,6 +12,24 @@ FetchContent_MakeAvailable(googletest) include(GoogleTest) include(AnalysisFunc) +if(DO_VALGRIND) + find_program( + VALGRIND_EXECUTABLE + NAMES valgrind + PATHS /usr/bin /usr/local/bin) + + if(VALGRIND_EXECUTABLE) + set(MEMORYCHECK_COMMAND "${VALGRIND_EXECUTABLE}") + set(MEMORYCHECK_COMMAND_OPTIONS + "--leak-check=full --show-leak-kinds=definite --errors-for-leak-kinds=definite --trace-children=yes --error-exitcode=1 --log-fd=1 --suppressions=${CMAKE_CURRENT_SOURCE_DIR}/valgrind.supp" + ) + else() + message(FATAL_ERROR "Valgrind not found") + endif() + + include(CTest) +endif() + function(dd_wrapper_add_test name) add_executable(${name} ${ARGN}) target_include_directories(${name} PRIVATE ../include) @@ -29,8 +47,7 @@ function(dd_wrapper_add_test name) endif() if(LIB_INSTALL_DIR) - install(TARGETS ${name} - RUNTIME DESTINATION ${LIB_INSTALL_DIR}/../test) + install(TARGETS ${name} RUNTIME DESTINATION ${LIB_INSTALL_DIR}/../test) endif() endfunction() diff --git 
a/ddtrace/internal/datadog/profiling/stack_v2/test/valgrind.supp b/ddtrace/internal/datadog/profiling/stack_v2/test/valgrind.supp new file mode 100644 index 00000000000..d8534d2a228 --- /dev/null +++ b/ddtrace/internal/datadog/profiling/stack_v2/test/valgrind.supp @@ -0,0 +1,7 @@ +{ + ddcommon_uninitialized_value + Memcheck:Cond + fun:eq + ... + fun:*ddcommon*entity_id*unix*container_id* +} diff --git a/ddtrace/internal/datastreams/botocore.py b/ddtrace/internal/datastreams/botocore.py index ec004f1ff9a..aeafa70ec2e 100644 --- a/ddtrace/internal/datastreams/botocore.py +++ b/ddtrace/internal/datastreams/botocore.py @@ -187,6 +187,10 @@ def handle_sqs_receive(_, params, result, *args): log.debug("Error receiving SQS message with data streams monitoring enabled", exc_info=True) +class StreamMetadataNotFound(Exception): + pass + + def record_data_streams_path_for_kinesis_stream(params, time_estimate, context_json, record): from . import data_streams_processor as processor @@ -194,7 +198,7 @@ def record_data_streams_path_for_kinesis_stream(params, time_estimate, context_j if not stream: log.debug("Unable to determine StreamARN and/or StreamName for request with params: ", params) - return + raise StreamMetadataNotFound() payload_size = calculate_kinesis_payload_size(record) ctx = DsmPathwayCodec.decode(context_json, processor()) @@ -210,7 +214,7 @@ def handle_kinesis_receive(_, params, time_estimate, context_json, record, *args try: record_data_streams_path_for_kinesis_stream(params, time_estimate, context_json, record) except Exception: - log.debug("Failed to report data streams monitoring info for kinesis", exc_info=True) + log.warning("Failed to report data streams monitoring info for kinesis", exc_info=True) if config._data_streams_enabled: diff --git a/ddtrace/internal/http.py b/ddtrace/internal/http.py index c8e6c772d6b..34cee7e0793 100644 --- a/ddtrace/internal/http.py +++ b/ddtrace/internal/http.py @@ -1,10 +1,15 @@ from ddtrace.internal.compat import httplib 
from ddtrace.internal.compat import parse +from ddtrace.internal.runtime import container -class BasePathMixin(httplib.HTTPConnection, object): +class HTTPConnectionMixin: """ - Mixin for HTTPConnection to insert a base path to requested URLs + Mixin for HTTP(S) connections for performing internal adjustments. + + Currently this mixin performs the following adjustments: + - insert a base path to requested URLs + - update headers with container info """ _base_path = "/" # type: str @@ -12,7 +17,7 @@ class BasePathMixin(httplib.HTTPConnection, object): def putrequest(self, method, url, skip_host=False, skip_accept_encoding=False): # type: (str, str, bool, bool) -> None url = parse.urljoin(self._base_path, url) - return super(BasePathMixin, self).putrequest( + return super().putrequest( # type: ignore[misc] method, url, skip_host=skip_host, skip_accept_encoding=skip_accept_encoding ) @@ -23,14 +28,21 @@ def with_base_path(cls, *args, **kwargs): obj._base_path = base_path return obj + def request(self, method, url, body=None, headers={}, *, encode_chunked=False): + _headers = headers.copy() + + container.update_headers(_headers) + + return super().request(method, url, body=body, headers=_headers, encode_chunked=encode_chunked) + -class HTTPConnection(BasePathMixin, httplib.HTTPConnection): +class HTTPConnection(HTTPConnectionMixin, httplib.HTTPConnection): """ httplib.HTTPConnection wrapper to add a base path to requested URLs """ -class HTTPSConnection(BasePathMixin, httplib.HTTPSConnection): +class HTTPSConnection(HTTPConnectionMixin, httplib.HTTPSConnection): """ httplib.HTTPSConnection wrapper to add a base path to requested URLs """ diff --git a/ddtrace/internal/injection.py b/ddtrace/internal/injection.py index d6fa2715ec7..787e0160e66 100644 --- a/ddtrace/internal/injection.py +++ b/ddtrace/internal/injection.py @@ -25,8 +25,25 @@ class InvalidLine(Exception): """ +# DEV: This is the bytecode equivalent of +# >>> hook(arg) +# Additionally, we must discard the 
return value (top of the stack) to restore +# the stack to the state prior to the call. + INJECTION_ASSEMBLY = Assembly() -if PY >= (3, 12): +if PY >= (3, 14): + raise NotImplementedError("Python >= 3.14 is not supported yet") +elif PY >= (3, 13): + INJECTION_ASSEMBLY.parse( + r""" + load_const {hook} + push_null + load_const {arg} + call 1 + pop_top + """ + ) +elif PY >= (3, 12): INJECTION_ASSEMBLY.parse( r""" push_null @@ -91,15 +108,11 @@ def _inject_hook(code: Bytecode, hook: HookType, lineno: int, arg: Any) -> None: if not locs: raise InvalidLine("Line %d does not exist or is either blank or a comment" % lineno) - # DEV: This is the bytecode equivalent of - # >>> hook(arg) - # Additionally, we must discard the return value (top of the stack) to - # restore the stack to the state prior to the call. for i in locs: code[i:i] = INJECTION_ASSEMBLY.bind(dict(hook=hook, arg=arg), lineno=lineno) -_INJECT_HOOK_OPCODE_POS = 0 if PY < (3, 11) else 1 +_INJECT_HOOK_OPCODE_POS = 1 if (3, 11) <= PY < (3, 13) else 0 _INJECT_ARG_OPCODE_POS = 1 if PY < (3, 11) else 2 diff --git a/ddtrace/internal/packages.py b/ddtrace/internal/packages.py index 8b369b9709c..ab4023d93dd 100644 --- a/ddtrace/internal/packages.py +++ b/ddtrace/internal/packages.py @@ -59,26 +59,38 @@ def get_package_distributions() -> t.Mapping[str, t.List[str]]: return _packages_distributions() -@cached(maxsize=256) -def get_module_distribution_versions(module_name: str) -> t.Dict[str, str]: +@cached(maxsize=1024) +def get_module_distribution_versions(module_name: str) -> t.Optional[t.Tuple[str, str]]: + if not module_name: + return None try: import importlib.metadata as importlib_metadata except ImportError: import importlib_metadata # type: ignore[no-redef] - try: - return { - module_name: importlib_metadata.distribution(module_name).version, - } - except importlib_metadata.PackageNotFoundError: - pass - + names: t.List[str] = [] pkgs = get_package_distributions() - names = pkgs.get(module_name) - if not names: 
- return {} - - return {name: get_version_for_package(name) for name in names} + while names == []: + try: + return ( + module_name, + importlib_metadata.distribution(module_name).version, + ) + except Exception: # nosec + pass + names = pkgs.get(module_name, []) + if not names: + # try to resolve the parent package + p = module_name.rfind(".") + if p > 0: + module_name = module_name[:p] + else: + break + if len(names) != 1: + # either it was not resolved due to multiple packages with the same name + # or it's a multipurpose package (like '__pycache__') + return None + return (names[0], get_version_for_package(names[0])) @cached(maxsize=256) diff --git a/ddtrace/internal/processor/stats.py b/ddtrace/internal/processor/stats.py index 1ba8e008105..f79f460582e 100644 --- a/ddtrace/internal/processor/stats.py +++ b/ddtrace/internal/processor/stats.py @@ -19,7 +19,6 @@ from ..hostname import get_hostname from ..logger import get_logger from ..periodic import PeriodicService -from ..runtime import container from ..writer import _human_size @@ -108,8 +107,6 @@ def __init__(self, agent_url, interval=None, timeout=1.0, retry_attempts=3): "Datadog-Meta-Tracer-Version": ddtrace.__version__, "Content-Type": "application/msgpack", } # type: Dict[str, str] - container.update_headers_with_container_info(self._headers, container.get_container_info()) - container.update_header_with_external_info(self._headers) self._hostname = "" if config._report_hostname: self._hostname = get_hostname() diff --git a/ddtrace/internal/remoteconfig/client.py b/ddtrace/internal/remoteconfig/client.py index 3f9315d0be0..6d77f220d81 100644 --- a/ddtrace/internal/remoteconfig/client.py +++ b/ddtrace/internal/remoteconfig/client.py @@ -29,7 +29,6 @@ from ddtrace.internal.logger import get_logger from ddtrace.internal.packages import is_distribution_available from ddtrace.internal.remoteconfig.constants import REMOTE_CONFIG_AGENT_ENDPOINT -from ddtrace.internal.runtime import container from 
ddtrace.internal.service import ServiceStatus from ddtrace.internal.utils.time import parse_isoformat @@ -240,9 +239,6 @@ def __init__(self) -> None: if additional_header_str is not None: self._headers.update(parse_tags_str(additional_header_str)) - container.update_headers_with_container_info(self._headers, container.get_container_info()) - container.update_header_with_external_info(self._headers) - tags = ddtrace.config.tags.copy() # Add git metadata tags, if available diff --git a/ddtrace/internal/runtime/container.py b/ddtrace/internal/runtime/container.py index c12a091fe21..6025981a726 100644 --- a/ddtrace/internal/runtime/container.py +++ b/ddtrace/internal/runtime/container.py @@ -1,9 +1,18 @@ import errno +from functools import lru_cache import os import re +import sys from typing import Any from typing import Dict from typing import Optional +from typing import Union + + +if sys.version_info >= (3, 8): + from typing import Literal # noqa:F401 +else: + from typing_extensions import Literal from ..constants import CONTAINER_ID_HEADER_NAME from ..constants import ENTITY_ID_HEADER_NAME @@ -130,25 +139,17 @@ def from_line(cls, line): ) -def get_container_info(pid="self"): - # type: (str) -> Optional[CGroupInfo] +@lru_cache(64) +def get_container_info(pid: Union[Literal["self"], int] = "self") -> Optional[CGroupInfo]: """ - Helper to fetch the current container id, if we are running in a container + Helper to fetch the current container id, if we are running in a container. We will parse `/proc/{pid}/cgroup` to determine our container id. - The results of calling this function are cached - - :param pid: The pid of the cgroup file to parse (default: 'self') - :type pid: str | int - :returns: The cgroup file info if found, or else None - :rtype: :class:`CGroupInfo` | None + The results of calling this function are cached. 
""" - - cgroup_file = "/proc/{0}/cgroup".format(pid) - try: - with open(cgroup_file, mode="r") as fp: + with open(f"/proc/{pid}/cgroup", mode="r") as fp: for line in fp: info = CGroupInfo.from_line(line) if info and (info.container_id or info.node_inode): @@ -161,27 +162,26 @@ def get_container_info(pid="self"): return None -def update_headers_with_container_info(headers: Dict, container_info: Optional[CGroupInfo]) -> None: +def update_headers(headers: Dict) -> None: """Get the container info (either the container ID or the cgroup inode) and add it to the headers.""" - if container_info is None: - return - if container_info.container_id: - headers.update( - { - CONTAINER_ID_HEADER_NAME: container_info.container_id, - ENTITY_ID_HEADER_NAME: f"ci-{container_info.container_id}", - } - ) - elif container_info.node_inode: - headers.update( - { - ENTITY_ID_HEADER_NAME: f"in-{container_info.node_inode}", - } - ) - - -def update_header_with_external_info(headers: Dict) -> None: - """Get the external environment info from the environment variable and add it to the headers.""" + container_info = get_container_info() + if container_info is not None: + if container_info.container_id: + headers.update( + { + CONTAINER_ID_HEADER_NAME: container_info.container_id, + ENTITY_ID_HEADER_NAME: f"ci-{container_info.container_id}", + } + ) + elif container_info.node_inode: + headers.update( + { + ENTITY_ID_HEADER_NAME: f"in-{container_info.node_inode}", + } + ) + + # Get the external environment info from the environment variable and add it + # to the headers external_info = os.environ.get(EXTERNAL_ENV_ENVIRONMENT_VARIABLE) if external_info: headers.update( diff --git a/ddtrace/internal/symbol_db/symbols.py b/ddtrace/internal/symbol_db/symbols.py index e252e22c0bf..228ed53f53d 100644 --- a/ddtrace/internal/symbol_db/symbols.py +++ b/ddtrace/internal/symbol_db/symbols.py @@ -3,6 +3,7 @@ from dataclasses import field import dis from enum import Enum +import gzip from http.client import 
HTTPResponse from inspect import CO_VARARGS from inspect import CO_VARKEYWORDS @@ -484,13 +485,18 @@ def upload(self) -> HTTPResponse: ), FormData( name="file", - filename="symdb_export.json", - data=json.dumps(self.to_json()), - content_type="json", + filename=f"symbols_{os.getpid()}.json.gz", + data="[symbols_placeholder]", + content_type="gzip", ), ] ) + # DEV: The as_bytes method ends up writing the data line by line, which + # breaks the final payload. We add a placeholder instead and manually + # replace it with the compressed JSON. + body = body.replace(b"[symbols_placeholder]", gzip.compress(json.dumps(self.to_json()).encode("utf-8"))) + with connector(get_trace_url(), timeout=5.0)() as conn: log.debug("[PID %d] SymDB: Uploading symbols payload", os.getpid()) conn.request("POST", "/symdb/v1/input", body, headers) @@ -527,7 +533,7 @@ def is_module_included(module: ModuleType) -> bool: class SymbolDatabaseUploader(BaseModuleWatchdog): - __scope_limit__ = 100 + __scope_limit__ = 400 def __init__(self) -> None: super().__init__() diff --git a/ddtrace/internal/telemetry/data.py b/ddtrace/internal/telemetry/data.py index 3b73ac8b97d..a11e7f4db36 100644 --- a/ddtrace/internal/telemetry/data.py +++ b/ddtrace/internal/telemetry/data.py @@ -77,15 +77,15 @@ def update_imported_dependencies(already_imported: Dict[str, str], new_modules: if not dists: continue - for name, version in dists.items(): - if name == "ddtrace": - continue + name, version = dists + if name == "ddtrace": + continue - if name in already_imported: - continue + if name in already_imported: + continue - already_imported[name] = version - deps.append({"name": name, "version": version}) + already_imported[name] = version + deps.append({"name": name, "version": version}) return deps diff --git a/ddtrace/internal/telemetry/modules.py b/ddtrace/internal/telemetry/modules.py index 3b916fb1282..555e0b70d7e 100644 --- a/ddtrace/internal/telemetry/modules.py +++ b/ddtrace/internal/telemetry/modules.py @@ 
-1,92 +1,13 @@ import sys -from types import ModuleType -from typing import Any from typing import Set -from typing import Tuple -from ..compat import PYTHON_VERSION_INFO -from ..module import BaseModuleWatchdog +ALL_MODULES: Set[str] = set() # All modules that have been already imported -NEW_MODULES: Set[str] = set() # New modules that have been imported since the last check -ALL_MODULES: Set[str] = set() # All modules that have been imported -MODULE_HOOK_INSTALLED = False -# For Python >= 3.8 we can use the sys.audit event import(module, filename, sys.path, sys.meta_path, sys.path_hooks) -if PYTHON_VERSION_INFO >= (3, 8): - - def audit_hook(event: str, args: Tuple[Any, ...]): - if event != "import": - return - - global NEW_MODULES, ALL_MODULES - NEW_MODULES.add(args[0]) - ALL_MODULES.add(args[0]) - - def get_newly_imported_modules() -> Set[str]: - global MODULE_HOOK_INSTALLED, NEW_MODULES, ALL_MODULES - - # Our hook is not installed, so we are not getting notified of new imports, - # we need to track the changes manually - if not NEW_MODULES and not MODULE_HOOK_INSTALLED: - latest_modules = set(sys.modules.keys()) - NEW_MODULES = latest_modules - ALL_MODULES - ALL_MODULES = latest_modules - - new_modules = NEW_MODULES - NEW_MODULES = set() - return new_modules - - def install_import_hook(): - global MODULE_HOOK_INSTALLED, NEW_MODULES, ALL_MODULES - - # If we have not called get_newly_imported_modules yet, we can initialize to all imported modules - if not NEW_MODULES: - NEW_MODULES = set(sys.modules.keys()) - ALL_MODULES = NEW_MODULES.copy() - sys.addaudithook(audit_hook) - MODULE_HOOK_INSTALLED = True - - def uninstall_import_hook(): - # We cannot uninstall a sys audit hook - pass - -else: - - class TelemetryWriterModuleWatchdog(BaseModuleWatchdog): - _initial = True - _new_imported: Set[str] = set() - - def after_import(self, module: ModuleType) -> None: - self._new_imported.add(module.__name__) - - @classmethod - def get_new_imports(cls): - if cls._initial: - 
try: - # On the first call, use sys.modules to cover all imports before we started. This is not - # done on __init__ because we want to do this slow operation on the writer's periodic call - # and not on instantiation. - new_imports = list(sys.modules.keys()) - except RuntimeError: - new_imports = [] - finally: - # If there is any problem with the above we don't want to repeat this slow process, instead we just - # switch to report new dependencies on further calls - cls._initial = False - else: - new_imports = list(cls._new_imported) - - cls._new_imported.clear() - return new_imports - - def get_newly_imported_modules() -> Set[str]: - return set(TelemetryWriterModuleWatchdog.get_new_imports()) - - def install_import_hook(): - if not TelemetryWriterModuleWatchdog.is_installed(): - TelemetryWriterModuleWatchdog.install() - - def uninstall_import_hook(): - if TelemetryWriterModuleWatchdog.is_installed(): - TelemetryWriterModuleWatchdog.uninstall() +def get_newly_imported_modules() -> Set[str]: + global ALL_MODULES + latest_modules = set(sys.modules.keys()) + new_modules = latest_modules - ALL_MODULES + ALL_MODULES = latest_modules + return new_modules diff --git a/ddtrace/internal/telemetry/writer.py b/ddtrace/internal/telemetry/writer.py index 899c77c1108..71de6b03907 100644 --- a/ddtrace/internal/telemetry/writer.py +++ b/ddtrace/internal/telemetry/writer.py @@ -23,7 +23,6 @@ from ..compat import get_connection_response from ..encoding import JSONEncoderV2 from ..periodic import PeriodicService -from ..runtime import container from ..runtime import get_runtime_id from ..service import ServiceStatus from ..utils.formats import asbool @@ -136,8 +135,6 @@ def get_headers(self, request): headers["DD-Telemetry-Debug-Enabled"] = request["debug"] headers["DD-Telemetry-Request-Type"] = request["request_type"] headers["DD-Telemetry-API-Version"] = request["api_version"] - container.update_headers_with_container_info(headers, container.get_container_info()) - 
container.update_header_with_external_info(headers) return headers def get_endpoint(self, agentless: bool) -> str: @@ -184,7 +181,6 @@ def __init__(self, is_periodic=True, agentless=None): self._forked = False # type: bool self._events_queue = [] # type: List[Dict] self._configuration_queue = {} # type: Dict[str, Dict] - self._lock = forksafe.Lock() # type: forksafe.ResetObject self._imported_dependencies: Dict[str, str] = dict() self._product_enablement = {product.value: False for product in TELEMETRY_APM_PRODUCT} self._send_product_change_updates = False @@ -236,9 +232,8 @@ def enable(self): self.start() return True + # currently self._is_periodic is always true self.status = ServiceStatus.RUNNING - if _TelemetryConfig.DEPENDENCY_COLLECTION: - modules.install_import_hook() return True def disable(self): @@ -248,12 +243,7 @@ def disable(self): Once disabled, telemetry collection can not be re-enabled. """ self._enabled = False - modules.uninstall_import_hook() self.reset_queues() - if self._is_running(): - self.stop() - else: - self.status = ServiceStatus.STOPPED def enable_agentless_client(self, enabled=True): # type: (bool) -> None @@ -302,7 +292,7 @@ def add_integration(self, integration_name, patched, auto_patched=None, error_ms :param bool auto_enabled: True if module is enabled in _monkey.PATCH_MODULES """ # Integrations can be patched before the telemetry writer is enabled. 
- with self._lock: + with self._service_lock: if integration_name not in self._integrations_queue: self._integrations_queue[integration_name] = {"name": integration_name} @@ -387,20 +377,20 @@ def _app_integrations_changed_event(self, integrations): def _flush_integrations_queue(self): # type: () -> List[Dict] """Flushes and returns a list of all queued integrations""" - with self._lock: + with self._service_lock: integrations = list(self._integrations_queue.values()) self._integrations_queue = dict() return integrations def _flush_new_imported_dependencies(self) -> Set[str]: - with self._lock: + with self._service_lock: new_deps = modules.get_newly_imported_modules() return new_deps def _flush_configuration_queue(self): # type: () -> List[Dict] """Flushes and returns a list of all queued configurations""" - with self._lock: + with self._service_lock: configurations = list(self._configuration_queue.values()) self._configuration_queue = {} return configurations @@ -419,7 +409,7 @@ def _app_dependencies_loaded_event(self, newly_imported_deps: List[str]): if not _TelemetryConfig.DEPENDENCY_COLLECTION or not self._enabled: return - with self._lock: + with self._service_lock: packages = update_imported_dependencies(self._imported_dependencies, newly_imported_deps) if packages: @@ -456,7 +446,7 @@ def product_activated(self, product, enabled): self._send_product_change_updates = True def remove_configuration(self, configuration_name): - with self._lock: + with self._service_lock: del self._configuration_queue[configuration_name] def add_configuration(self, configuration_name, configuration_value, origin="unknown"): @@ -470,7 +460,7 @@ def add_configuration(self, configuration_name, configuration_value, origin="unk # convert unsupported types to strings configuration_value = str(configuration_value) - with self._lock: + with self._service_lock: self._configuration_queue[configuration_name] = { "name": configuration_name, "origin": origin, @@ -480,7 +470,7 @@ def 
add_configuration(self, configuration_name, configuration_value, origin="unk def add_configurations(self, configuration_list): # type: (List[Tuple[str, Union[bool, float, str], str]]) -> None """Creates and queues a list of configurations""" - with self._lock: + with self._service_lock: for name, value, _origin in configuration_list: self._configuration_queue[name] = { "name": name, @@ -571,7 +561,7 @@ def add_distribution_metric(self, namespace, name, value=1.0, tags=None): def _flush_log_metrics(self): # type () -> Set[Metric] - with self._lock: + with self._service_lock: log_metrics = self._logs self._logs = set() return log_metrics @@ -657,7 +647,7 @@ def reset_queues(self): def _flush_events_queue(self): # type: () -> List[Dict] """Flushes and returns a list of all telemtery event""" - with self._lock: + with self._service_lock: events = self._events_queue self._events_queue = [] return events @@ -673,7 +663,6 @@ def _fork_writer(self): if self._is_running(): self.stop(join=False) - # Enable writer service in child process to avoid interpreter shutdown # error in Python 3.12 self.enable() diff --git a/ddtrace/internal/test_visibility/_benchmark_mixin.py b/ddtrace/internal/test_visibility/_benchmark_mixin.py new file mode 100644 index 00000000000..c41d45b10a5 --- /dev/null +++ b/ddtrace/internal/test_visibility/_benchmark_mixin.py @@ -0,0 +1,75 @@ +import typing as t + +from ddtrace.ext.test_visibility._utils import _catch_and_log_exceptions +from ddtrace.internal import core +from ddtrace.internal.logger import get_logger +from ddtrace.internal.test_visibility._internal_item_ids import InternalTestId + + +log = get_logger(__name__) + + +class BenchmarkDurationData(t.NamedTuple): + duration_info: t.Optional[str] = None + duration_mean: t.Optional[float] = None + duration_runs: t.Optional[int] = None + statistics_hd15iqr: t.Optional[float] = None + statistics_iqr: t.Optional[float] = None + statistics_iqr_outliers: t.Optional[float] = None + statistics_ld15iqr: 
t.Optional[float] = None + statistics_max: t.Optional[float] = None + statistics_mean: t.Optional[float] = None + statistics_median: t.Optional[float] = None + statistics_min: t.Optional[float] = None + statistics_n: t.Optional[float] = None + statistics_ops: t.Optional[float] = None + statistics_outliers: t.Optional[float] = None + statistics_q1: t.Optional[float] = None + statistics_q3: t.Optional[float] = None + statistics_std_dev: t.Optional[float] = None + statistics_std_dev_outliers: t.Optional[float] = None + statistics_total: t.Optional[float] = None + + +class BenchmarkTestMixin: + class SetBenchmarkDataArgs(t.NamedTuple): + test_id: InternalTestId + benchmark_data: t.Optional[BenchmarkDurationData] + is_benchmark: bool = True + + @classmethod + @_catch_and_log_exceptions + def set_benchmark_data( + cls, + item_id: InternalTestId, + benchmark_data: t.Optional[BenchmarkDurationData] = None, + is_benchmark: bool = True, + ): + log.debug("Setting benchmark data for test %s: %s", item_id, benchmark_data) + core.dispatch( + "test_visibility.test.set_benchmark_data", + (BenchmarkTestMixin.SetBenchmarkDataArgs(item_id, benchmark_data, is_benchmark),), + ) + + +BENCHMARK_TAG_MAP = { + "duration_info": "benchmark.duration.info", + "duration_mean": "benchmark.duration.mean", + "duration_runs": "benchmark.duration.runs", + "statistics_hd15iqr": "benchmark.duration.statistics.hd15iqr", + "statistics_iqr": "benchmark.duration.statistics.iqr", + "statistics_iqr_outliers": "benchmark.duration.statistics.iqr_outliers", + "statistics_ld15iqr": "benchmark.duration.statistics.ld15iqr", + "statistics_max": "benchmark.duration.statistics.max", + "statistics_mean": "benchmark.duration.statistics.mean", + "statistics_median": "benchmark.duration.statistics.median", + "statistics_min": "benchmark.duration.statistics.min", + "statistics_n": "benchmark.duration.statistics.n", + "statistics_ops": "benchmark.duration.statistics.ops", + "statistics_outliers": 
"benchmark.duration.statistics.outliers", + "statistics_q1": "benchmark.duration.statistics.q1", + "statistics_q3": "benchmark.duration.statistics.q3", + "statistics_std_dev": "benchmark.duration.statistics.std_dev", + "statistics_std_dev_outliers": "benchmark.duration.statistics.std_dev_outliers", + "statistics_total": "benchmark.duration.statistics.total", +} diff --git a/ddtrace/internal/test_visibility/api.py b/ddtrace/internal/test_visibility/api.py index c5084d320cb..84f559a4701 100644 --- a/ddtrace/internal/test_visibility/api.py +++ b/ddtrace/internal/test_visibility/api.py @@ -3,6 +3,7 @@ from typing import NamedTuple from ddtrace import Span +from ddtrace import Tracer from ddtrace.ext.test_visibility import api as ext_api from ddtrace.ext.test_visibility._test_visibility_base import TestSessionId from ddtrace.ext.test_visibility._utils import _catch_and_log_exceptions @@ -14,6 +15,7 @@ from ddtrace.internal.logger import get_logger from ddtrace.internal.test_visibility._atr_mixins import ATRSessionMixin from ddtrace.internal.test_visibility._atr_mixins import ATRTestMixin +from ddtrace.internal.test_visibility._benchmark_mixin import BenchmarkTestMixin from ddtrace.internal.test_visibility._efd_mixins import EFDSessionMixin from ddtrace.internal.test_visibility._efd_mixins import EFDTestMixin from ddtrace.internal.test_visibility._internal_item_ids import InternalTestId @@ -72,7 +74,15 @@ def get_codeowners() -> t.Optional[_Codeowners]: @staticmethod @_catch_and_log_exceptions - def get_workspace_path() -> Path: + def get_tracer() -> t.Optional[Tracer]: + log.debug("Getting test session tracer") + tracer: t.Optional[Tracer] = core.dispatch_with_results("test_visibility.session.get_tracer").tracer.value + log.debug("Got test session tracer: %s", tracer) + return tracer + + @staticmethod + @_catch_and_log_exceptions + def get_workspace_path() -> t.Optional[Path]: log.debug("Getting session workspace path") workspace_path: Path = core.dispatch_with_results( 
@@ -132,7 +142,7 @@ class InternalTestSuite(ext_api.TestSuite, InternalTestBase, ITRMixin): pass -class InternalTest(ext_api.Test, InternalTestBase, ITRMixin, EFDTestMixin, ATRTestMixin): +class InternalTest(ext_api.Test, InternalTestBase, ITRMixin, EFDTestMixin, ATRTestMixin, BenchmarkTestMixin): class FinishArgs(NamedTuple): """InternalTest allows finishing with an overridden finish time (for EFD and other retry purposes)""" @@ -164,3 +174,32 @@ def is_new_test(item_id: InternalTestId) -> bool: is_new = bool(core.dispatch_with_results("test_visibility.test.is_new", (item_id,)).is_new.value) log.debug("Test %s is new: %s", item_id, is_new) return is_new + + class OverwriteAttributesArgs(NamedTuple): + test_id: InternalTestId + name: t.Optional[str] = None + suite_name: t.Optional[str] = None + parameters: t.Optional[str] = None + codeowners: t.Optional[t.List[str]] = None + + @staticmethod + @_catch_and_log_exceptions + def overwrite_attributes( + item_id: InternalTestId, + name: t.Optional[str] = None, + suite_name: t.Optional[str] = None, + parameters: t.Optional[str] = None, + codeowners: t.Optional[t.List[str]] = None, + ): + log.debug( + "Overwriting attributes for test %s: name=%s" ", suite_name=%s" ", parameters=%s" ", codeowners=%s", + item_id, + name, + suite_name, + parameters, + codeowners, + ) + core.dispatch( + "test_visibility.test.overwrite_attributes", + (InternalTest.OverwriteAttributesArgs(item_id, name, suite_name, parameters, codeowners),), + ) diff --git a/ddtrace/internal/uds.py b/ddtrace/internal/uds.py index 5c8dbf02882..fcf4e52e916 100644 --- a/ddtrace/internal/uds.py +++ b/ddtrace/internal/uds.py @@ -2,10 +2,10 @@ from typing import Any # noqa:F401 from .compat import httplib -from .http import BasePathMixin +from .http import HTTPConnectionMixin -class UDSHTTPConnection(BasePathMixin, httplib.HTTPConnection): +class UDSHTTPConnection(HTTPConnectionMixin, httplib.HTTPConnection): """An HTTP connection established over a Unix Domain 
Socket.""" # It's "important" to keep the hostname and port arguments here; while there are not used by the connection diff --git a/ddtrace/internal/utils/__init__.py b/ddtrace/internal/utils/__init__.py index 294e99d1263..1d7ee493953 100644 --- a/ddtrace/internal/utils/__init__.py +++ b/ddtrace/internal/utils/__init__.py @@ -3,6 +3,7 @@ from typing import List # noqa:F401 from typing import Optional # noqa:F401 from typing import Tuple # noqa:F401 +from typing import Union # noqa:F401 class ArgumentError(Exception): @@ -13,7 +14,7 @@ class ArgumentError(Exception): def get_argument_value( - args: List[Any], + args: Union[Tuple[Any], List[Any]], kwargs: Dict[str, Any], pos: int, kw: str, diff --git a/ddtrace/internal/utils/http.py b/ddtrace/internal/utils/http.py index cba605a1527..7e85ce01356 100644 --- a/ddtrace/internal/utils/http.py +++ b/ddtrace/internal/utils/http.py @@ -1,5 +1,6 @@ from contextlib import contextmanager from dataclasses import dataclass +from email.encoders import encode_noop from json import loads import logging import os @@ -418,7 +419,7 @@ def parse_message(msg): class FormData: name: str filename: str - data: str + data: Union[str, bytes] content_type: str @@ -431,12 +432,12 @@ def multipart(parts: List[FormData]) -> Tuple[bytes, dict]: del msg["MIME-Version"] for part in parts: - app = MIMEApplication(part.data, part.content_type, lambda _: _) + app = MIMEApplication(part.data, part.content_type, encode_noop) app.add_header("Content-Disposition", "form-data", name=part.name, filename=part.filename) del app["MIME-Version"] msg.attach(app) # Split headers and body - headers, _, body = msg.as_string(policy=HTTP).partition("\r\n\r\n") + headers, _, body = msg.as_bytes(policy=HTTP).partition(b"\r\n\r\n") - return body.encode("utf-8"), dict(_.split(": ") for _ in headers.splitlines()) + return body, dict(_.split(": ") for _ in headers.decode().splitlines()) diff --git a/ddtrace/internal/utils/inspection.py 
b/ddtrace/internal/utils/inspection.py index 7f739d4bf65..bb24a3ae80d 100644 --- a/ddtrace/internal/utils/inspection.py +++ b/ddtrace/internal/utils/inspection.py @@ -5,6 +5,7 @@ from pathlib import Path from types import CodeType from types import FunctionType +from typing import Iterator from typing import Set from typing import cast @@ -112,3 +113,12 @@ def match(g): pass return f + + +def collect_code_objects(code: CodeType) -> Iterator[CodeType]: + q = deque([code]) + while q: + c = q.popleft() + for new_code in (_ for _ in c.co_consts if isinstance(_, CodeType)): + yield new_code + q.append(new_code) diff --git a/ddtrace/internal/wrapping/__init__.py b/ddtrace/internal/wrapping/__init__.py index dae0c183ac0..83598e1911c 100644 --- a/ddtrace/internal/wrapping/__init__.py +++ b/ddtrace/internal/wrapping/__init__.py @@ -144,6 +144,8 @@ def wrap_bytecode(wrapper, wrapped): bc.Instr("RESUME", 0, lineno=lineno - 1), bc.Instr("PUSH_NULL", lineno=lineno), ] + if PY >= (3, 13): + instrs[1], instrs[2] = instrs[2], instrs[1] if code.co_cellvars: instrs[0:0] = [Instr("MAKE_CELL", bc.CellVar(_), lineno=lineno) for _ in code.co_cellvars] diff --git a/ddtrace/internal/wrapping/context.py b/ddtrace/internal/wrapping/context.py index 24fd91d483e..c6b4ee896e2 100644 --- a/ddtrace/internal/wrapping/context.py +++ b/ddtrace/internal/wrapping/context.py @@ -1,3 +1,4 @@ +from abc import ABC from contextvars import ContextVar from inspect import iscoroutinefunction import sys @@ -30,8 +31,9 @@ # # Because we also want to capture the return value, our context manager extends # the Python one by implementing a __return__ method that will be called with -# the return value of the function. The __exit__ method is only called if the -# function raises an exception. +# the return value of the function. Contrary to ordinary context managers, +# though, the __exit__ method is only called if the function raises an +# exception. 
# # Because CPython 3.11 introduced zero-cost exceptions, we cannot nest try # blocks in the function's bytecode. In this case, we call the context manager @@ -68,7 +70,55 @@ CONTEXT_RETURN = Assembly() CONTEXT_FOOT = Assembly() -if sys.version_info >= (3, 12): +if sys.version_info >= (3, 14): + raise NotImplementedError("Python >= 3.14 is not supported yet") +elif sys.version_info >= (3, 13): + CONTEXT_HEAD.parse( + r""" + load_const {context_enter} + push_null + call 0 + pop_top + """ + ) + CONTEXT_RETURN.parse( + r""" + push_null + load_const {context_return} + swap 3 + call 1 + """ + ) + + CONTEXT_RETURN_CONST = Assembly() + CONTEXT_RETURN_CONST.parse( + r""" + load_const {context_return} + push_null + load_const {value} + call 1 + """ + ) + + CONTEXT_FOOT.parse( + r""" + try @_except lasti + push_exc_info + load_const {context_exit} + push_null + call 0 + pop_top + reraise 2 + tried + + _except: + copy 3 + pop_except + reraise 1 + """ + ) + +elif sys.version_info >= (3, 12): CONTEXT_HEAD.parse( r""" push_null @@ -256,7 +306,7 @@ # This is abstract and should not be used directly -class BaseWrappingContext(t.ContextManager): +class BaseWrappingContext(ABC): __priority__: int = 0 def __init__(self, f: FunctionType): diff --git a/ddtrace/internal/writer/writer.py b/ddtrace/internal/writer/writer.py index 59eb56bd821..01b05515984 100644 --- a/ddtrace/internal/writer/writer.py +++ b/ddtrace/internal/writer/writer.py @@ -31,7 +31,6 @@ from ..constants import _HTTPLIB_NO_TRACE_REQUEST from ..encoding import JSONEncoderV2 from ..logger import get_logger -from ..runtime import container from ..serverless import in_azure_function from ..serverless import in_gcp_function from ..sma import SimpleMovingAverage @@ -493,9 +492,6 @@ def __init__( } if headers: _headers.update(headers) - self._container_info = container.get_container_info() - container.update_headers_with_container_info(_headers, self._container_info) - container.update_header_with_external_info(_headers) 
_headers.update({"Content-Type": client.encoder.content_type}) # type: ignore[attr-defined] additional_header_str = os.environ.get("_DD_TRACE_WRITER_ADDITIONAL_HEADERS") diff --git a/ddtrace/llmobs/_integrations/anthropic.py b/ddtrace/llmobs/_integrations/anthropic.py index 0747d68e77b..dfb39c0f7e9 100644 --- a/ddtrace/llmobs/_integrations/anthropic.py +++ b/ddtrace/llmobs/_integrations/anthropic.py @@ -19,7 +19,6 @@ from ddtrace.llmobs._constants import TOTAL_TOKENS_METRIC_KEY from ddtrace.llmobs._integrations.base import BaseLLMIntegration from ddtrace.llmobs._utils import _get_attr -from ddtrace.llmobs._utils import safe_json log = get_logger(__name__) @@ -66,21 +65,21 @@ def _llmobs_set_tags( system_prompt = kwargs.get("system") input_messages = self._extract_input_message(messages, system_prompt) - span.set_tag_str(SPAN_KIND, "llm") - span.set_tag_str(MODEL_NAME, span.get_tag("anthropic.request.model") or "") - span.set_tag_str(INPUT_MESSAGES, safe_json(input_messages)) - span.set_tag_str(METADATA, safe_json(parameters)) - span.set_tag_str(MODEL_PROVIDER, "anthropic") - - if span.error or response is None: - span.set_tag_str(OUTPUT_MESSAGES, json.dumps([{"content": ""}])) - else: + output_messages = [{"content": ""}] + if not span.error and response is not None: output_messages = self._extract_output_message(response) - span.set_tag_str(OUTPUT_MESSAGES, safe_json(output_messages)) - usage = self._get_llmobs_metrics_tags(span) - if usage: - span.set_tag_str(METRICS, safe_json(usage)) + span._set_ctx_items( + { + SPAN_KIND: "llm", + MODEL_NAME: span.get_tag("anthropic.request.model") or "", + MODEL_PROVIDER: "anthropic", + INPUT_MESSAGES: input_messages, + METADATA: parameters, + OUTPUT_MESSAGES: output_messages, + METRICS: self._get_llmobs_metrics_tags(span), + } + ) def _extract_input_message(self, messages, system_prompt=None): """Extract input messages from the stored prompt. 
diff --git a/ddtrace/llmobs/_integrations/base.py b/ddtrace/llmobs/_integrations/base.py index 709e72f3a26..a6968ce0d83 100644 --- a/ddtrace/llmobs/_integrations/base.py +++ b/ddtrace/llmobs/_integrations/base.py @@ -133,7 +133,7 @@ def trace(self, pin: Pin, operation_id: str, submit_to_llmobs: bool = False, **k # The LLMObs parent ID tag is not set at span start time. We need to manually set the parent ID tag now # in these cases to avoid conflicting with the later propagated tags. parent_id = _get_llmobs_parent_id(span) or "undefined" - span.set_tag_str(PARENT_ID_KEY, str(parent_id)) + span._set_ctx_item(PARENT_ID_KEY, str(parent_id)) return span @classmethod diff --git a/ddtrace/llmobs/_integrations/bedrock.py b/ddtrace/llmobs/_integrations/bedrock.py index 78798ae4f98..d2d57b50ed3 100644 --- a/ddtrace/llmobs/_integrations/bedrock.py +++ b/ddtrace/llmobs/_integrations/bedrock.py @@ -19,7 +19,6 @@ from ddtrace.llmobs._constants import TOTAL_TOKENS_METRIC_KEY from ddtrace.llmobs._integrations import BaseLLMIntegration from ddtrace.llmobs._utils import _get_llmobs_parent_id -from ddtrace.llmobs._utils import safe_json log = get_logger(__name__) @@ -39,7 +38,7 @@ def _llmobs_set_tags( """Extract prompt/response tags from a completion and set them as temporary "_ml_obs.*" tags.""" if span.get_tag(PROPAGATED_PARENT_ID_KEY) is None: parent_id = _get_llmobs_parent_id(span) or "undefined" - span.set_tag(PARENT_ID_KEY, parent_id) + span._set_ctx_item(PARENT_ID_KEY, parent_id) parameters = {} if span.get_tag("bedrock.request.temperature"): parameters["temperature"] = float(span.get_tag("bedrock.request.temperature") or 0.0) @@ -48,20 +47,20 @@ def _llmobs_set_tags( prompt = kwargs.get("prompt", "") input_messages = self._extract_input_message(prompt) - - span.set_tag_str(SPAN_KIND, "llm") - span.set_tag_str(MODEL_NAME, span.get_tag("bedrock.request.model") or "") - span.set_tag_str(MODEL_PROVIDER, span.get_tag("bedrock.request.model_provider") or "") - - 
span.set_tag_str(INPUT_MESSAGES, safe_json(input_messages)) - span.set_tag_str(METADATA, safe_json(parameters)) - if span.error or response is None: - span.set_tag_str(OUTPUT_MESSAGES, safe_json([{"content": ""}])) - else: + output_messages = [{"content": ""}] + if not span.error and response is not None: output_messages = self._extract_output_message(response) - span.set_tag_str(OUTPUT_MESSAGES, safe_json(output_messages)) - metrics = self._llmobs_metrics(span, response) - span.set_tag_str(METRICS, safe_json(metrics)) + span._set_ctx_items( + { + SPAN_KIND: "llm", + MODEL_NAME: span.get_tag("bedrock.request.model") or "", + MODEL_PROVIDER: span.get_tag("bedrock.request.model_provider") or "", + INPUT_MESSAGES: input_messages, + METADATA: parameters, + METRICS: self._llmobs_metrics(span, response), + OUTPUT_MESSAGES: output_messages, + } + ) @staticmethod def _llmobs_metrics(span: Span, response: Optional[Dict[str, Any]]) -> Dict[str, Any]: diff --git a/ddtrace/llmobs/_integrations/gemini.py b/ddtrace/llmobs/_integrations/gemini.py index f1a4730812f..491187475f0 100644 --- a/ddtrace/llmobs/_integrations/gemini.py +++ b/ddtrace/llmobs/_integrations/gemini.py @@ -19,7 +19,6 @@ from ddtrace.llmobs._integrations.utils import get_system_instructions_from_google_model from ddtrace.llmobs._integrations.utils import llmobs_get_metadata_google from ddtrace.llmobs._utils import _get_attr -from ddtrace.llmobs._utils import safe_json class GeminiIntegration(BaseLLMIntegration): @@ -41,28 +40,28 @@ def _llmobs_set_tags( response: Optional[Any] = None, operation: str = "", ) -> None: - span.set_tag_str(SPAN_KIND, "llm") - span.set_tag_str(MODEL_NAME, span.get_tag("google_generativeai.request.model") or "") - span.set_tag_str(MODEL_PROVIDER, span.get_tag("google_generativeai.request.provider") or "") - instance = kwargs.get("instance", None) metadata = llmobs_get_metadata_google(kwargs, instance) - span.set_tag_str(METADATA, safe_json(metadata)) system_instruction = 
get_system_instructions_from_google_model(instance) input_contents = get_argument_value(args, kwargs, 0, "contents") input_messages = self._extract_input_message(input_contents, system_instruction) - span.set_tag_str(INPUT_MESSAGES, safe_json(input_messages)) - if span.error or response is None: - span.set_tag_str(OUTPUT_MESSAGES, safe_json([{"content": ""}])) - else: + output_messages = [{"content": ""}] + if not span.error and response is not None: output_messages = self._extract_output_message(response) - span.set_tag_str(OUTPUT_MESSAGES, safe_json(output_messages)) - usage = get_llmobs_metrics_tags_google("google_generativeai", span) - if usage: - span.set_tag_str(METRICS, safe_json(usage)) + span._set_ctx_items( + { + SPAN_KIND: "llm", + MODEL_NAME: span.get_tag("google_generativeai.request.model") or "", + MODEL_PROVIDER: span.get_tag("google_generativeai.request.provider") or "", + METADATA: metadata, + INPUT_MESSAGES: input_messages, + OUTPUT_MESSAGES: output_messages, + METRICS: get_llmobs_metrics_tags_google("google_generativeai", span), + } + ) def _extract_input_message(self, contents, system_instruction=None): messages = [] diff --git a/ddtrace/llmobs/_integrations/langchain.py b/ddtrace/llmobs/_integrations/langchain.py index c2304289c2c..1fce3d11804 100644 --- a/ddtrace/llmobs/_integrations/langchain.py +++ b/ddtrace/llmobs/_integrations/langchain.py @@ -28,7 +28,6 @@ from ddtrace.llmobs._constants import SPAN_KIND from ddtrace.llmobs._constants import TOTAL_TOKENS_METRIC_KEY from ddtrace.llmobs._integrations.base import BaseLLMIntegration -from ddtrace.llmobs._utils import safe_json from ddtrace.llmobs.utils import Document @@ -44,6 +43,8 @@ ANTHROPIC_PROVIDER_NAME = "anthropic" BEDROCK_PROVIDER_NAME = "amazon_bedrock" OPENAI_PROVIDER_NAME = "openai" +VERTEXAI_PROVIDER_NAME = "vertexai" +GEMINI_PROVIDER_NAME = "google_palm" ROLE_MAPPING = { "human": "user", @@ -81,6 +82,12 @@ def _llmobs_set_tags( if model_provider: if 
model_provider.startswith(BEDROCK_PROVIDER_NAME): llmobs_integration = "bedrock" + # only the llm interface for Vertex AI will get instrumented + elif model_provider.startswith(VERTEXAI_PROVIDER_NAME) and operation == "llm": + llmobs_integration = "vertexai" + # only the llm interface for Gemini will get instrumented + elif model_provider.startswith(GEMINI_PROVIDER_NAME) and operation == "llm": + llmobs_integration = "google_generativeai" elif model_provider.startswith(OPENAI_PROVIDER_NAME): llmobs_integration = "openai" elif operation == "chat" and model_provider.startswith(ANTHROPIC_PROVIDER_NAME): @@ -122,15 +129,11 @@ def _llmobs_set_metadata(self, span: Span, model_provider: Optional[str] = None) if max_tokens is not None and max_tokens != "None": metadata["max_tokens"] = int(max_tokens) if metadata: - span.set_tag_str(METADATA, safe_json(metadata)) + span._set_ctx_item(METADATA, metadata) def _llmobs_set_tags_from_llm( self, span: Span, args: List[Any], kwargs: Dict[str, Any], completions: Any, is_workflow: bool = False ) -> None: - span.set_tag_str(SPAN_KIND, "workflow" if is_workflow else "llm") - span.set_tag_str(MODEL_NAME, span.get_tag(MODEL) or "") - span.set_tag_str(MODEL_PROVIDER, span.get_tag(PROVIDER) or "") - input_tag_key = INPUT_VALUE if is_workflow else INPUT_MESSAGES output_tag_key = OUTPUT_VALUE if is_workflow else OUTPUT_MESSAGES stream = span.get_tag("langchain.request.stream") @@ -138,21 +141,28 @@ def _llmobs_set_tags_from_llm( prompts = get_argument_value(args, kwargs, 0, "input" if stream else "prompts") if isinstance(prompts, str) or not isinstance(prompts, list): prompts = [prompts] - if stream: # chat and llm take the same input types for streamed calls - span.set_tag_str(input_tag_key, safe_json(self._handle_stream_input_messages(prompts))) + input_messages = self._handle_stream_input_messages(prompts) else: - span.set_tag_str(input_tag_key, safe_json([{"content": str(prompt)} for prompt in prompts])) + input_messages = [{"content": 
str(prompt)} for prompt in prompts] + + span._set_ctx_items( + { + SPAN_KIND: "workflow" if is_workflow else "llm", + MODEL_NAME: span.get_tag(MODEL) or "", + MODEL_PROVIDER: span.get_tag(PROVIDER) or "", + input_tag_key: input_messages, + } + ) if span.error: - span.set_tag_str(output_tag_key, safe_json([{"content": ""}])) + span._set_ctx_item(output_tag_key, [{"content": ""}]) return if stream: message_content = [{"content": completions}] # single completion for streams else: message_content = [{"content": completion[0].text} for completion in completions.generations] - if not is_workflow: input_tokens, output_tokens, total_tokens = self.check_token_usage_chat_or_llm_result(completions) if total_tokens > 0: @@ -161,8 +171,8 @@ def _llmobs_set_tags_from_llm( OUTPUT_TOKENS_METRIC_KEY: output_tokens, TOTAL_TOKENS_METRIC_KEY: total_tokens, } - span.set_tag_str(METRICS, safe_json(metrics)) - span.set_tag_str(output_tag_key, safe_json(message_content)) + span._set_ctx_item(METRICS, metrics) + span._set_ctx_item(output_tag_key, message_content) def _llmobs_set_tags_from_chat_model( self, @@ -172,10 +182,13 @@ def _llmobs_set_tags_from_chat_model( chat_completions: Any, is_workflow: bool = False, ) -> None: - span.set_tag_str(SPAN_KIND, "workflow" if is_workflow else "llm") - span.set_tag_str(MODEL_NAME, span.get_tag(MODEL) or "") - span.set_tag_str(MODEL_PROVIDER, span.get_tag(PROVIDER) or "") - + span._set_ctx_items( + { + SPAN_KIND: "workflow" if is_workflow else "llm", + MODEL_NAME: span.get_tag(MODEL) or "", + MODEL_PROVIDER: span.get_tag(PROVIDER) or "", + } + ) input_tag_key = INPUT_VALUE if is_workflow else INPUT_MESSAGES output_tag_key = OUTPUT_VALUE if is_workflow else OUTPUT_MESSAGES stream = span.get_tag("langchain.request.stream") @@ -195,17 +208,17 @@ def _llmobs_set_tags_from_chat_model( ) role = getattr(message, "role", ROLE_MAPPING.get(message.type, "")) input_messages.append({"content": str(content), "role": str(role)}) - span.set_tag_str(input_tag_key, 
safe_json(input_messages)) + span._set_ctx_item(input_tag_key, input_messages) if span.error: - span.set_tag_str(output_tag_key, json.dumps([{"content": ""}])) + span._set_ctx_item(output_tag_key, [{"content": ""}]) return output_messages = [] if stream: content = chat_completions.content role = chat_completions.__class__.__name__.replace("MessageChunk", "").lower() # AIMessageChunk --> ai - span.set_tag_str(output_tag_key, safe_json([{"content": content, "role": ROLE_MAPPING.get(role, "")}])) + span._set_ctx_item(output_tag_key, [{"content": content, "role": ROLE_MAPPING.get(role, "")}]) return input_tokens, output_tokens, total_tokens = 0, 0, 0 @@ -241,7 +254,7 @@ def _llmobs_set_tags_from_chat_model( output_tokens = sum(v["output_tokens"] for v in tokens_per_choice_run_id.values()) total_tokens = sum(v["total_tokens"] for v in tokens_per_choice_run_id.values()) - span.set_tag_str(output_tag_key, safe_json(output_messages)) + span._set_ctx_item(output_tag_key, output_messages) if not is_workflow and total_tokens > 0: metrics = { @@ -249,7 +262,7 @@ def _llmobs_set_tags_from_chat_model( OUTPUT_TOKENS_METRIC_KEY: output_tokens, TOTAL_TOKENS_METRIC_KEY: total_tokens, } - span.set_tag_str(METRICS, safe_json(metrics)) + span._set_ctx_item(METRICS, metrics) def _extract_tool_calls(self, chat_completion_msg: Any) -> List[Dict[str, Any]]: """Extracts tool calls from a langchain chat completion.""" @@ -293,20 +306,17 @@ def _handle_stream_input_messages(self, inputs): return input_messages def _llmobs_set_meta_tags_from_chain(self, span: Span, args, kwargs, outputs: Any) -> None: - span.set_tag_str(SPAN_KIND, "workflow") - stream = span.get_tag("langchain.request.stream") - if stream: + if span.get_tag("langchain.request.stream"): inputs = get_argument_value(args, kwargs, 0, "input") else: inputs = kwargs + formatted_inputs = "" if inputs is not None: formatted_inputs = self.format_io(inputs) - span.set_tag_str(INPUT_VALUE, safe_json(formatted_inputs)) - if span.error or 
outputs is None: - span.set_tag_str(OUTPUT_VALUE, "") - return - formatted_outputs = self.format_io(outputs) - span.set_tag_str(OUTPUT_VALUE, safe_json(formatted_outputs)) + formatted_outputs = "" + if not span.error and outputs is not None: + formatted_outputs = self.format_io(outputs) + span._set_ctx_items({SPAN_KIND: "workflow", INPUT_VALUE: formatted_inputs, OUTPUT_VALUE: formatted_outputs}) def _llmobs_set_meta_tags_from_embedding( self, @@ -316,13 +326,15 @@ def _llmobs_set_meta_tags_from_embedding( output_embedding: Union[List[float], List[List[float]], None], is_workflow: bool = False, ) -> None: - span.set_tag_str(SPAN_KIND, "workflow" if is_workflow else "embedding") - span.set_tag_str(MODEL_NAME, span.get_tag(MODEL) or "") - span.set_tag_str(MODEL_PROVIDER, span.get_tag(PROVIDER) or "") - + span._set_ctx_items( + { + SPAN_KIND: "workflow" if is_workflow else "embedding", + MODEL_NAME: span.get_tag(MODEL) or "", + MODEL_PROVIDER: span.get_tag(PROVIDER) or "", + } + ) input_tag_key = INPUT_VALUE if is_workflow else INPUT_DOCUMENTS output_tag_key = OUTPUT_VALUE - output_values: Any try: @@ -335,16 +347,16 @@ def _llmobs_set_meta_tags_from_embedding( ): if is_workflow: formatted_inputs = self.format_io(input_texts) - span.set_tag_str(input_tag_key, safe_json(formatted_inputs)) + span._set_ctx_item(input_tag_key, formatted_inputs) else: if isinstance(input_texts, str): input_texts = [input_texts] input_documents = [Document(text=str(doc)) for doc in input_texts] - span.set_tag_str(input_tag_key, safe_json(input_documents)) + span._set_ctx_item(input_tag_key, input_documents) except TypeError: log.warning("Failed to serialize embedding input data to JSON") if span.error or output_embedding is None: - span.set_tag_str(output_tag_key, "") + span._set_ctx_item(output_tag_key, "") return try: if isinstance(output_embedding[0], float): @@ -356,7 +368,7 @@ def _llmobs_set_meta_tags_from_embedding( output_values = output_embedding embeddings_count = 
len(output_embedding) embedding_dim = len(output_values[0]) - span.set_tag_str( + span._set_ctx_item( output_tag_key, "[{} embedding(s) returned with size {}]".format(embeddings_count, embedding_dim), ) @@ -371,19 +383,22 @@ def _llmobs_set_meta_tags_from_similarity_search( output_documents: Union[List[Any], None], is_workflow: bool = False, ) -> None: - span.set_tag_str(SPAN_KIND, "workflow" if is_workflow else "retrieval") - span.set_tag_str(MODEL_NAME, span.get_tag(MODEL) or "") - span.set_tag_str(MODEL_PROVIDER, span.get_tag(PROVIDER) or "") - + span._set_ctx_items( + { + SPAN_KIND: "workflow" if is_workflow else "retrieval", + MODEL_NAME: span.get_tag(MODEL) or "", + MODEL_PROVIDER: span.get_tag(PROVIDER) or "", + } + ) input_query = get_argument_value(args, kwargs, 0, "query") if input_query is not None: formatted_inputs = self.format_io(input_query) - span.set_tag_str(INPUT_VALUE, safe_json(formatted_inputs)) + span._set_ctx_item(INPUT_VALUE, formatted_inputs) if span.error or not output_documents or not isinstance(output_documents, list): - span.set_tag_str(OUTPUT_VALUE, "") + span._set_ctx_item(OUTPUT_VALUE, "") return if is_workflow: - span.set_tag_str(OUTPUT_VALUE, "[{} document(s) retrieved]".format(len(output_documents))) + span._set_ctx_item(OUTPUT_VALUE, "[{} document(s) retrieved]".format(len(output_documents))) return documents = [] for d in output_documents: @@ -392,32 +407,31 @@ def _llmobs_set_meta_tags_from_similarity_search( metadata = getattr(d, "metadata", {}) doc["name"] = metadata.get("name", doc["id"]) documents.append(doc) - span.set_tag_str(OUTPUT_DOCUMENTS, safe_json(self.format_io(documents))) + span._set_ctx_item(OUTPUT_DOCUMENTS, self.format_io(documents)) # we set the value as well to ensure that the UI would display it in case the span was the root - span.set_tag_str(OUTPUT_VALUE, "[{} document(s) retrieved]".format(len(documents))) + span._set_ctx_item(OUTPUT_VALUE, "[{} document(s) retrieved]".format(len(documents))) def 
_llmobs_set_meta_tags_from_tool(self, span: Span, tool_inputs: Dict[str, Any], tool_output: object) -> None: - if span.get_tag(METADATA): - metadata = json.loads(str(span.get_tag(METADATA))) - else: - metadata = {} - - span.set_tag_str(SPAN_KIND, "tool") + metadata = json.loads(str(span.get_tag(METADATA))) if span.get_tag(METADATA) else {} + formatted_input = "" if tool_inputs is not None: tool_input = tool_inputs.get("input") if tool_inputs.get("config"): metadata["tool_config"] = tool_inputs.get("config") if tool_inputs.get("info"): metadata["tool_info"] = tool_inputs.get("info") - if metadata: - span.set_tag_str(METADATA, safe_json(metadata)) formatted_input = self.format_io(tool_input) - span.set_tag_str(INPUT_VALUE, safe_json(formatted_input)) - if span.error or tool_output is None: - span.set_tag_str(OUTPUT_VALUE, "") - return - formatted_outputs = self.format_io(tool_output) - span.set_tag_str(OUTPUT_VALUE, safe_json(formatted_outputs)) + formatted_outputs = "" + if not span.error and tool_output is not None: + formatted_outputs = self.format_io(tool_output) + span._set_ctx_items( + { + SPAN_KIND: "tool", + METADATA: metadata, + INPUT_VALUE: formatted_input, + OUTPUT_VALUE: formatted_outputs, + } + ) def _set_base_span_tags( # type: ignore[override] self, diff --git a/ddtrace/llmobs/_integrations/openai.py b/ddtrace/llmobs/_integrations/openai.py index 5c9e73eaca7..bd727b1a5a2 100644 --- a/ddtrace/llmobs/_integrations/openai.py +++ b/ddtrace/llmobs/_integrations/openai.py @@ -23,7 +23,6 @@ from ddtrace.llmobs._constants import TOTAL_TOKENS_METRIC_KEY from ddtrace.llmobs._integrations.base import BaseLLMIntegration from ddtrace.llmobs._utils import _get_attr -from ddtrace.llmobs._utils import safe_json from ddtrace.llmobs.utils import Document from ddtrace.pin import Pin @@ -148,19 +147,18 @@ def _llmobs_set_tags( ) -> None: """Sets meta tags and metrics for span events to be sent to LLMObs.""" span_kind = "embedding" if operation == "embedding" else "llm" - 
span.set_tag_str(SPAN_KIND, span_kind) model_name = span.get_tag("openai.response.model") or span.get_tag("openai.request.model") - span.set_tag_str(MODEL_NAME, model_name or "") model_provider = "azure_openai" if self._is_azure_openai(span) else "openai" - span.set_tag_str(MODEL_PROVIDER, model_provider) if operation == "completion": self._llmobs_set_meta_tags_from_completion(span, kwargs, response) elif operation == "chat": self._llmobs_set_meta_tags_from_chat(span, kwargs, response) elif operation == "embedding": self._llmobs_set_meta_tags_from_embedding(span, kwargs, response) - metrics = self._set_llmobs_metrics_tags(span, response) - span.set_tag_str(METRICS, safe_json(metrics)) + metrics = self._extract_llmobs_metrics_tags(span, response) + span._set_ctx_items( + {SPAN_KIND: span_kind, MODEL_NAME: model_name or "", MODEL_PROVIDER: model_provider, METRICS: metrics} + ) @staticmethod def _llmobs_set_meta_tags_from_completion(span: Span, kwargs: Dict[str, Any], completions: Any) -> None: @@ -168,20 +166,18 @@ def _llmobs_set_meta_tags_from_completion(span: Span, kwargs: Dict[str, Any], co prompt = kwargs.get("prompt", "") if isinstance(prompt, str): prompt = [prompt] - span.set_tag_str(INPUT_MESSAGES, safe_json([{"content": str(p)} for p in prompt])) - parameters = {k: v for k, v in kwargs.items() if k not in ("model", "prompt")} - span.set_tag_str(METADATA, safe_json(parameters)) - - if span.error or not completions: - span.set_tag_str(OUTPUT_MESSAGES, safe_json([{"content": ""}])) - return - if hasattr(completions, "choices"): # non-streaming response - choices = completions.choices - else: # streamed response - choices = completions - messages = [{"content": _get_attr(choice, "text", "")} for choice in choices] - span.set_tag_str(OUTPUT_MESSAGES, safe_json(messages)) + output_messages = [{"content": ""}] + if not span.error and completions: + choices = getattr(completions, "choices", completions) + output_messages = [{"content": _get_attr(choice, "text", 
"")} for choice in choices] + span._set_ctx_items( + { + INPUT_MESSAGES: [{"content": str(p)} for p in prompt], + METADATA: parameters, + OUTPUT_MESSAGES: output_messages, + } + ) @staticmethod def _llmobs_set_meta_tags_from_chat(span: Span, kwargs: Dict[str, Any], messages: Optional[Any]) -> None: @@ -189,16 +185,14 @@ def _llmobs_set_meta_tags_from_chat(span: Span, kwargs: Dict[str, Any], messages input_messages = [] for m in kwargs.get("messages", []): input_messages.append({"content": str(_get_attr(m, "content", "")), "role": str(_get_attr(m, "role", ""))}) - span.set_tag_str(INPUT_MESSAGES, safe_json(input_messages)) - parameters = {k: v for k, v in kwargs.items() if k not in ("model", "messages", "tools", "functions")} - span.set_tag_str(METADATA, safe_json(parameters)) + span._set_ctx_items({INPUT_MESSAGES: input_messages, METADATA: parameters}) if span.error or not messages: - span.set_tag_str(OUTPUT_MESSAGES, safe_json([{"content": ""}])) + span._set_ctx_item(OUTPUT_MESSAGES, [{"content": ""}]) return - output_messages = [] if isinstance(messages, list): # streamed response + output_messages = [] for streamed_message in messages: message = {"content": streamed_message["content"], "role": streamed_message["role"]} tool_calls = streamed_message.get("tool_calls", []) @@ -213,9 +207,10 @@ def _llmobs_set_meta_tags_from_chat(span: Span, kwargs: Dict[str, Any], messages for tool_call in tool_calls ] output_messages.append(message) - span.set_tag_str(OUTPUT_MESSAGES, safe_json(output_messages)) + span._set_ctx_item(OUTPUT_MESSAGES, output_messages) return choices = _get_attr(messages, "choices", []) + output_messages = [] for idx, choice in enumerate(choices): tool_calls_info = [] choice_message = _get_attr(choice, "message", {}) @@ -241,7 +236,7 @@ def _llmobs_set_meta_tags_from_chat(span: Span, kwargs: Dict[str, Any], messages output_messages.append({"content": content, "role": role, "tool_calls": tool_calls_info}) continue output_messages.append({"content": 
content, "role": role}) - span.set_tag_str(OUTPUT_MESSAGES, safe_json(output_messages)) + span._set_ctx_item(OUTPUT_MESSAGES, output_messages) @staticmethod def _llmobs_set_meta_tags_from_embedding(span: Span, kwargs: Dict[str, Any], resp: Any) -> None: @@ -250,7 +245,6 @@ def _llmobs_set_meta_tags_from_embedding(span: Span, kwargs: Dict[str, Any], res metadata = {"encoding_format": encoding_format} if kwargs.get("dimensions"): metadata["dimensions"] = kwargs.get("dimensions") - span.set_tag_str(METADATA, safe_json(metadata)) embedding_inputs = kwargs.get("input", "") if isinstance(embedding_inputs, str) or isinstance(embedding_inputs[0], int): @@ -258,20 +252,19 @@ def _llmobs_set_meta_tags_from_embedding(span: Span, kwargs: Dict[str, Any], res input_documents = [] for doc in embedding_inputs: input_documents.append(Document(text=str(doc))) - span.set_tag_str(INPUT_DOCUMENTS, safe_json(input_documents)) - + span._set_ctx_items({METADATA: metadata, INPUT_DOCUMENTS: input_documents}) if span.error: return if encoding_format == "float": embedding_dim = len(resp.data[0].embedding) - span.set_tag_str( + span._set_ctx_item( OUTPUT_VALUE, "[{} embedding(s) returned with size {}]".format(len(resp.data), embedding_dim) ) return - span.set_tag_str(OUTPUT_VALUE, "[{} embedding(s) returned]".format(len(resp.data))) + span._set_ctx_item(OUTPUT_VALUE, "[{} embedding(s) returned]".format(len(resp.data))) @staticmethod - def _set_llmobs_metrics_tags(span: Span, resp: Any) -> Dict[str, Any]: + def _extract_llmobs_metrics_tags(span: Span, resp: Any) -> Dict[str, Any]: """Extract metrics from a chat/completion and set them as a temporary "_ml_obs.metrics" tag.""" token_usage = _get_attr(resp, "usage", None) if token_usage is not None: diff --git a/ddtrace/llmobs/_integrations/vertexai.py b/ddtrace/llmobs/_integrations/vertexai.py index 69fdc7eb665..4019268e0c4 100644 --- a/ddtrace/llmobs/_integrations/vertexai.py +++ b/ddtrace/llmobs/_integrations/vertexai.py @@ -19,7 +19,6 @@ from 
ddtrace.llmobs._integrations.utils import get_system_instructions_from_google_model from ddtrace.llmobs._integrations.utils import llmobs_get_metadata_google from ddtrace.llmobs._utils import _get_attr -from ddtrace.llmobs._utils import safe_json class VertexAIIntegration(BaseLLMIntegration): @@ -41,30 +40,29 @@ def _llmobs_set_tags( response: Optional[Any] = None, operation: str = "", ) -> None: - span.set_tag_str(SPAN_KIND, "llm") - span.set_tag_str(MODEL_NAME, span.get_tag("vertexai.request.model") or "") - span.set_tag_str(MODEL_PROVIDER, span.get_tag("vertexai.request.provider") or "") - instance = kwargs.get("instance", None) history = kwargs.get("history", []) metadata = llmobs_get_metadata_google(kwargs, instance) - span.set_tag_str(METADATA, safe_json(metadata)) system_instruction = get_system_instructions_from_google_model(instance) input_contents = get_argument_value(args, kwargs, 0, "contents") input_messages = self._extract_input_message(input_contents, history, system_instruction) - span.set_tag_str(INPUT_MESSAGES, safe_json(input_messages)) - - if span.error or response is None: - span.set_tag_str(OUTPUT_MESSAGES, safe_json([{"content": ""}])) - return - output_messages = self._extract_output_message(response) - span.set_tag_str(OUTPUT_MESSAGES, safe_json(output_messages)) + output_messages = [{"content": ""}] + if not span.error and response is not None: + output_messages = self._extract_output_message(response) - usage = get_llmobs_metrics_tags_google("vertexai", span) - if usage: - span.set_tag_str(METRICS, safe_json(usage)) + span._set_ctx_items( + { + SPAN_KIND: "llm", + MODEL_NAME: span.get_tag("vertexai.request.model") or "", + MODEL_PROVIDER: span.get_tag("vertexai.request.provider") or "", + METADATA: metadata, + INPUT_MESSAGES: input_messages, + OUTPUT_MESSAGES: output_messages, + METRICS: get_llmobs_metrics_tags_google("vertexai", span), + } + ) def _extract_input_message(self, contents, history, system_instruction=None): from 
vertexai.generative_models._generative_models import Part diff --git a/ddtrace/llmobs/_llmobs.py b/ddtrace/llmobs/_llmobs.py index a3ac9501319..49815151118 100644 --- a/ddtrace/llmobs/_llmobs.py +++ b/ddtrace/llmobs/_llmobs.py @@ -23,6 +23,7 @@ from ddtrace.internal.telemetry import telemetry_writer from ddtrace.internal.telemetry.constants import TELEMETRY_APM_PRODUCT from ddtrace.internal.utils.formats import asbool +from ddtrace.internal.utils.formats import parse_tags_str from ddtrace.llmobs._constants import ANNOTATIONS_CONTEXT_ID from ddtrace.llmobs._constants import INPUT_DOCUMENTS from ddtrace.llmobs._constants import INPUT_MESSAGES @@ -347,8 +348,20 @@ def flush(cls) -> None: @staticmethod def _patch_integrations() -> None: - """Patch LLM integrations.""" - patch(**{integration: True for integration in SUPPORTED_LLMOBS_INTEGRATIONS.values()}) # type: ignore[arg-type] + """ + Patch LLM integrations. Ensure that we do not ignore DD_TRACE__ENABLED or DD_PATCH_MODULES settings. + """ + integrations_to_patch = {integration: True for integration in SUPPORTED_LLMOBS_INTEGRATIONS.values()} + for module, _ in integrations_to_patch.items(): + env_var = "DD_TRACE_%s_ENABLED" % module.upper() + if env_var in os.environ: + integrations_to_patch[module] = asbool(os.environ[env_var]) + dd_patch_modules = os.getenv("DD_PATCH_MODULES") + dd_patch_modules_to_str = parse_tags_str(dd_patch_modules) + integrations_to_patch.update( + {k: asbool(v) for k, v in dd_patch_modules_to_str.items() if k in SUPPORTED_LLMOBS_INTEGRATIONS.values()} + ) + patch(**integrations_to_patch) # type: ignore[arg-type] log.debug("Patched LLM integrations: %s", list(SUPPORTED_LLMOBS_INTEGRATIONS.values())) @classmethod @@ -386,23 +399,23 @@ def _start_span( if name is None: name = operation_kind span = self.tracer.trace(name, resource=operation_kind, span_type=SpanTypes.LLM) - span.set_tag_str(SPAN_KIND, operation_kind) + span._set_ctx_item(SPAN_KIND, operation_kind) if model_name is not None: - 
span.set_tag_str(MODEL_NAME, model_name) + span._set_ctx_item(MODEL_NAME, model_name) if model_provider is not None: - span.set_tag_str(MODEL_PROVIDER, model_provider) + span._set_ctx_item(MODEL_PROVIDER, model_provider) session_id = session_id if session_id is not None else _get_session_id(span) if session_id is not None: - span.set_tag_str(SESSION_ID, session_id) + span._set_ctx_item(SESSION_ID, session_id) if ml_app is None: ml_app = _get_ml_app(span) - span.set_tag_str(ML_APP, ml_app) + span._set_ctx_item(ML_APP, ml_app) if span.get_tag(PROPAGATED_PARENT_ID_KEY) is None: # For non-distributed traces or spans in the first service of a distributed trace, # The LLMObs parent ID tag is not set at span start time. We need to manually set the parent ID tag now # in these cases to avoid conflicting with the later propagated tags. parent_id = _get_llmobs_parent_id(span) or "undefined" - span.set_tag_str(PARENT_ID_KEY, str(parent_id)) + span._set_ctx_item(PARENT_ID_KEY, str(parent_id)) return span @classmethod @@ -625,7 +638,7 @@ def annotate( cls._tag_metrics(span, metrics) if tags is not None: cls._tag_span_tags(span, tags) - span_kind = span.get_tag(SPAN_KIND) + span_kind = span._get_ctx_item(SPAN_KIND) if parameters is not None: log.warning("Setting parameters is deprecated, please set parameters and other metadata as tags instead.") cls._tag_params(span, parameters) @@ -651,7 +664,7 @@ def _tag_prompt(span, prompt: dict) -> None: """Tags a given LLMObs span with a prompt""" try: validated_prompt = validate_prompt(prompt) - span.set_tag_str(INPUT_PROMPT, safe_json(validated_prompt)) + span._set_ctx_item(INPUT_PROMPT, validated_prompt) except TypeError: log.warning("Failed to validate prompt with error: ", exc_info=True) return @@ -664,7 +677,7 @@ def _tag_params(span: Span, params: Dict[str, Any]) -> None: if not isinstance(params, dict): log.warning("parameters must be a dictionary of key-value pairs.") return - span.set_tag_str(INPUT_PARAMETERS, safe_json(params)) 
+ span._set_ctx_item(INPUT_PARAMETERS, params) @classmethod def _tag_llm_io(cls, span, input_messages=None, output_messages=None): @@ -676,7 +689,7 @@ def _tag_llm_io(cls, span, input_messages=None, output_messages=None): if not isinstance(input_messages, Messages): input_messages = Messages(input_messages) if input_messages.messages: - span.set_tag_str(INPUT_MESSAGES, safe_json(input_messages.messages)) + span._set_ctx_item(INPUT_MESSAGES, input_messages.messages) except TypeError: log.warning("Failed to parse input messages.", exc_info=True) if output_messages is None: @@ -686,7 +699,7 @@ def _tag_llm_io(cls, span, input_messages=None, output_messages=None): output_messages = Messages(output_messages) if not output_messages.messages: return - span.set_tag_str(OUTPUT_MESSAGES, safe_json(output_messages.messages)) + span._set_ctx_item(OUTPUT_MESSAGES, output_messages.messages) except TypeError: log.warning("Failed to parse output messages.", exc_info=True) @@ -700,12 +713,12 @@ def _tag_embedding_io(cls, span, input_documents=None, output_text=None): if not isinstance(input_documents, Documents): input_documents = Documents(input_documents) if input_documents.documents: - span.set_tag_str(INPUT_DOCUMENTS, safe_json(input_documents.documents)) + span._set_ctx_item(INPUT_DOCUMENTS, input_documents.documents) except TypeError: log.warning("Failed to parse input documents.", exc_info=True) if output_text is None: return - span.set_tag_str(OUTPUT_VALUE, safe_json(output_text)) + span._set_ctx_item(OUTPUT_VALUE, str(output_text)) @classmethod def _tag_retrieval_io(cls, span, input_text=None, output_documents=None): @@ -713,7 +726,7 @@ def _tag_retrieval_io(cls, span, input_text=None, output_documents=None): Will be mapped to span's `meta.{input,output}.text` fields. 
""" if input_text is not None: - span.set_tag_str(INPUT_VALUE, safe_json(input_text)) + span._set_ctx_item(INPUT_VALUE, str(input_text)) if output_documents is None: return try: @@ -721,7 +734,7 @@ def _tag_retrieval_io(cls, span, input_text=None, output_documents=None): output_documents = Documents(output_documents) if not output_documents.documents: return - span.set_tag_str(OUTPUT_DOCUMENTS, safe_json(output_documents.documents)) + span._set_ctx_item(OUTPUT_DOCUMENTS, output_documents.documents) except TypeError: log.warning("Failed to parse output documents.", exc_info=True) @@ -731,9 +744,9 @@ def _tag_text_io(cls, span, input_value=None, output_value=None): Will be mapped to span's `meta.{input,output}.values` fields. """ if input_value is not None: - span.set_tag_str(INPUT_VALUE, safe_json(input_value)) + span._set_ctx_item(INPUT_VALUE, str(input_value)) if output_value is not None: - span.set_tag_str(OUTPUT_VALUE, safe_json(output_value)) + span._set_ctx_item(OUTPUT_VALUE, str(output_value)) @staticmethod def _tag_span_tags(span: Span, span_tags: Dict[str, Any]) -> None: @@ -746,12 +759,9 @@ def _tag_span_tags(span: Span, span_tags: Dict[str, Any]) -> None: log.warning("span_tags must be a dictionary of string key - primitive value pairs.") return try: - current_tags_str = span.get_tag(TAGS) - if current_tags_str: - current_tags = json.loads(current_tags_str) - current_tags.update(span_tags) - span_tags = current_tags - span.set_tag_str(TAGS, safe_json(span_tags)) + existing_tags = span._get_ctx_item(TAGS) or {} + existing_tags.update(span_tags) + span._set_ctx_item(TAGS, existing_tags) except Exception: log.warning("Failed to parse tags.", exc_info=True) @@ -763,7 +773,7 @@ def _tag_metadata(span: Span, metadata: Dict[str, Any]) -> None: if not isinstance(metadata, dict): log.warning("metadata must be a dictionary of string key-value pairs.") return - span.set_tag_str(METADATA, safe_json(metadata)) + span._set_ctx_item(METADATA, metadata) @staticmethod def 
_tag_metrics(span: Span, metrics: Dict[str, Any]) -> None: @@ -773,7 +783,7 @@ def _tag_metrics(span: Span, metrics: Dict[str, Any]) -> None: if not isinstance(metrics, dict): log.warning("metrics must be a dictionary of string key - numeric value pairs.") return - span.set_tag_str(METRICS, safe_json(metrics)) + span._set_ctx_item(METRICS, metrics) @classmethod def submit_evaluation( diff --git a/ddtrace/llmobs/_trace_processor.py b/ddtrace/llmobs/_trace_processor.py index b4af0c5ffd1..231d53d7626 100644 --- a/ddtrace/llmobs/_trace_processor.py +++ b/ddtrace/llmobs/_trace_processor.py @@ -1,4 +1,3 @@ -import json from typing import Any from typing import Dict from typing import List @@ -27,7 +26,6 @@ from ddtrace.llmobs._constants import OUTPUT_DOCUMENTS from ddtrace.llmobs._constants import OUTPUT_MESSAGES from ddtrace.llmobs._constants import OUTPUT_VALUE -from ddtrace.llmobs._constants import PARENT_ID_KEY from ddtrace.llmobs._constants import RAGAS_ML_APP_PREFIX from ddtrace.llmobs._constants import RUNNER_IS_INTEGRATION_SPAN_TAG from ddtrace.llmobs._constants import SESSION_ID @@ -37,6 +35,7 @@ from ddtrace.llmobs._utils import _get_ml_app from ddtrace.llmobs._utils import _get_session_id from ddtrace.llmobs._utils import _get_span_name +from ddtrace.llmobs._utils import safe_json log = get_logger(__name__) @@ -62,7 +61,7 @@ def process_trace(self, trace: List[Span]) -> Optional[List[Span]]: def submit_llmobs_span(self, span: Span) -> None: """Generate and submit an LLMObs span event to be sent to LLMObs.""" span_event = None - is_llm_span = span.get_tag(SPAN_KIND) == "llm" + is_llm_span = span._get_ctx_item(SPAN_KIND) == "llm" is_ragas_integration_span = False try: span_event, is_ragas_integration_span = self._llmobs_span_event(span) @@ -77,44 +76,49 @@ def submit_llmobs_span(self, span: Span) -> None: def _llmobs_span_event(self, span: Span) -> Tuple[Dict[str, Any], bool]: """Span event object structure.""" - span_kind = span._meta.pop(SPAN_KIND) + span_kind 
= span._get_ctx_item(SPAN_KIND) + if not span_kind: + raise KeyError("Span kind not found in span context") meta: Dict[str, Any] = {"span.kind": span_kind, "input": {}, "output": {}} - if span_kind in ("llm", "embedding") and span.get_tag(MODEL_NAME) is not None: - meta["model_name"] = span._meta.pop(MODEL_NAME) - meta["model_provider"] = span._meta.pop(MODEL_PROVIDER, "custom").lower() - if span.get_tag(METADATA) is not None: - meta["metadata"] = json.loads(span._meta.pop(METADATA)) - if span.get_tag(INPUT_PARAMETERS): - meta["input"]["parameters"] = json.loads(span._meta.pop(INPUT_PARAMETERS)) - if span_kind == "llm" and span.get_tag(INPUT_MESSAGES) is not None: - meta["input"]["messages"] = json.loads(span._meta.pop(INPUT_MESSAGES)) - if span.get_tag(INPUT_VALUE) is not None: - meta["input"]["value"] = span._meta.pop(INPUT_VALUE) - if span_kind == "llm" and span.get_tag(OUTPUT_MESSAGES) is not None: - meta["output"]["messages"] = json.loads(span._meta.pop(OUTPUT_MESSAGES)) - if span_kind == "embedding" and span.get_tag(INPUT_DOCUMENTS) is not None: - meta["input"]["documents"] = json.loads(span._meta.pop(INPUT_DOCUMENTS)) - if span.get_tag(OUTPUT_VALUE) is not None: - meta["output"]["value"] = span._meta.pop(OUTPUT_VALUE) - if span_kind == "retrieval" and span.get_tag(OUTPUT_DOCUMENTS) is not None: - meta["output"]["documents"] = json.loads(span._meta.pop(OUTPUT_DOCUMENTS)) - if span.get_tag(INPUT_PROMPT) is not None: - prompt_json_str = span._meta.pop(INPUT_PROMPT) + if span_kind in ("llm", "embedding") and span._get_ctx_item(MODEL_NAME) is not None: + meta["model_name"] = span._get_ctx_item(MODEL_NAME) + meta["model_provider"] = (span._get_ctx_item(MODEL_PROVIDER) or "custom").lower() + meta["metadata"] = span._get_ctx_item(METADATA) or {} + if span._get_ctx_item(INPUT_PARAMETERS): + meta["input"]["parameters"] = span._get_ctx_item(INPUT_PARAMETERS) + if span_kind == "llm" and span._get_ctx_item(INPUT_MESSAGES) is not None: + meta["input"]["messages"] = 
span._get_ctx_item(INPUT_MESSAGES) + if span._get_ctx_item(INPUT_VALUE) is not None: + meta["input"]["value"] = safe_json(span._get_ctx_item(INPUT_VALUE)) + if span_kind == "llm" and span._get_ctx_item(OUTPUT_MESSAGES) is not None: + meta["output"]["messages"] = span._get_ctx_item(OUTPUT_MESSAGES) + if span_kind == "embedding" and span._get_ctx_item(INPUT_DOCUMENTS) is not None: + meta["input"]["documents"] = span._get_ctx_item(INPUT_DOCUMENTS) + if span._get_ctx_item(OUTPUT_VALUE) is not None: + meta["output"]["value"] = safe_json(span._get_ctx_item(OUTPUT_VALUE)) + if span_kind == "retrieval" and span._get_ctx_item(OUTPUT_DOCUMENTS) is not None: + meta["output"]["documents"] = span._get_ctx_item(OUTPUT_DOCUMENTS) + if span._get_ctx_item(INPUT_PROMPT) is not None: + prompt_json_str = span._get_ctx_item(INPUT_PROMPT) if span_kind != "llm": log.warning( "Dropping prompt on non-LLM span kind, annotating prompts is only supported for LLM span kinds." ) else: - meta["input"]["prompt"] = json.loads(prompt_json_str) + meta["input"]["prompt"] = prompt_json_str if span.error: - meta[ERROR_MSG] = span.get_tag(ERROR_MSG) - meta[ERROR_STACK] = span.get_tag(ERROR_STACK) - meta[ERROR_TYPE] = span.get_tag(ERROR_TYPE) + meta.update( + { + ERROR_MSG: span.get_tag(ERROR_MSG), + ERROR_STACK: span.get_tag(ERROR_STACK), + ERROR_TYPE: span.get_tag(ERROR_TYPE), + } + ) if not meta["input"]: meta.pop("input") if not meta["output"]: meta.pop("output") - metrics = json.loads(span._meta.pop(METRICS, "{}")) + metrics = span._get_ctx_item(METRICS) or {} ml_app = _get_ml_app(span) is_ragas_integration_span = False @@ -122,10 +126,8 @@ def _llmobs_span_event(self, span: Span) -> Tuple[Dict[str, Any], bool]: if ml_app.startswith(RAGAS_ML_APP_PREFIX): is_ragas_integration_span = True - span.set_tag_str(ML_APP, ml_app) - + span._set_ctx_item(ML_APP, ml_app) parent_id = str(_get_llmobs_parent_id(span) or "undefined") - span._meta.pop(PARENT_ID_KEY, None) llmobs_span_event = { "trace_id": 
"{:x}".format(span.trace_id), @@ -140,7 +142,7 @@ def _llmobs_span_event(self, span: Span) -> Tuple[Dict[str, Any], bool]: } session_id = _get_session_id(span) if session_id is not None: - span.set_tag_str(SESSION_ID, session_id) + span._set_ctx_item(SESSION_ID, session_id) llmobs_span_event["session_id"] = session_id llmobs_span_event["tags"] = self._llmobs_tags( @@ -169,7 +171,7 @@ def _llmobs_tags( tags["session_id"] = session_id if is_ragas_integration_span: tags[RUNNER_IS_INTEGRATION_SPAN_TAG] = "ragas" - existing_tags = span._meta.pop(TAGS, None) + existing_tags = span._get_ctx_item(TAGS) if existing_tags is not None: - tags.update(json.loads(existing_tags)) + tags.update(existing_tags) return ["{}:{}".format(k, v) for k, v in tags.items()] diff --git a/ddtrace/llmobs/_utils.py b/ddtrace/llmobs/_utils.py index 8813788f0a3..c1b1c4a776c 100644 --- a/ddtrace/llmobs/_utils.py +++ b/ddtrace/llmobs/_utils.py @@ -110,8 +110,8 @@ def _get_llmobs_parent_id(span: Span) -> Optional[str]: """Return the span ID of the nearest LLMObs-type span in the span's ancestor tree. In priority order: manually set parent ID tag, nearest LLMObs ancestor, local root's propagated parent ID tag. """ - if span.get_tag(PARENT_ID_KEY): - return span.get_tag(PARENT_ID_KEY) + if span._get_ctx_item(PARENT_ID_KEY): + return span._get_ctx_item(PARENT_ID_KEY) nearest_llmobs_ancestor = _get_nearest_llmobs_ancestor(span) if nearest_llmobs_ancestor: return str(nearest_llmobs_ancestor.span_id) @@ -132,12 +132,12 @@ def _get_ml_app(span: Span) -> str: Return the ML app name for a given span, by checking the span's nearest LLMObs span ancestor. Default to the global config LLMObs ML app name otherwise. 
""" - ml_app = span.get_tag(ML_APP) + ml_app = span._get_ctx_item(ML_APP) if ml_app: return ml_app nearest_llmobs_ancestor = _get_nearest_llmobs_ancestor(span) if nearest_llmobs_ancestor: - ml_app = nearest_llmobs_ancestor.get_tag(ML_APP) + ml_app = nearest_llmobs_ancestor._get_ctx_item(ML_APP) return ml_app or config._llmobs_ml_app or "unknown-ml-app" @@ -146,12 +146,12 @@ def _get_session_id(span: Span) -> Optional[str]: Return the session ID for a given span, by checking the span's nearest LLMObs span ancestor. Default to the span's trace ID. """ - session_id = span.get_tag(SESSION_ID) + session_id = span._get_ctx_item(SESSION_ID) if session_id: return session_id nearest_llmobs_ancestor = _get_nearest_llmobs_ancestor(span) if nearest_llmobs_ancestor: - session_id = nearest_llmobs_ancestor.get_tag(SESSION_ID) + session_id = nearest_llmobs_ancestor._get_ctx_item(SESSION_ID) return session_id diff --git a/ddtrace/llmobs/_writer.py b/ddtrace/llmobs/_writer.py index 6496de96cfe..5a293f05c4e 100644 --- a/ddtrace/llmobs/_writer.py +++ b/ddtrace/llmobs/_writer.py @@ -1,5 +1,4 @@ import atexit -import json from typing import Any from typing import Dict from typing import List @@ -32,6 +31,7 @@ from ddtrace.llmobs._constants import EVP_PROXY_AGENT_ENDPOINT from ddtrace.llmobs._constants import EVP_SUBDOMAIN_HEADER_NAME from ddtrace.llmobs._constants import EVP_SUBDOMAIN_HEADER_VALUE +from ddtrace.llmobs._utils import safe_json logger = get_logger(__name__) @@ -108,11 +108,7 @@ def periodic(self) -> None: self._buffer = [] data = self._data(events) - try: - enc_llm_events = json.dumps(data) - except TypeError: - logger.error("failed to encode %d LLMObs %s events", len(events), self._event_type, exc_info=True) - return + enc_llm_events = safe_json(data) conn = httplib.HTTPSConnection(self._intake, 443, timeout=self._timeout) try: conn.request("POST", self._endpoint, enc_llm_events, self._headers) @@ -197,7 +193,7 @@ def put(self, events: List[LLMObsSpanEvent]): ) return 
self._buffer.extend(events) - self.buffer_size += len(json.dumps(events)) + self.buffer_size += len(safe_json(events)) def encode(self): with self._lock: @@ -207,7 +203,7 @@ def encode(self): self._init_buffer() data = {"_dd.stage": "raw", "_dd.tracer_version": ddtrace.__version__, "event_type": "span", "spans": events} try: - enc_llm_events = json.dumps(data) + enc_llm_events = safe_json(data) logger.debug("encode %d LLMObs span events to be sent", len(events)) except TypeError: logger.error("failed to encode %d LLMObs span events", len(events), exc_info=True) @@ -277,7 +273,7 @@ def stop(self, timeout=None): super(LLMObsSpanWriter, self).stop(timeout=timeout) def enqueue(self, event: LLMObsSpanEvent) -> None: - event_size = len(json.dumps(event)) + event_size = len(safe_json(event)) if event_size >= EVP_EVENT_SIZE_LIMIT: logger.warning( diff --git a/ddtrace/llmobs/decorators.py b/ddtrace/llmobs/decorators.py index 93f329f2889..7e61f9b4e18 100644 --- a/ddtrace/llmobs/decorators.py +++ b/ddtrace/llmobs/decorators.py @@ -172,7 +172,7 @@ def generator_wrapper(*args, **kwargs): func_signature = signature(func) bound_args = func_signature.bind_partial(*args, **kwargs) if _automatic_io_annotation and bound_args.arguments: - LLMObs.annotate(span=span, input_data=bound_args.arguments) + LLMObs.annotate(span=span, input_data=dict(bound_args.arguments)) return yield_from_async_gen(func, span, args, kwargs) @wraps(func) @@ -186,13 +186,13 @@ async def wrapper(*args, **kwargs): func_signature = signature(func) bound_args = func_signature.bind_partial(*args, **kwargs) if _automatic_io_annotation and bound_args.arguments: - LLMObs.annotate(span=span, input_data=bound_args.arguments) + LLMObs.annotate(span=span, input_data=dict(bound_args.arguments)) resp = await func(*args, **kwargs) if ( _automatic_io_annotation and resp and operation_kind != "retrieval" - and span.get_tag(OUTPUT_VALUE) is None + and span._get_ctx_item(OUTPUT_VALUE) is None ): LLMObs.annotate(span=span, 
output_data=resp) return resp @@ -211,7 +211,7 @@ def generator_wrapper(*args, **kwargs): func_signature = signature(func) bound_args = func_signature.bind_partial(*args, **kwargs) if _automatic_io_annotation and bound_args.arguments: - LLMObs.annotate(span=span, input_data=bound_args.arguments) + LLMObs.annotate(span=span, input_data=dict(bound_args.arguments)) try: yield from func(*args, **kwargs) except (StopIteration, GeneratorExit): @@ -234,13 +234,13 @@ def wrapper(*args, **kwargs): func_signature = signature(func) bound_args = func_signature.bind_partial(*args, **kwargs) if _automatic_io_annotation and bound_args.arguments: - LLMObs.annotate(span=span, input_data=bound_args.arguments) + LLMObs.annotate(span=span, input_data=dict(bound_args.arguments)) resp = func(*args, **kwargs) if ( _automatic_io_annotation and resp and operation_kind != "retrieval" - and span.get_tag(OUTPUT_VALUE) is None + and span._get_ctx_item(OUTPUT_VALUE) is None ): LLMObs.annotate(span=span, output_data=resp) return resp diff --git a/ddtrace/profiling/collector/_lock.py b/ddtrace/profiling/collector/_lock.py index f2d1289d73b..4ee0e692fac 100644 --- a/ddtrace/profiling/collector/_lock.py +++ b/ddtrace/profiling/collector/_lock.py @@ -338,12 +338,12 @@ def __init__( self.export_libdd_enabled = False @abc.abstractmethod - def _get_original(self): + def _get_patch_target(self): # type: (...) -> typing.Any pass @abc.abstractmethod - def _set_original( + def _set_patch_target( self, value, # type: typing.Any ): @@ -367,7 +367,7 @@ def patch(self): """Patch the module for tracking lock allocation.""" # We only patch the lock from the `threading` module. # Nobody should use locks from `_thread`; if they do so, then it's deliberate and we don't profile. 
- self.original = self._get_original() + self._original = self._get_patch_target() def _allocate_lock(wrapped, instance, args, kwargs): lock = wrapped(*args, **kwargs) @@ -381,9 +381,9 @@ def _allocate_lock(wrapped, instance, args, kwargs): self.export_libdd_enabled, ) - self._set_original(FunctionWrapper(self.original, _allocate_lock)) + self._set_patch_target(FunctionWrapper(self._original, _allocate_lock)) def unpatch(self): # type: (...) -> None """Unpatch the threading module for tracking lock allocation.""" - self._set_original(self.original) + self._set_patch_target(self._original) diff --git a/ddtrace/profiling/collector/_memalloc.c b/ddtrace/profiling/collector/_memalloc.c index 3b7f7db293f..3876517baaf 100644 --- a/ddtrace/profiling/collector/_memalloc.c +++ b/ddtrace/profiling/collector/_memalloc.c @@ -42,47 +42,95 @@ static PyObject* object_string = NULL; #define ALLOC_TRACKER_MAX_COUNT UINT64_MAX +// The data coordination primitives in this and related files are related to a crash we started seeing. +// We don't have a precise understanding of the causal factors within the runtime that lead to this condition, +// since the GIL alone was sufficient in the past for preventing this issue. +// We add an option here to _add_ a crash, in order to observe this condition in a future diagnostic iteration. +// **This option is _intended_ to crash the Python process** do not use without a good reason! 
+static char g_crash_on_mutex_pass_str[] = "_DD_PROFILING_MEMALLOC_CRASH_ON_MUTEX_PASS"; +static const char* g_truthy_values[] = { "1", "true", "yes", "on", "enable", "enabled", NULL }; // NB the sentinel NULL +static memlock_t g_memalloc_lock; + static alloc_tracker_t* global_alloc_tracker; +// This is a multiplatform way to define an operation to happen at static initialization time +static void +memalloc_init(void); + +#ifdef _MSC_VER +#pragma section(".CRT$XCU", read) +__declspec(allocate(".CRT$XCU")) void (*memalloc_init_func)(void) = memalloc_init; + +#elif defined(__GNUC__) || defined(__clang__) +__attribute__((constructor)) +#else +#error Unsupported compiler +#endif +static void +memalloc_init() +{ + // Check if we should crash the process on mutex pass + char* crash_on_mutex_pass_str = getenv(g_crash_on_mutex_pass_str); + bool crash_on_mutex_pass = false; + if (crash_on_mutex_pass_str) { + for (int i = 0; g_truthy_values[i]; i++) { + if (strcmp(crash_on_mutex_pass_str, g_truthy_values[i]) == 0) { + crash_on_mutex_pass = true; + break; + } + } + } + memlock_init(&g_memalloc_lock, crash_on_mutex_pass); +} + static void memalloc_add_event(memalloc_context_t* ctx, void* ptr, size_t size) { - /* Do not overflow; just ignore the new events if we ever reach that point */ - if (global_alloc_tracker->alloc_count >= ALLOC_TRACKER_MAX_COUNT) + uint64_t alloc_count = atomic_add_clamped(&global_alloc_tracker->alloc_count, 1, ALLOC_TRACKER_MAX_COUNT); + + /* Return if we've reached the maximum number of allocations */ + if (alloc_count == 0) return; - global_alloc_tracker->alloc_count++; + // Return if we can't take the guard + if (!memalloc_take_guard()) { + return; + } - /* Avoid loops */ - if (memalloc_get_reentrant()) + // In this implementation, the `global_alloc_tracker` isn't intrinsically protected. Before we read or modify, + // take the lock. 
The count of allocations is already forward-attributed elsewhere, so if we can't take the lock + // there's nothing to do. + if (!memlock_trylock(&g_memalloc_lock)) { return; + } /* Determine if we can capture or if we need to sample */ if (global_alloc_tracker->allocs.count < ctx->max_events) { - /* set a barrier so we don't loop as getting a traceback allocates memory */ - memalloc_set_reentrant(true); /* Buffer is not full, fill it */ traceback_t* tb = memalloc_get_traceback(ctx->max_nframe, ptr, size, ctx->domain); - memalloc_set_reentrant(false); - if (tb) + if (tb) { traceback_array_append(&global_alloc_tracker->allocs, tb); + } } else { /* Sampling mode using a reservoir sampling algorithm: replace a random * traceback with this one */ - uint64_t r = random_range(global_alloc_tracker->alloc_count); + uint64_t r = random_range(alloc_count); - if (r < ctx->max_events) { - /* set a barrier so we don't loop as getting a traceback allocates memory */ - memalloc_set_reentrant(true); + // In addition to event size, need to check that the tab is in a good state + if (r < ctx->max_events && global_alloc_tracker->allocs.tab != NULL) { /* Replace a random traceback with this one */ traceback_t* tb = memalloc_get_traceback(ctx->max_nframe, ptr, size, ctx->domain); - memalloc_set_reentrant(false); + + // Need to check not only that the tb returned if (tb) { traceback_free(global_alloc_tracker->allocs.tab[r]); global_alloc_tracker->allocs.tab[r] = tb; } } } + + memlock_unlock(&g_memalloc_lock); + memalloc_yield_guard(); } static void @@ -98,12 +146,6 @@ memalloc_free(void* ctx, void* ptr) alloc->free(alloc->ctx, ptr); } -#ifdef _PY37_AND_LATER -Py_tss_t memalloc_reentrant_key = Py_tss_NEEDS_INIT; -#else -int memalloc_reentrant_key = -1; -#endif - static void* memalloc_alloc(int use_calloc, void* ctx, size_t nelem, size_t elsize) { @@ -233,7 +275,10 @@ memalloc_start(PyObject* Py_UNUSED(module), PyObject* args) global_memalloc_ctx.domain = PYMEM_DOMAIN_OBJ; - 
global_alloc_tracker = alloc_tracker_new(); + if (memlock_trylock(&g_memalloc_lock)) { + global_alloc_tracker = alloc_tracker_new(); + memlock_unlock(&g_memalloc_lock); + } PyMem_GetAllocator(PYMEM_DOMAIN_OBJ, &global_memalloc_ctx.pymem_allocator_obj); PyMem_SetAllocator(PYMEM_DOMAIN_OBJ, &alloc); @@ -258,8 +303,11 @@ memalloc_stop(PyObject* Py_UNUSED(module), PyObject* Py_UNUSED(args)) PyMem_SetAllocator(PYMEM_DOMAIN_OBJ, &global_memalloc_ctx.pymem_allocator_obj); memalloc_tb_deinit(); - alloc_tracker_free(global_alloc_tracker); - global_alloc_tracker = NULL; + if (memlock_trylock(&g_memalloc_lock)) { + alloc_tracker_free(global_alloc_tracker); + global_alloc_tracker = NULL; + memlock_unlock(&g_memalloc_lock); + } memalloc_heap_tracker_deinit(); @@ -310,9 +358,15 @@ iterevents_new(PyTypeObject* type, PyObject* Py_UNUSED(args), PyObject* Py_UNUSE if (!iestate) return NULL; - iestate->alloc_tracker = global_alloc_tracker; /* reset the current traceback list */ - global_alloc_tracker = alloc_tracker_new(); + if (memlock_trylock(&g_memalloc_lock)) { + iestate->alloc_tracker = global_alloc_tracker; + global_alloc_tracker = alloc_tracker_new(); + memlock_unlock(&g_memalloc_lock); + } else { + Py_TYPE(iestate)->tp_free(iestate); + return NULL; + } iestate->seq_index = 0; PyObject* iter_and_count = PyTuple_New(3); @@ -326,8 +380,11 @@ iterevents_new(PyTypeObject* type, PyObject* Py_UNUSED(args), PyObject* Py_UNUSE static void iterevents_dealloc(IterEventsState* iestate) { - alloc_tracker_free(iestate->alloc_tracker); - Py_TYPE(iestate)->tp_free(iestate); + if (memlock_trylock(&g_memalloc_lock)) { + alloc_tracker_free(iestate->alloc_tracker); + Py_TYPE(iestate)->tp_free(iestate); + memlock_unlock(&g_memalloc_lock); + } } static PyObject* @@ -442,20 +499,6 @@ PyInit__memalloc(void) return NULL; } -#ifdef _PY37_AND_LATER - if (PyThread_tss_create(&memalloc_reentrant_key) != 0) { -#else - memalloc_reentrant_key = PyThread_create_key(); - if (memalloc_reentrant_key == -1) { 
-#endif -#ifdef MS_WINDOWS - PyErr_SetFromWindowsErr(0); -#else - PyErr_SetFromErrno(PyExc_OSError); -#endif - return NULL; - } - if (PyType_Ready(&MemallocIterEvents_Type) < 0) return NULL; Py_INCREF((PyObject*)&MemallocIterEvents_Type); diff --git a/ddtrace/profiling/collector/_memalloc_heap.c b/ddtrace/profiling/collector/_memalloc_heap.c index d6531d7b095..d2a5cc29eee 100644 --- a/ddtrace/profiling/collector/_memalloc_heap.c +++ b/ddtrace/profiling/collector/_memalloc_heap.c @@ -9,13 +9,13 @@ typedef struct { /* Granularity of the heap profiler in bytes */ - uint32_t sample_size; + uint64_t sample_size; /* Current sample size of the heap profiler in bytes */ - uint32_t current_sample_size; + uint64_t current_sample_size; /* Tracked allocations */ traceback_array_t allocs; /* Allocated memory counter in bytes */ - uint32_t allocated_memory; + uint64_t allocated_memory; /* True if the heap tracker is frozen */ bool frozen; /* Contains the ongoing heap allocation/deallocation while frozen */ @@ -26,8 +26,42 @@ typedef struct } freezer; } heap_tracker_t; +static char g_crash_on_mutex_pass_str[] = "_DD_PROFILING_MEMHEAP_CRASH_ON_MUTEX_PASS"; +static const char* g_truthy_values[] = { "1", "true", "yes", "on", "enable", "enabled", NULL }; // NB the sentinel NULL +static memlock_t g_memheap_lock; + static heap_tracker_t global_heap_tracker; +// This is a multiplatform way to define an operation to happen at static initialization time +static void +memheap_init(void); + +#ifdef _MSC_VER +#pragma section(".CRT$XCU", read) +__declspec(allocate(".CRT$XCU")) void (*memheap_init_func)(void) = memheap_init; + +#elif defined(__GNUC__) || defined(__clang__) +__attribute__((constructor)) +#else +#error Unsupported compiler +#endif +static void +memheap_init() +{ + // Check if we should crash the process on mutex pass + char* crash_on_mutex_pass_str = getenv(g_crash_on_mutex_pass_str); + bool crash_on_mutex_pass = false; + if (crash_on_mutex_pass_str) { + for (int i = 0; 
g_truthy_values[i]; i++) { + if (strcmp(crash_on_mutex_pass_str, g_truthy_values[i]) == 0) { + crash_on_mutex_pass = true; + break; + } + } + } + memlock_init(&g_memheap_lock, crash_on_mutex_pass); +} + static uint32_t heap_tracker_next_sample_size(uint32_t sample_size) { @@ -119,20 +153,30 @@ heap_tracker_thaw(heap_tracker_t* heap_tracker) void memalloc_heap_tracker_init(uint32_t sample_size) { - heap_tracker_init(&global_heap_tracker); - global_heap_tracker.sample_size = sample_size; - global_heap_tracker.current_sample_size = heap_tracker_next_sample_size(sample_size); + + if (memlock_trylock(&g_memheap_lock)) { + heap_tracker_init(&global_heap_tracker); + global_heap_tracker.sample_size = sample_size; + global_heap_tracker.current_sample_size = heap_tracker_next_sample_size(sample_size); + memlock_unlock(&g_memheap_lock); + } } void memalloc_heap_tracker_deinit(void) { - heap_tracker_wipe(&global_heap_tracker); + if (memlock_trylock(&g_memheap_lock)) { + heap_tracker_wipe(&global_heap_tracker); + memlock_unlock(&g_memheap_lock); + } } void memalloc_heap_untrack(void* ptr) { + if (!memlock_trylock(&g_memheap_lock)) { + return; + } if (global_heap_tracker.frozen) { /* Check that we still have space to store the free. If we don't have enough space, we ignore the untrack. That's sad as there is a change @@ -144,6 +188,8 @@ memalloc_heap_untrack(void* ptr) ptr_array_append(&global_heap_tracker.freezer.frees, ptr); } else heap_tracker_untrack_thawed(&global_heap_tracker, ptr); + + memlock_unlock(&g_memheap_lock); } /* Track a memory allocation in the heap profiler. 
@@ -157,26 +203,36 @@ memalloc_heap_track(uint16_t max_nframe, void* ptr, size_t size, PyMemAllocatorD return false; /* Check for overflow */ - global_heap_tracker.allocated_memory = Py_MIN(global_heap_tracker.allocated_memory + size, MAX_HEAP_SAMPLE_SIZE); + uint64_t res = atomic_add_clamped(&global_heap_tracker.allocated_memory, size, MAX_HEAP_SAMPLE_SIZE); + if (0 == res) + return false; + + // Take the lock + if (!memlock_trylock(&g_memheap_lock)) { + return false; + } /* Check if we have enough sample or not */ - if (global_heap_tracker.allocated_memory < global_heap_tracker.current_sample_size) + if (global_heap_tracker.allocated_memory < global_heap_tracker.current_sample_size) { + memlock_unlock(&g_memheap_lock); return false; + } /* Check if we can add more samples: the sum of the freezer + alloc tracker cannot be greater than what the alloc tracker can handle: when the alloc tracker is thawed, all the allocs in the freezer will be moved there!*/ - if ((global_heap_tracker.freezer.allocs.count + global_heap_tracker.allocs.count) >= TRACEBACK_ARRAY_MAX_COUNT) + if (global_heap_tracker.freezer.allocs.count + global_heap_tracker.allocs.count >= TRACEBACK_ARRAY_MAX_COUNT) { + memlock_unlock(&g_memheap_lock); return false; + } /* Avoid loops */ - if (memalloc_get_reentrant()) + if (!memalloc_take_guard()) { + memlock_unlock(&g_memheap_lock); return false; + } - memalloc_set_reentrant(true); traceback_t* tb = memalloc_get_traceback(max_nframe, ptr, global_heap_tracker.allocated_memory, domain); - memalloc_set_reentrant(false); - if (tb) { if (global_heap_tracker.frozen) traceback_array_append(&global_heap_tracker.freezer.allocs, tb); @@ -189,15 +245,23 @@ memalloc_heap_track(uint16_t max_nframe, void* ptr, size_t size, PyMemAllocatorD /* Compute the new target sample size */ global_heap_tracker.current_sample_size = heap_tracker_next_sample_size(global_heap_tracker.sample_size); + memalloc_yield_guard(); + memlock_unlock(&g_memheap_lock); return true; } + 
memalloc_yield_guard(); + memlock_unlock(&g_memheap_lock); return false; } PyObject* memalloc_heap() { + if (!memlock_trylock(&g_memheap_lock)) { + return NULL; + } + heap_tracker_freeze(&global_heap_tracker); PyObject* heap_list = PyList_New(global_heap_tracker.allocs.count); @@ -213,5 +277,6 @@ memalloc_heap() heap_tracker_thaw(&global_heap_tracker); + memlock_unlock(&g_memheap_lock); return heap_list; } diff --git a/ddtrace/profiling/collector/_memalloc_reentrant.c b/ddtrace/profiling/collector/_memalloc_reentrant.c new file mode 100644 index 00000000000..d360d19fb30 --- /dev/null +++ b/ddtrace/profiling/collector/_memalloc_reentrant.c @@ -0,0 +1,3 @@ +#include "_memalloc_reentrant.h" + +bool _MEMALLOC_ON_THREAD = false; diff --git a/ddtrace/profiling/collector/_memalloc_reentrant.h b/ddtrace/profiling/collector/_memalloc_reentrant.h index 5c8a552294e..cb4aa246961 100644 --- a/ddtrace/profiling/collector/_memalloc_reentrant.h +++ b/ddtrace/profiling/collector/_memalloc_reentrant.h @@ -1,50 +1,188 @@ #ifndef _DDTRACE_MEMALLOC_REENTRANT_H #define _DDTRACE_MEMALLOC_REENTRANT_H -#include "_pymacro.h" +#ifdef _WIN32 +#include +#else +#define _POSIX_C_SOURCE 200809L +#include +#include +#include +#include +#include +#endif #include +#include +#include -#ifndef _PY37_AND_LATER -#include +// Cross-platform macro for defining thread-local storage +// NB - we use dynamic-global on Linux because the others are problematic +#if defined(_MSC_VER) // Check for MSVC compiler +#define MEMALLOC_TLS __declspec(thread) +#elif defined(__GNUC__) || defined(__clang__) // GCC or Clang +#define MEMALLOC_TLS __attribute__((tls_model("global-dynamic"))) __thread +#else +#error "Unsupported compiler for thread-local storage" #endif +extern bool _MEMALLOC_ON_THREAD; + +// This is a saturating atomic add for 32- and 64-bit platforms. +// In order to implement the saturation logic, use a CAS loop. 
+// From the GCC docs: +// "‘__atomic’ builtins can be used with any integral scalar or pointer type that is 1, 2, 4, or 8 bytes in length" +// From the MSVC docs: +// "_InterlockedCompareExchange64 is available on x86 systems running on any Pentium architecture; it is not +// available on 386 or 486 architectures." +static inline uint64_t +atomic_add_clamped(uint64_t* target, uint64_t amount, uint64_t max) +{ + // In reality, there's virtually no scenario in which this deadlocks. Just the same, give it some arbitrarily high + // limit in order to prevent unpredicted deadlocks. 96 is chosen since it's the number of cores on the largest + // consumer CPU generally used by our customers. + int attempts = 96; + while (attempts--) { + uint64_t old_val = (volatile uint64_t) * target; -#ifdef _PY37_AND_LATER -extern Py_tss_t memalloc_reentrant_key; + // CAS loop + saturation check + uint64_t new_val = old_val + amount; + if (new_val > max || new_val < old_val) { + return 0; + } +#if defined(_MSC_VER) + uint64_t prev_val = + (uint64_t)InterlockedCompareExchange64((volatile LONG64*)target, (LONG64)new_val, (LONG64)old_val); + if (prev_val == old_val) { + return new_val; + } +#elif defined(__clang__) || defined(__GNUC__) + if (atomic_compare_exchange_strong_explicit( + (_Atomic uint64_t*)target, &old_val, new_val, memory_order_seq_cst, memory_order_seq_cst)) { + return new_val; + } #else -extern int memalloc_reentrant_key; +#error "Unsupported compiler for atomic operations" #endif + // If we reach here, CAS failed; another thread changed `target` + // Retry until success or until we detect max. 
+ } -/* Any non-NULL pointer can be used */ -#define _MEMALLOC_REENTRANT_VALUE Py_True + return 0; +} -static inline void -memalloc_set_reentrant(bool reentrant) +// Opaque lock type +typedef struct +{ +#ifdef _WIN32 + HANDLE mutex; +#else + pthread_mutex_t mutex; +#endif +} memlock_t; + +// Global setting; if a lock fails to be acquired, crash +static bool g_crash_on_mutex_pass = false; + +// Generic initializer +static inline bool +memlock_init(memlock_t* lock, bool crash_on_pass) +{ + if (!lock) + return false; + + g_crash_on_mutex_pass = crash_on_pass; + +#ifdef _WIN32 + lock->mutex = CreateMutex(NULL, FALSE, NULL); + return lock->mutex != NULL; +#else + // For POSIX systems, we make sure to use an ERRORCHECK type mutex, since it pushes some of the state checking + // down to the implementation. + pthread_mutexattr_t attr; + pthread_mutexattr_init(&attr); + pthread_mutexattr_settype(&attr, PTHREAD_MUTEX_ERRORCHECK); + return pthread_mutex_init(&lock->mutex, &attr) == 0; +#endif +} + +// Unlock function +static inline bool +memlock_unlock(memlock_t* lock) { - if (reentrant) -#ifdef _PY37_AND_LATER - PyThread_tss_set(&memalloc_reentrant_key, _MEMALLOC_REENTRANT_VALUE); + if (!lock) + return false; + +#ifdef _WIN32 + return ReleaseMutex(lock->mutex); #else - PyThread_set_key_value(memalloc_reentrant_key, _MEMALLOC_REENTRANT_VALUE); + return pthread_mutex_unlock(&lock->mutex) == 0; +#endif +} + +// trylock function +static inline bool +memlock_trylock(memlock_t* lock) +{ + if (!lock) + return false; + +#ifdef __linux__ + // On Linux, we need to make sure we didn't just fork + // pthreads will guarantee the lock is consistent, but we at least need to clear it + static pid_t my_pid = 0; + if (my_pid == 0) { + my_pid = getpid(); + } else if (my_pid != getpid()) { + // We've forked, so we need to free the lock + memlock_unlock(lock); + my_pid = getpid(); + } #endif - else -#ifdef _PY37_AND_LATER - PyThread_tss_set(&memalloc_reentrant_key, NULL); + +#ifdef _WIN32 + bool
result = WAIT_OBJECT_0 == WaitForSingleObject(lock->mutex, 0); // 0ms timeout -> no wait #else - bool result = 0 == pthread_mutex_trylock(&lock->mutex); #endif + if (!result && g_crash_on_mutex_pass) { + // segfault + int* p = NULL; + *p = 0; + abort(); // should never reach here + } + + return result; } +// Cleanup function static inline bool -memalloc_get_reentrant(void) +memlock_destroy(memlock_t* lock) { -#ifdef _PY37_AND_LATER - if (PyThread_tss_get(&memalloc_reentrant_key)) + if (!lock) + return false; + +#ifdef _WIN32 + return CloseHandle(lock->mutex); #else - if (PyThread_get_key_value(memalloc_reentrant_key)) + return 0 == pthread_mutex_destroy(&lock->mutex); #endif - return true; +} - return false; +static inline bool +memalloc_take_guard() +{ + // Ordinarily, a process-wide semaphore would require a CAS, but since this is thread-local we can just set it. + if (_MEMALLOC_ON_THREAD) + return false; + _MEMALLOC_ON_THREAD = true; + return true; +} + +static inline void +memalloc_yield_guard(void) +{ + // Ideally, we'd actually capture the old state within an object and restore it, but since this is + // a coarse-grained lock, we just set it to false.
+ _MEMALLOC_ON_THREAD = false; } #endif diff --git a/ddtrace/profiling/collector/_memalloc_tb.c b/ddtrace/profiling/collector/_memalloc_tb.c index ba79021f719..bb265fe08d5 100644 --- a/ddtrace/profiling/collector/_memalloc_tb.c +++ b/ddtrace/profiling/collector/_memalloc_tb.c @@ -87,6 +87,9 @@ memalloc_tb_deinit(void) void traceback_free(traceback_t* tb) { + if (!tb) + return; + for (uint16_t nframe = 0; nframe < tb->nframe; nframe++) { Py_DECREF(tb->frames[nframe].filename); Py_DECREF(tb->frames[nframe].name); @@ -197,11 +200,7 @@ memalloc_get_traceback(uint16_t max_nframe, void* ptr, size_t size, PyMemAllocat traceback->size = size; traceback->ptr = ptr; -#ifdef _PY37_AND_LATER traceback->thread_id = PyThread_get_thread_ident(); -#else - traceback->thread_id = tstate->thread_id; -#endif traceback->domain = domain; diff --git a/ddtrace/profiling/collector/_pymacro.h b/ddtrace/profiling/collector/_pymacro.h index e71ed6888b9..aa31c3d4cc1 100644 --- a/ddtrace/profiling/collector/_pymacro.h +++ b/ddtrace/profiling/collector/_pymacro.h @@ -13,8 +13,4 @@ #define _PY38 #endif -#if PY_VERSION_HEX >= 0x03070000 -#define _PY37_AND_LATER -#endif - #endif diff --git a/ddtrace/profiling/collector/asyncio.py b/ddtrace/profiling/collector/asyncio.py index af57db3d3ad..fe5b63ab8ce 100644 --- a/ddtrace/profiling/collector/asyncio.py +++ b/ddtrace/profiling/collector/asyncio.py @@ -36,11 +36,11 @@ def _start_service(self): self._asyncio_module = asyncio return super(AsyncioLockCollector, self)._start_service() - def _get_original(self): + def _get_patch_target(self): # type: (...) -> typing.Any return self._asyncio_module.Lock - def _set_original( + def _set_patch_target( self, value # type: typing.Any ): # type: (...) 
-> None diff --git a/ddtrace/profiling/collector/pytorch.py b/ddtrace/profiling/collector/pytorch.py new file mode 100644 index 00000000000..3d9e636871d --- /dev/null +++ b/ddtrace/profiling/collector/pytorch.py @@ -0,0 +1,211 @@ +from __future__ import absolute_import + +import abc +import logging +import random +import typing + +import wrapt + +from ddtrace._trace.tracer import Tracer +from ddtrace.internal.datadog.profiling import ddup +from ddtrace.profiling import _threading +from ddtrace.profiling import collector +from ddtrace.profiling.recorder import Recorder +from ddtrace.settings.profiling import config + + +LOG = logging.getLogger(__name__) + + +class _WrappedTorchProfiler(wrapt.ObjectProxy): + def __init__( + self, + wrapped: typing.Any, + recorder: Recorder, + tracer: typing.Optional[Tracer], + ) -> None: + wrapt.ObjectProxy.__init__(self, wrapped) + self.on_trace_ready = handle_torch_trace + self._self_recorder = recorder + self._self_tracer = tracer + + +class MLProfilerCollector(collector.CaptureSamplerCollector): + """Record ML framework (i.e. pytorch) profiler usage.""" + + def __init__(self, recorder=None): + super().__init__(recorder) + self.tracer = None + # Holds the pytorch profiler object which is wrapped by this class + self._original: typing.Any = None + + @abc.abstractmethod + def _get_patch_target(self): + # type: (...) -> typing.Any + pass + + @abc.abstractmethod + def _set_patch_target( + self, + value, # type: typing.Any + ): + # type: (...) -> None + pass + + def _start_service(self): + # type: (...) -> None + """Start collecting framework profiler usage.""" + try: + import torch + except ImportError as e: + raise collector.CollectorUnavailable(e) + self._torch_module = torch + self.patch() + super()._start_service() + + def _stop_service(self): + # type: (...) -> None + """Stop collecting framework profiler usage.""" + super()._stop_service() + self.unpatch() + + def patch(self): + # type: (...) 
-> None + """Patch the module for tracking profiling data.""" + # We only patch the profile call from the `torch.profiler` module. + self._original = self._get_patch_target() + + def profiler_init(wrapped, instance, args, kwargs): + profiler = wrapped(*args, **kwargs) + return self.PROFILED_TORCH_CLASS( + profiler, + self.recorder, + self.tracer, + ) + + self._set_patch_target(wrapt.FunctionWrapper(self._original, profiler_init)) + + def unpatch(self): + # type: (...) -> None + """Unpatch the torch.profiler module for tracking profiling data.""" + self._set_patch_target(self._original) + + +class TorchProfilerCollector(MLProfilerCollector): + """Monkey patch torch.profiler.profile usage.""" + + PROFILED_TORCH_CLASS = _WrappedTorchProfiler + + def __init__(self, recorder=None): + super().__init__(recorder) + + def _get_patch_target(self): + # type: (...) -> typing.Any + return self._torch_module.profiler.profile + + def _set_patch_target( + self, value # type: typing.Any + ): + # type: (...) -> None + self._torch_module.profiler.profile = value + + +def handle_torch_trace(prof): + NANOS_PER_MICROSECOND = 1e3 + LOG.debug("handle_torch_trace called") + events = prof.events() + if len(events) == 0: + return + + # need an upper bound of events collected, can be adjusted based on profile size. + # Sadly, there is no way AFAICT to tell the PyTorch profiler itself to limit the num of samples. + # We truncate to keep the uploaded profile to a reasonable size. + # For now, experiment with a default of 1_000_000 if nothing is set. + # TODO, better values here. + collection_fraction = 1.0 + num_events_to_report = min(len(events), config.pytorch.events_limit or 1_000_000) + if num_events_to_report < len(events): + LOG.debug("Dropped events. num_events_to_report %d. 
len(events): %d", num_events_to_report, len(events)) + collection_fraction = num_events_to_report / len(events) + + empty_events_count = 0 + + # earlier versions use microsecond, later versions use nanosecond + kineto_results = prof.profiler.kineto_results + if hasattr(kineto_results, "trace_start_ns"): + trace_start_ns = kineto_results.trace_start_ns() + elif hasattr(kineto_results, "trace_start_us"): + trace_start_ns = kineto_results.trace_start_us() * NANOS_PER_MICROSECOND + else: + raise AttributeError("Neither trace_start_ns nor trace_start_us exists") + + for e in events: + if collection_fraction < random.random(): # nosec: used for sampling, not security + continue + + handle = ddup.SampleHandle() + data_added = False + + # cpu time sample + if e.cpu_time > 0: + data_added = True + handle.push_cputime(int(e.cpu_time * NANOS_PER_MICROSECOND), e.count) + + # gpu time sample - both device_time and cuda_time are in microseconds + if hasattr(e, "device_time") and e.device_time > 0: + data_added = True + time_elapsed = int(e.device_time * NANOS_PER_MICROSECOND) + handle.push_gpu_gputime(time_elapsed, e.count) + elif hasattr(e, "cuda_time") and e.cuda_time > 0: + data_added = True + time_elapsed = int(e.cuda_time * NANOS_PER_MICROSECOND) + handle.push_gpu_gputime(time_elapsed, e.count) + + # gpu flops sample + if e.flops is not None and e.flops > 0: + data_added = True + handle.push_gpu_flops(e.flops, e.count) + + # GPU memory usage + # earlier versions of torch use cuda_memory_usage, recent versions use device_memory_usage + if hasattr(e, "device_memory_usage") and e.device_memory_usage is not None and e.device_memory_usage > 0: + data_added = True + handle.push_gpu_memory(e.device_memory_usage, e.count) + elif hasattr(e, "cuda_memory_usage") and e.cuda_memory_usage is not None and e.cuda_memory_usage > 0: + data_added = True + handle.push_gpu_memory(e.cuda_memory_usage, e.count) + + # If there is data, flush it to the profile. 
+ # Otherwise, do nothing and the sample object will be dropped when it goes out of scope + if data_added: + handle.push_frame(e.name, "unknown-file", 0, 0) + # Pushing pseudoframes for the device name ("device.CPU" or "device.CUDA") + # onto the stack allows differentiation of pytorch frames from other profiling frames + # in the flame graph. + # Note that stacks go root last, so this goes at the end + handle.push_frame("PYTORCH_" + str(e.device_type), "unknown-file", 0, 0) + + handle.push_gpu_device_name("cuda " + str(e.device_index)) + + if str(e.device_type).startswith("DeviceType.CPU"): + # There is a known issue with getting thread ids and names from pytorch. + # If we can't get one, just use a default name. + handle.push_threadinfo( + e.thread, + _threading.get_thread_native_id(e.thread), + _threading.get_thread_name(e.thread) or "PYTORCH-CPU-THREAD-" + str(e.thread), + ) + elif str(e.device_type).startswith("DeviceType.CUDA"): + handle.push_threadinfo( + e.thread, _threading.get_thread_native_id(e.thread), "PYTORCH-CUDA-" + str(e.device_index) + ) + else: + raise AttributeError(f"Unexpected device_type {e.device_type}") + + handle.push_absolute_ns(int(trace_start_ns + e.time_range.end * NANOS_PER_MICROSECOND)) + handle.flush_sample() + else: + if empty_events_count % 1000 == 0: + LOG.debug("%d events with no data to record: %s", empty_events_count, e) + empty_events_count += 1 diff --git a/ddtrace/profiling/collector/stack.pyx b/ddtrace/profiling/collector/stack.pyx index f3758d13989..c7ba1ec3e83 100644 --- a/ddtrace/profiling/collector/stack.pyx +++ b/ddtrace/profiling/collector/stack.pyx @@ -157,7 +157,11 @@ from cpython.ref cimport Py_DECREF cdef extern from "": PyObject* _PyThread_CurrentFrames() -IF PY_VERSION_HEX >= 0x030b0000: +IF PY_VERSION_HEX >= 0x30d0000: + cdef extern from "": + PyObject* _PyThread_CurrentExceptions() + +ELIF PY_VERSION_HEX >= 0x030b0000: cdef extern from "": PyObject* _PyThread_CurrentExceptions() diff --git 
a/ddtrace/profiling/collector/threading.py b/ddtrace/profiling/collector/threading.py index 86daee689f6..3700c145312 100644 --- a/ddtrace/profiling/collector/threading.py +++ b/ddtrace/profiling/collector/threading.py @@ -32,11 +32,11 @@ class ThreadingLockCollector(_lock.LockCollector): PROFILED_LOCK_CLASS = _ProfiledThreadingLock - def _get_original(self): + def _get_patch_target(self): # type: (...) -> typing.Any return threading.Lock - def _set_original( + def _set_patch_target( self, value # type: typing.Any ): # type: (...) -> None diff --git a/ddtrace/profiling/exporter/http.py b/ddtrace/profiling/exporter/http.py index f23e12acb5a..6700e584ade 100644 --- a/ddtrace/profiling/exporter/http.py +++ b/ddtrace/profiling/exporter/http.py @@ -15,7 +15,6 @@ from ddtrace.internal import agent from ddtrace.internal import runtime from ddtrace.internal.processor.endpoint_call_counter import EndpointCallCounterProcessor -from ddtrace.internal.runtime import container from ddtrace.internal.utils.retry import fibonacci_backoff_with_jitter from ddtrace.profiling import exporter from ddtrace.profiling import recorder # noqa:F401 @@ -67,7 +66,6 @@ def __init__( self.version: typing.Optional[str] = version self.tags: typing.Dict[str, str] = tags if tags is not None else {} self.max_retry_delay: typing.Optional[float] = max_retry_delay - self._container_info: typing.Optional[container.CGroupInfo] = container.get_container_info() self.endpoint_call_counter_span_processor: typing.Optional[ EndpointCallCounterProcessor ] = endpoint_call_counter_span_processor @@ -183,9 +181,6 @@ def export( else: headers = {} - container.update_headers_with_container_info(headers, self._container_info) - container.update_header_with_external_info(headers) - profile, libs = super(PprofHTTPExporter, self).export(events, start_time_ns, end_time_ns) pprof = io.BytesIO() with gzip.GzipFile(fileobj=pprof, mode="wb") as gz: diff --git a/ddtrace/profiling/exporter/pprof.pyx 
b/ddtrace/profiling/exporter/pprof.pyx index 121909727f1..9ed4aed5f0f 100644 --- a/ddtrace/profiling/exporter/pprof.pyx +++ b/ddtrace/profiling/exporter/pprof.pyx @@ -455,6 +455,7 @@ class _PprofConverter(object): for _ in ( (packages.filename_to_package(filename), filename) for filename, lineno, funcname in self._locations + if not packages.is_user_code(filename) ) if _[0] is not None }, _ITEMGETTER_ZERO diff --git a/ddtrace/profiling/profiler.py b/ddtrace/profiling/profiler.py index fa4bdb79a3e..111c1624fd2 100644 --- a/ddtrace/profiling/profiler.py +++ b/ddtrace/profiling/profiler.py @@ -24,6 +24,7 @@ from ddtrace.profiling import scheduler from ddtrace.profiling.collector import asyncio from ddtrace.profiling.collector import memalloc +from ddtrace.profiling.collector import pytorch from ddtrace.profiling.collector import stack from ddtrace.profiling.collector import stack_event from ddtrace.profiling.collector import threading @@ -120,6 +121,7 @@ def __init__( _stack_collector_enabled: bool = profiling_config.stack.enabled, _stack_v2_enabled: bool = profiling_config.stack.v2_enabled, _lock_collector_enabled: bool = profiling_config.lock.enabled, + _pytorch_collector_enabled: bool = profiling_config.pytorch.enabled, enable_code_provenance: bool = profiling_config.code_provenance, endpoint_collection_enabled: bool = profiling_config.endpoint_collection, ): @@ -135,6 +137,7 @@ def __init__( self._stack_collector_enabled: bool = _stack_collector_enabled self._stack_v2_enabled: bool = _stack_v2_enabled self._lock_collector_enabled: bool = _lock_collector_enabled + self._pytorch_collector_enabled: bool = _pytorch_collector_enabled self.enable_code_provenance: bool = enable_code_provenance self.endpoint_collection_enabled: bool = endpoint_collection_enabled @@ -219,6 +222,12 @@ def _build_default_exporters(self): LOG.error("Profiling failures occurred in an injected instance of ddtrace, disabling profiling") return [] + # pytorch collector relies on libdd exporter + 
if self._pytorch_collector_enabled: + LOG.error("Disabling pytorch profiler as libdd collector failed to initialize") + config.pytorch.enabled = False + self._pytorch_collector_enabled = False + # DEV: Import this only if needed to avoid importing protobuf # unnecessarily from ddtrace.profiling.exporter import http @@ -297,6 +306,33 @@ def start_collector(collector_class: Type) -> None: for module, hook in self._collectors_on_import: ModuleWatchdog.register_module_hook(module, hook) + if self._pytorch_collector_enabled: + + def start_collector(collector_class: Type) -> None: + with self._service_lock: + col = collector_class(r) + + if self.status == service.ServiceStatus.RUNNING: + # The profiler is already running so we need to start the collector + try: + col.start() + LOG.debug("Started pytorch collector %r", col) + except collector.CollectorUnavailable: + LOG.debug("Collector %r pytorch is unavailable, disabling", col) + return + except Exception: + LOG.error("Failed to start collector %r pytorch, disabling.", col, exc_info=True) + return + + self._collectors.append(col) + + self._collectors_on_import = [ + ("torch", lambda _: start_collector(pytorch.TorchProfilerCollector)), + ] + + for module, hook in self._collectors_on_import: + ModuleWatchdog.register_module_hook(module, hook) + if self._memory_collector_enabled: self._collectors.append(memalloc.MemoryCollector(r)) @@ -311,6 +347,7 @@ def start_collector(collector_class: Type) -> None: recorder=r, exporters=exporters, before_flush=self._collectors_snapshot, + tracer=self.tracer, ) def _collectors_snapshot(self): diff --git a/ddtrace/profiling/scheduler.py b/ddtrace/profiling/scheduler.py index 9f286f8688b..e8aafe7a63b 100644 --- a/ddtrace/profiling/scheduler.py +++ b/ddtrace/profiling/scheduler.py @@ -7,6 +7,8 @@ from typing import Optional from typing import Sequence # noqa F401 +import ddtrace +from ddtrace._trace.tracer import Tracer from ddtrace.internal import compat from ddtrace.internal import 
periodic from ddtrace.internal.datadog.profiling import ddup @@ -30,6 +32,7 @@ def __init__( recorder: Optional[Recorder] = None, exporters: Optional[List[Exporter]] = None, before_flush: Optional[Callable] = None, + tracer: Optional[Tracer] = ddtrace.tracer, interval: float = config.upload_interval, ): super(Scheduler, self).__init__(interval=interval) @@ -38,6 +41,7 @@ def __init__( self.before_flush: Optional[Callable] = before_flush self._configured_interval: float = self.interval self._last_export: int = 0 # Overridden in _start_service + self._tracer = tracer self._export_libdd_enabled: bool = config.export.libdd_enabled def _start_service(self): @@ -59,7 +63,7 @@ def flush(self): LOG.error("Scheduler before_flush hook failed", exc_info=True) if self._export_libdd_enabled: - ddup.upload() + ddup.upload(self._tracer) # These are only used by the Python uploader, but set them here to keep logs/etc # consistent for now diff --git a/ddtrace/settings/asm.py b/ddtrace/settings/asm.py index 3ec15ae67ef..cf20ea08f1a 100644 --- a/ddtrace/settings/asm.py +++ b/ddtrace/settings/asm.py @@ -156,6 +156,16 @@ class ASMConfig(Env): + r"ey[I-L][\w=-]+\.ey[I-L][\w=-]+(\.[\w.+\/=-]+)?|[\-]{5}BEGIN[a-z\s]+PRIVATE\sKEY" + r"[\-]{5}[^\-]+[\-]{5}END[a-z\s]+PRIVATE\sKEY|ssh-rsa\s*[a-z0-9\/\.+]{100,}", ) + _iast_max_concurrent_requests = Env.var( + int, + IAST.DD_IAST_MAX_CONCURRENT_REQUESTS, + default=2, + ) + _iast_max_vulnerabilities_per_requests = Env.var( + int, + IAST.DD_IAST_VULNERABILITIES_PER_REQUEST, + default=2, + ) _iast_lazy_taint = Env.var(bool, IAST.LAZY_TAINT, default=False) _deduplication_enabled = Env.var(bool, "_DD_APPSEC_DEDUPLICATION_ENABLED", default=True) @@ -213,6 +223,8 @@ class ASMConfig(Env): "_iast_redaction_enabled", "_iast_redaction_name_pattern", "_iast_redaction_value_pattern", + "_iast_max_concurrent_requests", + "_iast_max_vulnerabilities_per_requests", "_iast_lazy_taint", "_ep_stack_trace_enabled", "_ep_max_stack_traces", diff --git 
a/ddtrace/settings/profiling.py b/ddtrace/settings/profiling.py index ad8d8794d69..94d71f1778b 100644 --- a/ddtrace/settings/profiling.py +++ b/ddtrace/settings/profiling.py @@ -92,7 +92,13 @@ def _is_libdd_required(config): # libdd... requires libdd # injected environments _cannot_ deploy protobuf, so they must use libdd # timeline requires libdd - return config.stack.v2_enabled or config.export._libdd_enabled or config._injected or config.timeline_enabled + return ( + config.stack.v2_enabled + or config.export._libdd_enabled + or config._injected + or config.timeline_enabled + or config.pytorch.enabled + ) # This value indicates whether or not profiling is _loaded_ in an injected environment. It does not by itself @@ -399,6 +405,26 @@ class ProfilingConfigHeap(En): sample_size = En.d(int, _derive_default_heap_sample_size) +class ProfilingConfigPytorch(En): + __item__ = __prefix__ = "pytorch" + + enabled = En.v( + bool, + "enabled", + default=False, + help_type="Boolean", + help="Whether to enable the PyTorch profiler", + ) + + events_limit = En.v( + int, + "events_limit", + default=1_000_000, + help_type="Integer", + help="How many events the PyTorch profiler records each collection", + ) + + class ProfilingConfigExport(En): __item__ = __prefix__ = "export" @@ -416,6 +442,7 @@ class ProfilingConfigExport(En): ProfilingConfig.include(ProfilingConfigLock, namespace="lock") ProfilingConfig.include(ProfilingConfigMemory, namespace="memory") ProfilingConfig.include(ProfilingConfigHeap, namespace="heap") +ProfilingConfig.include(ProfilingConfigPytorch, namespace="pytorch") ProfilingConfig.include(ProfilingConfigExport, namespace="export") config = ProfilingConfig() @@ -466,6 +493,8 @@ def config_str(config): configured_features.append("mem") if config.heap.sample_size > 0: configured_features.append("heap") + if config.pytorch.enabled: + configured_features.append("pytorch") if config.export.libdd_enabled: configured_features.append("exp_dd") else: diff --git 
a/docker-compose.yml b/docker-compose.yml index bed5a6ce8ee..cf40a4a256d 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -129,7 +129,7 @@ services: volumes: - ddagent:/tmp/ddagent:rw testagent: - image: ghcr.io/datadog/dd-apm-test-agent/ddapm-test-agent:v1.17.0 + image: ghcr.io/datadog/dd-apm-test-agent/ddapm-test-agent:v1.20.0 ports: - "127.0.0.1:9126:8126" volumes: @@ -152,6 +152,10 @@ services: - "127.0.0.1:5433:5433" testrunner: + # DEV uncomment to test local changes to the Dockerfile + # build: + # context: ./docker + # dockerfile: Dockerfile image: ghcr.io/datadog/dd-trace-py/testrunner:47c7b5287da25643e46652e6d222a40a52f2382a@sha256:3a02dafeff9cd72966978816d1b39b54f5517af4049396923b95c8452f604269 command: bash environment: diff --git a/docker/.python-version b/docker/.python-version index decc1955c11..9924540f9a4 100644 --- a/docker/.python-version +++ b/docker/.python-version @@ -4,4 +4,4 @@ 3.9 3.10 3.11 -3.13-dev +3.13 diff --git a/docker/Dockerfile b/docker/Dockerfile index 79f207724db..8ff9be89e48 100644 --- a/docker/Dockerfile +++ b/docker/Dockerfile @@ -1,7 +1,7 @@ # DEV: Use `debian:slim` instead of an `alpine` image to support installing wheels from PyPI # this drastically improves test execution time since python dependencies don't all # have to be built from source all the time (grpcio takes forever to install) -FROM debian:buster-20221219-slim +FROM debian:bookworm-slim ARG TARGETARCH ARG HATCH_VERSION=1.12.0 @@ -34,7 +34,6 @@ RUN apt-get update \ gnupg \ jq \ libbz2-dev \ - libenchant-dev \ libffi-dev \ liblzma-dev \ libmemcached-dev \ @@ -47,9 +46,7 @@ RUN apt-get update \ libsqlite3-dev \ libsqliteodbc \ libssh-dev \ - libssl-dev \ patch \ - python-openssl\ unixodbc-dev \ wget \ zlib1g-dev \ @@ -61,7 +58,7 @@ RUN apt-get install -y --no-install-recommends nodejs npm \ # MariaDB is a dependency for tests RUN curl https://mariadb.org/mariadb_release_signing_key.pgp | gpg --dearmor > /etc/apt/trusted.gpg.d/mariadb.gpg \ - && echo 
"deb [arch=amd64,arm64] https://mirror.mariadb.org/repo/11.rolling/debian/ buster main" > /etc/apt/sources.list.d/mariadb.list \ + && echo "deb [arch=amd64,arm64] https://mirror.mariadb.org/repo/11.rolling/debian/ bookworm main" > /etc/apt/sources.list.d/mariadb.list \ && apt-get update \ && apt-get install -y --no-install-recommends libmariadb-dev libmariadb-dev-compat @@ -71,7 +68,7 @@ RUN if [ "$TARGETARCH" = "amd64" ]; \ then \ curl https://packages.microsoft.com/keys/microsoft.asc | gpg --dearmor > microsoft.gpg \ && mv microsoft.gpg /etc/apt/trusted.gpg.d/microsoft.gpg \ - && echo "deb [arch=amd64] https://packages.microsoft.com/repos/microsoft-debian-buster-prod buster main" > /etc/apt/sources.list.d/dotnetdev.list \ + && echo "deb [arch=amd64] https://packages.microsoft.com/repos/microsoft-debian-bookworm-prod bookworm main" > /etc/apt/sources.list.d/dotnetdev.list \ && apt-get update \ && apt-get install -y --no-install-recommends azure-functions-core-tools-4=4.0.6280-1; \ fi @@ -93,7 +90,7 @@ RUN curl https://sh.rustup.rs -sSf | \ sh -s -- --default-toolchain stable -y # Install pyenv and necessary Python versions -RUN git clone --depth 1 --branch v2.4.2 https://github.com/pyenv/pyenv "${PYENV_ROOT}" \ +RUN git clone --depth 1 --branch v2.4.22 https://github.com/pyenv/pyenv "${PYENV_ROOT}" \ && cd /root \ && pyenv local | xargs -L 1 pyenv install \ && cd - diff --git a/docs/advanced_usage.rst b/docs/advanced_usage.rst index fd556ef8770..309b6178c56 100644 --- a/docs/advanced_usage.rst +++ b/docs/advanced_usage.rst @@ -737,3 +737,85 @@ To avoid such duplicate log entries from ``ddtrace``, you can remove the automat ddtrace_logger = logging.getLogger("ddtrace") for handler in ddtrace_logger.handlers: ddtrace_logger.removeHandler(handler) + +PyTorch Profiling +----------------- + +The PyTorch profiler can be used to trace CPU and GPU events that occur when running inference or training on a PyTorch model. 
+The PyTorch profiler as it's `typically used `__, will output a trace json file to +local disk that can be loaded in a visualization tool like TensorBoard or Perfetto. With the dd-trace-py PyTorch profiler integration, we instrument the `profiler API `__ +to automatically export this data to Datadog for visualization without having to manually copy files between servers. + +The requirements for using this feature are: + +- must be using the `torch.profiler` module which was introduced in PyTorch version `1.8.1`. +- must set the environment variable `DD_PROFILING_PYTORCH_ENABLED=true`. + +It is important to note that we offer no different performance guarantees than the PyTorch profiler itself, which is not recommended to run in production continuously due to memory and CPU overhead. This +is an experimental feature which should be run with caution as it can add significant overhead. Additionally, please note that running this feature in certain +configurations can conflict with other features. For instance, running the NSight Systems or NSight Compute profiler alongside the PyTorch profiler on the same machine at the same time will likely lead to +errors as CUPTI generally does not support multiple concurrent readers. + + +Below is an example program using the well known `CIFAR-10 `__ dataset for image classification. +This can be run through the command line (assuming that a Datadog agent is running in the same environment) with: + +.. code-block:: bash + + DD_SERVICE=test-pytorch-service DD_PROFILING_PYTORCH_ENABLED=true DD_PROFILING_ENABLED=true ddtrace-run python cifar10.py + +.. 
code-block:: python + + import torch + import torch.nn + import torch.optim + import torch.utils.data + import torchvision.datasets + import torchvision.models + import torchvision.transforms as T + from torchvision.models import resnet18, ResNet18_Weights + + from torch.profiler import ProfilerActivity + + + def cifar(): + transform = T.Compose( + [T.Resize(224), T.ToTensor(), T.Normalize((0.5, 0.5, 0.5), (0.5, 0.5, 0.5))] + ) + train_set = torchvision.datasets.CIFAR10( + root="./data", train=True, download=True, transform=transform + ) + train_loader = torch.utils.data.DataLoader(train_set, batch_size=32, shuffle=True) + device = torch.device("cuda") + model = resnet18(weights=ResNet18_Weights.DEFAULT).cuda() + criterion = torch.nn.CrossEntropyLoss() + optimizer = torch.optim.SGD(model.parameters(), lr=0.001, momentum=0.9) + model.train() + + def train(data): + inputs, labels = data[0].to(device=device), data[1].to(device=device) + outputs = model(inputs) + loss = criterion(outputs, labels) + optimizer.zero_grad() + loss.backward() + optimizer.step() + + with torch.profiler.profile( + activities=[ProfilerActivity.CUDA], + ): + for step, batch_data in enumerate(train_loader): + print("step #%d" % step) + if step >= (1 + 1 + 3) * 2: + break + train(batch_data) + + + if __name__ == "__main__": + cifar() + +The profiling data is then visible under the Timeseries tab in the profiling page. For instance, the GPU Time by Kernel Name metric is shown below +for an application serving inference with an LLM through PyTorch: + +.. image:: pytorch_metric.png + :width: 600 + :alt: Alternative text \ No newline at end of file diff --git a/docs/integrations.rst b/docs/integrations.rst index d07fbe33e45..04a94007626 100644 --- a/docs/integrations.rst +++ b/docs/integrations.rst @@ -478,6 +478,13 @@ urllib3 .. automodule:: ddtrace.contrib.urllib3 +.. _vertexai: + +vertexai +^^^^^^^^^^^^^^^^^^^ +.. automodule:: ddtrace.contrib.vertexai + + .. 
_vertica: Vertica diff --git a/docs/pytorch_metric.png b/docs/pytorch_metric.png new file mode 100644 index 00000000000..f3b26cd57d5 Binary files /dev/null and b/docs/pytorch_metric.png differ diff --git a/docs/spelling_wordlist.txt b/docs/spelling_wordlist.txt index d3c185a9360..6bfa3f90bc1 100644 --- a/docs/spelling_wordlist.txt +++ b/docs/spelling_wordlist.txt @@ -1,19 +1,6 @@ + AArch -AnyCallable -autoreload -autoreloading -CPython -Fargate -Firehose -Gunicorn -HTTPPropagator -INfo -IPv -MySQL -OpenTracing -Runtimes -RuntimeErrors -SpanContext +agentless aiobotocore aiohttp aiomysql @@ -21,22 +8,26 @@ aiopg aioredis algolia algoliasearch -agentless analytics +AnyCallable api app appsec +AppSec aredis args ascii asgi asm +assertIn async asyncio asyncpg attrs autodetected autopatching +autoreload +autoreloading aws backend backends @@ -45,15 +36,18 @@ backported backporting bdd bikeshedding +Blowfish booleans boto botocore -CGroup cassandra +cattrs +CGroup cgroups cherrypy ciapp client_ip +CMake codepath collect committer @@ -64,15 +58,20 @@ contextvar contextvars coroutine coroutines +CPU +CPython +CUPTI Cython datadog datadoghq +dataset datastore datastores dbapi ddtrace deallocating deprecations +DES deserializing django docstring @@ -80,60 +79,75 @@ doctest dogpile dogpile.cache dogstatsd -dunder dsn +dunder elasticsearch elasticsearch1 elasticsearch7 embeddings +Enablement enqueue +enqueuer entrypoint entrypoints env -enqueuer eol eventbridge exec +Fargate fastapi +Firehose formatter -gRPC generativeai gevent -graphql +Gitlab +GPU graphene +graphql greenlet greenlets grpc +gRPC gunicorn +Gunicorn hostname hostnames +hotspot http httplib +HTTPPropagator https httpx -iPython iast +IAST +importlib +INfo ini InitContainer initializer integration integrations ip +IPv +iPython iterable -JSON jinja js +JSON kafka kinesis +Kinesis kombu kubernetes kwarg kwargs -LLM langchain langchain_community +libdatadog +libddwaf lifecycle linters +LLM lockfiles logbook loguru @@ 
-150,30 +164,37 @@ middlewares misencoded moderations mongoengine +msgpack multiline multiprocess multithreaded mypy mysql +MySQL mysqlclient mysqldb -msgpack +# tests/contrib/openai/test_openai_v1.py +Nam namespace NeedsAppKey +NSight obfuscator +ObjectProxy +oce openai opensearch opentelemetry opentracer opentracing +OpenTracing otel -ObjectProxy packfile packfiles parameterized parsers patcher perf +Perfetto pid plugin posix @@ -183,6 +204,7 @@ preconfigured prepend prepended profiler +programmatically protobuf proxying psutil @@ -199,28 +221,34 @@ pyodbc pyston pytest pytest-bdd +PyTorch quickstart ratelimit redis rediscluster renderer renderers -resolvers repo +resolvers respawn riotfile -rq +RLock rowcount +rq runnable runtime -runtimes runtime-id -RLock +RuntimeErrors +runtimes +Runtimes sanic screenshots serializable +serverless +Serverless sha sns +SpanContext sql sqlalchemy sqlite @@ -239,23 +267,27 @@ subdomains submodule submodules substring +suitespec +TensorBoard testagent TestCase testrunner +Timeseries timestamp tokenizer tracecontext tracestate tweens -uWSGI +# docs/configuration.rst +uest unbuffered unicode uninstrumented unittest unix +unobfuscated unpatch unpatched -unobfuscated unregister unshallow unvendored @@ -264,8 +296,10 @@ url urls username uvicorn +uWSGI vendored versioned +vertexai vertica w3c websocket @@ -277,26 +311,3 @@ Wrapt wsgi xfail yaaredis -Kinesis -AppSec -libddwaf -Serverless -serverless -cattrs -IAST -programmatically -DES -Blowfish -Gitlab -Enablement -hotspot -CMake -libdatadog -importlib -oce -assertIn -# tests/contrib/openai/test_openai_v1.py -Nam -# docs/configuration.rst -uest -suitespec diff --git a/docs/versioning.rst b/docs/versioning.rst index 0972213f51c..fdd71f8de08 100644 --- a/docs/versioning.rst +++ b/docs/versioning.rst @@ -109,17 +109,17 @@ Supported runtimes * - Linux - x86-64, i686, AArch64 - CPython - - 3.7-3.12 + - 3.7-3.13 - ``>=2.0,<3`` * - MacOS - Intel, Apple Silicon - CPython - - 3.7-3.12 + - 
3.7-3.13 - ``>=2.0,<3`` * - Windows - 64bit, 32bit - CPython - - 3.7-3.12 + - 3.7-3.13 - ``>=2.0,<3`` * - Linux - x86-64, i686, AArch64 diff --git a/hatch.toml b/hatch.toml index 7bc7e107c04..614054dbfed 100644 --- a/hatch.toml +++ b/hatch.toml @@ -124,7 +124,7 @@ extra-dependencies = [ ] [envs.slotscheck.scripts] -_ = [ +test = [ "python -m slotscheck -v ddtrace/", ] @@ -133,14 +133,19 @@ _ = [ detached = true python = "3.10" extra-dependencies = [ + "lxml==5.3.0", "packaging==23.1", "ruamel.yaml==0.18.6", + "vcrpy==6.0.2", ] [envs.scripts.scripts] test = [ "python -m doctest {args} scripts/get-target-milestone.py scripts/needs_testrun.py tests/suitespec.py", ] +needs_testrun = [ + "scripts/needs_testrun.py {args}", +] [envs.meta-testing] python = "3.10" @@ -149,9 +154,12 @@ extra-dependencies = [ "pytest-cov", "hypothesis<6.45.1" ] +[envs.meta-testing.env-vars] +DD_CIVISIBILITY_FLAKY_RETRY_ENABLED = "0" + [envs.meta-testing.scripts] -meta-testing = [ - "pytest {args} tests/meta" +test = [ + "pytest {args} --no-ddtrace tests/meta" ] [envs.integration_test] @@ -167,7 +175,7 @@ extra-dependencies = [ ] [[envs.integration_test.matrix]] -python = ["3.7", "3.8", "3.9", "3.10", "3.11", "3.12"] +python = ["3.7", "3.8", "3.9", "3.10", "3.11", "3.12", "3.13"] [envs.integration_test.env-vars] _DD_CIVISIBILITY_USE_CI_CONTEXT_PROVIDER = "1" @@ -289,7 +297,7 @@ test = [ ] [[envs.appsec_iast_native.matrix]] -python = ["3.7", "3.8", "3.9", "3.10", "3.11", "3.12"] +python = ["3.7", "3.8", "3.9", "3.10", "3.11", "3.12", "3.13"] ## ASM FastAPI @@ -359,9 +367,37 @@ test = [ ] [[envs.appsec_aggregated_leak_testing.matrix]] -python = ["3.10", "3.11", "3.12"] +python = ["3.10", "3.11", "3.12", "3.13"] + + + +## pytorch profiling test + +[envs.profiling_pytorch] +dependencies = [ + "pytest", + "pytest-cov", + "requests", + "hypothesis", + "torch>=1.8.1", + "torchvision", + "lz4", +] +[envs.profiling_pytorch.env-vars] +DD_PROFILING_ENABLED = "true" +DD_PROFILING_PYTORCH_ENABLED = "true" 
+CMAKE_BUILD_PARALLEL_LEVEL = "12" + +[envs.profiling_pytorch.scripts] +test = [ + "uname -a", + "pip freeze", + "python -m pytest tests/profiling_v2/test_pytorch.py -vvv --capture=tee-sys", +] +[[envs.profiling_pytorch.matrix]] +python = ["3.12"] ## Unit Tests @@ -435,7 +471,7 @@ pytest = ["~=6.0", "~=7.0"] [[envs.pytest_plugin_v2.matrix]] -python = ["3.9", "3.10", "3.12"] +python = ["3.9", "3.10", "3.12", "3.13"] pytest = ["~=6.0", "~=7.0", "~=8.0"] [envs.snapshot_viewer] diff --git a/lib-injection/dl_wheels.py b/lib-injection/dl_wheels.py index e10d8e53e0e..81c5715611d 100755 --- a/lib-injection/dl_wheels.py +++ b/lib-injection/dl_wheels.py @@ -16,6 +16,7 @@ ./dl_wheels.py --help """ + import argparse import itertools import os @@ -41,9 +42,9 @@ ) # Supported Python versions lists all python versions that can install at least one version of the ddtrace library. -supported_versions = ["2.7", "3.6", "3.7", "3.8", "3.9", "3.10", "3.11", "3.12"] +supported_versions = ["2.7", "3.6", "3.7", "3.8", "3.9", "3.10", "3.11", "3.12", "3.13"] supported_arches = ["aarch64", "x86_64", "i686"] -supported_platforms = ["musllinux_1_1", "manylinux2014"] +supported_platforms = ["musllinux_1_2", "manylinux2014"] parser = argparse.ArgumentParser(description=__doc__) parser.add_argument( diff --git a/lib-injection/sources/sitecustomize.py b/lib-injection/sources/sitecustomize.py index dbc68f65ebe..0f87b770edd 100644 --- a/lib-injection/sources/sitecustomize.py +++ b/lib-injection/sources/sitecustomize.py @@ -31,6 +31,7 @@ def parse_version(version): return Version((0, 0), "") +TELEMETRY_DATA = [] SCRIPT_DIR = os.path.dirname(__file__) RUNTIMES_ALLOW_LIST = { "cpython": { @@ -68,49 +69,58 @@ def get_oci_ddtrace_version(): def build_installed_pkgs(): installed_packages = {} if sys.version_info >= (3, 8): - from importlib import metadata as importlib_metadata + try: + from importlib import metadata as importlib_metadata - installed_packages = {pkg.metadata["Name"]: pkg.version for pkg in 
importlib_metadata.distributions()} + installed_packages = {pkg.metadata["Name"]: pkg.version for pkg in importlib_metadata.distributions()} + except Exception as e: + _log("Failed to build installed packages list: %s" % e, level="debug") else: try: import pkg_resources installed_packages = {pkg.key: pkg.version for pkg in pkg_resources.working_set} - except ImportError: + except Exception: try: import importlib_metadata installed_packages = {pkg.metadata["Name"]: pkg.version for pkg in importlib_metadata.distributions()} - except ImportError: - pass + except Exception as e: + _log("Failed to build installed packages list: %s" % e, level="debug") return {key.lower(): value for key, value in installed_packages.items()} def build_min_pkgs(): min_pkgs = dict() - for location in VERSION_COMPAT_FILE_LOCATIONS: - if os.path.exists(location): - with open(location, "r") as csvfile: - csv_reader = csv.reader(csvfile, delimiter=",") - for idx, row in enumerate(csv_reader): - if idx < 2: - continue - min_pkgs[row[0].lower()] = parse_version(row[1]) - break + try: + for location in VERSION_COMPAT_FILE_LOCATIONS: + if os.path.exists(location): + with open(location, "r") as csvfile: + csv_reader = csv.reader(csvfile, delimiter=",") + for idx, row in enumerate(csv_reader): + if idx < 2: + continue + min_pkgs[row[0].lower()] = parse_version(row[1]) + break + except Exception as e: + _log("Failed to build min-pkgs list: %s" % e, level="debug") return min_pkgs def build_denied_executables(): denied_executables = set() _log("Checking denied-executables list", level="debug") - if os.path.exists(EXECUTABLE_DENY_LOCATION): - with open(EXECUTABLE_DENY_LOCATION, "r") as denyfile: - _log("Found deny-list file", level="debug") - for line in denyfile.readlines(): - cleaned = line.strip("\n") - denied_executables.add(cleaned) - denied_executables.add(os.path.basename(cleaned)) - _log("Built denied-executables list of %s entries" % (len(denied_executables),), level="debug") + try: + if 
os.path.exists(EXECUTABLE_DENY_LOCATION): + with open(EXECUTABLE_DENY_LOCATION, "r") as denyfile: + _log("Found deny-list file", level="debug") + for line in denyfile.readlines(): + cleaned = line.strip("\n") + denied_executables.add(cleaned) + denied_executables.add(os.path.basename(cleaned)) + _log("Built denied-executables list of %s entries" % (len(denied_executables),), level="debug") + except Exception as e: + _log("Failed to build denied-executables list: %s" % e, level="debug") return denied_executables @@ -228,13 +238,14 @@ def _inject(): global PYTHON_RUNTIME global PKGS_ALLOW_LIST global EXECUTABLES_DENY_LIST + global TELEMETRY_DATA + # Try to get the version of the Python runtime first so we have it for telemetry + PYTHON_VERSION = platform.python_version() + PYTHON_RUNTIME = platform.python_implementation().lower() DDTRACE_VERSION = get_oci_ddtrace_version() INSTALLED_PACKAGES = build_installed_pkgs() - PYTHON_RUNTIME = platform.python_implementation().lower() - PYTHON_VERSION = platform.python_version() PKGS_ALLOW_LIST = build_min_pkgs() EXECUTABLES_DENY_LIST = build_denied_executables() - telemetry_data = [] integration_incomp = False runtime_incomp = False os.environ["_DD_INJECT_WAS_ATTEMPTED"] = "true" @@ -253,19 +264,21 @@ def _inject(): except Exception: _log("user-installed ddtrace not found, configuring application to use injection site-packages") - current_platform = "manylinux2014" if _get_clib() == "gnu" else "musllinux_1_1" + current_platform = "manylinux2014" if _get_clib() == "gnu" else "musllinux_1_2" _log("detected platform %s" % current_platform, level="debug") pkgs_path = os.path.join(SCRIPT_DIR, "ddtrace_pkgs") _log("ddtrace_pkgs path is %r" % pkgs_path, level="debug") _log("ddtrace_pkgs contents: %r" % os.listdir(pkgs_path), level="debug") + abort = False incompatible_sysarg = get_first_incompatible_sysarg() if incompatible_sysarg is not None: _log("Found incompatible executable: %s." 
% incompatible_sysarg, level="debug") if not FORCE_INJECT: _log("Aborting dd-trace-py instrumentation.", level="debug") - telemetry_data.append( + abort = True + TELEMETRY_DATA.append( create_count_metric( "library_entrypoint.abort.integration", ) @@ -287,9 +300,10 @@ def _inject(): integration_incomp = True if not FORCE_INJECT: _log("Aborting dd-trace-py instrumentation.", level="debug") + abort = True for key, value in incompatible_packages.items(): - telemetry_data.append( + TELEMETRY_DATA.append( create_count_metric( "library_entrypoint.abort.integration", [ @@ -313,15 +327,16 @@ def _inject(): runtime_incomp = True if not FORCE_INJECT: _log("Aborting dd-trace-py instrumentation.", level="debug") + abort = True - telemetry_data.append(create_count_metric("library_entrypoint.abort.runtime")) + TELEMETRY_DATA.append(create_count_metric("library_entrypoint.abort.runtime")) else: _log( "DD_INJECT_FORCE set to True, allowing unsupported runtimes and continuing.", level="debug", ) - if telemetry_data: - telemetry_data.append( + if abort: + TELEMETRY_DATA.append( create_count_metric( "library_entrypoint.abort", [ @@ -329,8 +344,6 @@ def _inject(): ], ) ) - telemetry_event = gen_telemetry_payload(telemetry_data, DDTRACE_VERSION) - send_telemetry(telemetry_event) return site_pkgs_path = os.path.join( @@ -339,6 +352,12 @@ def _inject(): _log("site-packages path is %r" % site_pkgs_path, level="debug") if not os.path.exists(site_pkgs_path): _log("ddtrace site-packages not found in %r, aborting" % site_pkgs_path, level="error") + TELEMETRY_DATA.append( + gen_telemetry_payload( + [create_count_metric("library_entrypoint.abort", ["reason:missing_" + site_pkgs_path])], + DDTRACE_VERSION, + ) + ) return # Add the custom site-packages directory to the Python path to load the ddtrace package. 
@@ -349,6 +368,17 @@ def _inject(): except BaseException as e: _log("failed to load ddtrace module: %s" % e, level="error") + TELEMETRY_DATA.append( + gen_telemetry_payload( + [ + create_count_metric( + "library_entrypoint.error", ["error_type:import_ddtrace_" + type(e).__name__.lower()] + ) + ], + DDTRACE_VERSION, + ) + ) + return else: try: @@ -377,38 +407,56 @@ def _inject(): os.environ["PYTHONPATH"] = python_path _log("successfully configured ddtrace package, python path is %r" % os.environ["PYTHONPATH"]) - event = gen_telemetry_payload( - [ - create_count_metric( - "library_entrypoint.complete", - [ - "injection_forced:" + str(runtime_incomp or integration_incomp).lower(), - ], - ) - ], - DDTRACE_VERSION, + TELEMETRY_DATA.append( + gen_telemetry_payload( + [ + create_count_metric( + "library_entrypoint.complete", + [ + "injection_forced:" + str(runtime_incomp or integration_incomp).lower(), + ], + ) + ], + DDTRACE_VERSION, + ) ) - send_telemetry(event) except Exception as e: - event = gen_telemetry_payload( - [create_count_metric("library_entrypoint.error", ["error_type:" + type(e).__name__.lower()])], - DDTRACE_VERSION, + TELEMETRY_DATA.append( + gen_telemetry_payload( + [ + create_count_metric( + "library_entrypoint.error", ["error_type:init_ddtrace_" + type(e).__name__.lower()] + ) + ], + DDTRACE_VERSION, + ) ) - send_telemetry(event) _log("failed to load ddtrace.bootstrap.sitecustomize: %s" % e, level="error") return else: module_origin = spec.origin if spec else None _log("user-installed ddtrace found: %s, aborting site-packages injection" % module_origin, level="warning") + TELEMETRY_DATA.append( + create_count_metric( + "library_entrypoint.abort", + [ + "reason:ddtrace_already_present", + ], + ) + ) try: - _inject() -except Exception as e: try: - event = gen_telemetry_payload( - [create_count_metric("library_entrypoint.error", ["error_type:" + type(e).__name__.lower()])] + _inject() + except Exception as e: + TELEMETRY_DATA.append( + 
gen_telemetry_payload( + [create_count_metric("library_entrypoint.error", ["error_type:main_" + type(e).__name__.lower()])] + ) ) - send_telemetry(event) - except Exception: - pass # absolutely never allow exceptions to propagate to the app + finally: + if TELEMETRY_DATA: + send_telemetry(TELEMETRY_DATA) +except Exception: + pass # absolutely never allow exceptions to propagate to the app diff --git a/pyproject.toml b/pyproject.toml index 75ba723fdcc..da28b04f4e5 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -23,13 +23,16 @@ classifiers = [ "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", + "Programming Language :: Python :: 3.13", ] dependencies = [ - "bytecode>=0.15.0; python_version>='3.12'", + "bytecode>=0.16.0; python_version>='3.13.0'", + "bytecode>=0.15.0; python_version~='3.12.0'", "bytecode>=0.14.0; python_version~='3.11.0'", "bytecode>=0.13.0; python_version<'3.11'", "envier~=0.5", "importlib_metadata<=6.5.0; python_version<'3.8'", + "legacy-cgi>=2.0.0; python_version>='3.13.0'", "opentelemetry-api>=1", "protobuf>=3", "typing_extensions", diff --git a/releasenotes/notes/313-ddtracerun-e34ef8d7496091b3.yaml b/releasenotes/notes/313-ddtracerun-e34ef8d7496091b3.yaml new file mode 100644 index 00000000000..50cf1a7d196 --- /dev/null +++ b/releasenotes/notes/313-ddtracerun-e34ef8d7496091b3.yaml @@ -0,0 +1,4 @@ +--- +upgrade: + - | + Enables tests of the ``ddtrace-run`` entrypoint with Python 3.13 diff --git a/releasenotes/notes/feat-add-azure-functions-integration-108911bfe1e5f081.yaml b/releasenotes/notes/feat-add-azure-functions-integration-108911bfe1e5f081.yaml new file mode 100644 index 00000000000..b9b7b255564 --- /dev/null +++ b/releasenotes/notes/feat-add-azure-functions-integration-108911bfe1e5f081.yaml @@ -0,0 +1,3 @@ +features: + - | + azure_functions: This introduces support for Azure Functions. 
diff --git a/releasenotes/notes/feat-standalone-sca-billing-925c84d69fe061ce.yaml b/releasenotes/notes/feat-standalone-sca-billing-925c84d69fe061ce.yaml new file mode 100644 index 00000000000..733aaea6262 --- /dev/null +++ b/releasenotes/notes/feat-standalone-sca-billing-925c84d69fe061ce.yaml @@ -0,0 +1,4 @@ +--- +features: + - | + ASM: This introduces "Standalone SCA billing", opting out of APM billing and applying only to SCA. Enable this by setting these two environment variables: ``DD_APPSEC_SCA_ENABLED`` and ``DD_EXPERIMENTAL_APPSEC_STANDALONE_ENABLED`` diff --git a/releasenotes/notes/fix-appsec-use-unpatched-json-8d09aacad4808ef2.yaml b/releasenotes/notes/fix-appsec-use-unpatched-json-8d09aacad4808ef2.yaml new file mode 100644 index 00000000000..a4784672021 --- /dev/null +++ b/releasenotes/notes/fix-appsec-use-unpatched-json-8d09aacad4808ef2.yaml @@ -0,0 +1,4 @@ +--- +fixes: + - | + ASM: This fix resolves an issue where AppSec was using a patched JSON loads, creating telemetry errors. diff --git a/releasenotes/notes/fix-celery-closed-spans-34ff43868c1e33b8.yaml b/releasenotes/notes/fix-celery-closed-spans-34ff43868c1e33b8.yaml new file mode 100644 index 00000000000..f16f7b36fed --- /dev/null +++ b/releasenotes/notes/fix-celery-closed-spans-34ff43868c1e33b8.yaml @@ -0,0 +1,4 @@ +--- +fixes: + - | + tracing(celery): Fixes an issue where ``celery.apply`` spans from Celery prerun got closed too soon leading to span tags being missing.
\ No newline at end of file diff --git a/releasenotes/notes/fix-kafka-consumer-parenting-29acfd08e05d2350.yaml b/releasenotes/notes/fix-kafka-consumer-parenting-29acfd08e05d2350.yaml new file mode 100644 index 00000000000..df8cdcfe986 --- /dev/null +++ b/releasenotes/notes/fix-kafka-consumer-parenting-29acfd08e05d2350.yaml @@ -0,0 +1,5 @@ +--- +fixes: + - | + kafka: Fixes an issue with Kafka consumer spans not using the active trace context when distributed + tracing was enabled and no valid distributed context was found within a consumed message. diff --git a/releasenotes/notes/fix-lib-injection-telemetry-4fbea5e41ee1ff3e.yaml b/releasenotes/notes/fix-lib-injection-telemetry-4fbea5e41ee1ff3e.yaml new file mode 100644 index 00000000000..cf80f366f5b --- /dev/null +++ b/releasenotes/notes/fix-lib-injection-telemetry-4fbea5e41ee1ff3e.yaml @@ -0,0 +1,4 @@ +--- +fixes: + - | + lib-injection: Fix missing lib-injection telemetry for common abort scenarios. diff --git a/releasenotes/notes/fix-llmobs-do-not-ignore-global-patch-configs-a2adc4803f55b142.yaml b/releasenotes/notes/fix-llmobs-do-not-ignore-global-patch-configs-a2adc4803f55b142.yaml new file mode 100644 index 00000000000..b080742d74a --- /dev/null +++ b/releasenotes/notes/fix-llmobs-do-not-ignore-global-patch-configs-a2adc4803f55b142.yaml @@ -0,0 +1,5 @@ +--- +fixes: + - | + LLM Observability: This fix resolves an issue where ``LLMObs.enable()`` ignored global patch configurations, specifically + the ``DD_TRACE__ENABLED`` and ``DD_PATCH_MODULES`` environment variables. diff --git a/releasenotes/notes/fix-profiler-int-ptr-conversion-4377fbd8724eeaec.yaml b/releasenotes/notes/fix-profiler-int-ptr-conversion-4377fbd8724eeaec.yaml new file mode 100644 index 00000000000..cadb50628fa --- /dev/null +++ b/releasenotes/notes/fix-profiler-int-ptr-conversion-4377fbd8724eeaec.yaml @@ -0,0 +1,6 @@ +--- +fixes: + - | + Updates setup.py to ignore int-ptr conversion warnings for the profiler stack.pyx file.
+ This is important because gcc 14 makes these conversions an error, alpine 3.21.0 ships with gcc 14, + and any patch version of a Python alpine image cut after December 5th, 2024, will have this issue. diff --git a/releasenotes/notes/fix-profiling-memalloc-segfault-5593ad951405a75d.yaml b/releasenotes/notes/fix-profiling-memalloc-segfault-5593ad951405a75d.yaml new file mode 100644 index 00000000000..8632b62af50 --- /dev/null +++ b/releasenotes/notes/fix-profiling-memalloc-segfault-5593ad951405a75d.yaml @@ -0,0 +1,5 @@ +--- +fixes: + - | + Fixes an issue where the memory allocation profiler can cause a segmentation fault due to + data races when accessing its own global data structures from multiple threads. diff --git a/releasenotes/notes/fix-profiling-native-mutices-62440b5a3d9d6c4b.yaml b/releasenotes/notes/fix-profiling-native-mutices-62440b5a3d9d6c4b.yaml new file mode 100644 index 00000000000..40167a974c3 --- /dev/null +++ b/releasenotes/notes/fix-profiling-native-mutices-62440b5a3d9d6c4b.yaml @@ -0,0 +1,5 @@ +--- +fixes: + - | + profiling: Fixes a bug where profiling mutexes were not cleared on fork in the child process. This could + cause deadlocks in certain configurations. diff --git a/releasenotes/notes/iast-fix-iast-propagation-error-2-ba4a998133269a7c.yaml b/releasenotes/notes/iast-fix-iast-propagation-error-2-ba4a998133269a7c.yaml new file mode 100644 index 00000000000..4918edb17a7 --- /dev/null +++ b/releasenotes/notes/iast-fix-iast-propagation-error-2-ba4a998133269a7c.yaml @@ -0,0 +1,5 @@ +--- +fixes: + - | + ASM: This fix resolves an issue where AppSec was using a patched request and builtins functions, + creating telemetry errors. 
\ No newline at end of file diff --git a/releasenotes/notes/langchain-pydantic-output-parsers-19bc162212ec051e.yaml b/releasenotes/notes/langchain-pydantic-output-parsers-19bc162212ec051e.yaml new file mode 100644 index 00000000000..687e465723a --- /dev/null +++ b/releasenotes/notes/langchain-pydantic-output-parsers-19bc162212ec051e.yaml @@ -0,0 +1,4 @@ +--- +fixes: + - | + langchain: resolves a JSON decoding issue resulting from tagging streamed outputs from chains ending with a PydanticOutputParser. diff --git a/releasenotes/notes/log-warning-on-kinesis-stream-metadata-not-found-a921cabed5d4397e.yaml b/releasenotes/notes/log-warning-on-kinesis-stream-metadata-not-found-a921cabed5d4397e.yaml new file mode 100644 index 00000000000..ed0dda53ea8 --- /dev/null +++ b/releasenotes/notes/log-warning-on-kinesis-stream-metadata-not-found-a921cabed5d4397e.yaml @@ -0,0 +1,3 @@ +fixes: + - | + datastreams: Logs at warning level for Kinesis errors that break the Data Streams Monitoring map. diff --git a/releasenotes/notes/profiling-add-pytorch-integration-0683123b7bb83f99.yaml b/releasenotes/notes/profiling-add-pytorch-integration-0683123b7bb83f99.yaml new file mode 100644 index 00000000000..891e039a204 --- /dev/null +++ b/releasenotes/notes/profiling-add-pytorch-integration-0683123b7bb83f99.yaml @@ -0,0 +1,8 @@ +--- +features: + - | + profiling: Adds an experimental integration with the PyTorch profiler which can be enabled + by setting ``DD_PROFILING_PYTORCH_ENABLED=true``. This feature instruments the PyTorch + profiler API (https://pytorch.org/docs/stable/_modules/torch/profiler/profiler.html) + so that GPU profiling data can be sent to Datadog for visualization. + This feature supports torch version >= 1.8.1. 
diff --git a/releasenotes/notes/refactor-patch-error-ssi-1a2e9fe206d6d6df.yaml b/releasenotes/notes/refactor-patch-error-ssi-1a2e9fe206d6d6df.yaml new file mode 100644 index 00000000000..8afc2e7595f --- /dev/null +++ b/releasenotes/notes/refactor-patch-error-ssi-1a2e9fe206d6d6df.yaml @@ -0,0 +1,4 @@ +--- +fixes: + - | + Integrations: Improved error handling for exceptions raised during the startup of ddtrace integrations. This reduces the likelihood of the ddtrace library raising unhandled exceptions. \ No newline at end of file diff --git a/releasenotes/notes/stack_traces_for_iast-cd2c008168f6181e.yaml b/releasenotes/notes/stack_traces_for_iast-cd2c008168f6181e.yaml new file mode 100644 index 00000000000..045552e9b7d --- /dev/null +++ b/releasenotes/notes/stack_traces_for_iast-cd2c008168f6181e.yaml @@ -0,0 +1,4 @@ +--- +features: + - | + Code Security: This introduces stack trace reports for Code Security. diff --git a/releasenotes/notes/telemetry-deadlocks-ea3f457ab0611c8b.yaml b/releasenotes/notes/telemetry-deadlocks-ea3f457ab0611c8b.yaml new file mode 100644 index 00000000000..1fe2739767d --- /dev/null +++ b/releasenotes/notes/telemetry-deadlocks-ea3f457ab0611c8b.yaml @@ -0,0 +1,4 @@ +--- +fixes: + - | + library: Resolves deadlocks that could occur when sending instrumentation telemetry data after an unhandled exception is raised. diff --git a/releasenotes/notes/threethirteen-d40d659d8939fe5e.yaml b/releasenotes/notes/threethirteen-d40d659d8939fe5e.yaml new file mode 100644 index 00000000000..3a229695abd --- /dev/null +++ b/releasenotes/notes/threethirteen-d40d659d8939fe5e.yaml @@ -0,0 +1,52 @@ +--- +upgrade: + - | + Makes the library compatible with Python 3.13. 
+ + The following limitations currently apply to support for Python 3.13: + - ``ddtrace`` is not supported on Windows with Python 3.13 + - Appsec Threat Detection is not tested against Django, Flask, or FastAPI with 3.13 + - Automatic Service Naming is not tested with 3.13 + - The ``ddtrace-run`` entrypoint is not tested with 3.13 + - The following products are not tested with 3.13: + - Code Coverage + - Appsec IAST + - Data Streams Monitoring + - CI Visibility + - Continuous Profiling + - The following integrations are not tested with 3.13: + - aiobotocore + - aiomysql + - aiopg + - anthropic + - asyncpg + - avro + - botocore + - confluent-kafka + - consul + - django + - falcon + - fastapi + - freezegun + - gevent + - google_generativeai + - grpcio + - gunicorn + - langchain + - mysqlclient + - opentracing + - protobuf + - psycopg + - psycopg2 + - pymysql + - pyodbc + - pytest + - pytest-bdd + - pytest-benchmark + - sanic + - selenium + - sqlalchemy + - sqlite3 + - starlette + - tornado + - vertexai diff --git a/releasenotes/notes/waf_1.22.0-05b1dfbaa0d47059.yaml b/releasenotes/notes/waf_1.22.0-05b1dfbaa0d47059.yaml new file mode 100644 index 00000000000..def80385e74 --- /dev/null +++ b/releasenotes/notes/waf_1.22.0-05b1dfbaa0d47059.yaml @@ -0,0 +1,4 @@ +--- +upgrade: + - | + ASM: This upgrades libddwaf to 1.22.0 diff --git a/riotfile.py b/riotfile.py index 1a1f65ed116..e7a078a5425 100644 --- a/riotfile.py +++ b/riotfile.py @@ -17,7 +17,8 @@ (3, 10), (3, 11), (3, 12), -] + (3, 13), +] # type: List[Tuple[int, int]] def version_to_str(version: Tuple[int, int]) -> str: @@ -70,9 +71,9 @@ def select_pys(min_version: str = MIN_PYTHON_VERSION, max_version: str = MAX_PYT """Helper to select python versions from the list of versions we support >>> select_pys() - ['3.7', '3.8', '3.9', '3.10', '3.11', '3.12'] + ['3.7', '3.8', '3.9', '3.10', '3.11', '3.12', '3.13'] >>> select_pys(min_version='3') - ['3.7', '3.8', '3.9', '3.10', '3.11', '3.12'] + ['3.7', '3.8', '3.9', '3.10', 
'3.11', '3.12', '3.13'] >>> select_pys(max_version='3') [] >>> select_pys(min_version='3.7', max_version='3.9') @@ -116,6 +117,7 @@ def select_pys(min_version: str = MIN_PYTHON_VERSION, max_version: str = MAX_PYT pys=["3"], pkgs={ "ruamel.yaml": latest, + "lxml": latest, }, ), Venv( @@ -124,6 +126,7 @@ def select_pys(min_version: str = MIN_PYTHON_VERSION, max_version: str = MAX_PYT pys=["3"], pkgs={ "ruamel.yaml": latest, + "lxml": latest, }, ), Venv( @@ -140,7 +143,7 @@ def select_pys(min_version: str = MIN_PYTHON_VERSION, max_version: str = MAX_PYT ), Venv( name="appsec_iast", - pys=select_pys(), + pys=select_pys(max_version="3.12"), command="pytest -v {cmdargs} tests/appsec/iast/", pkgs={ "requests": latest, @@ -162,7 +165,7 @@ def select_pys(min_version: str = MIN_PYTHON_VERSION, max_version: str = MAX_PYT ), Venv( name="appsec_iast_memcheck", - pys=select_pys(min_version="3.9"), + pys=select_pys(min_version="3.9", max_version="3.12"), command="pytest {cmdargs} --memray --stacks=35 tests/appsec/iast_memcheck/", pkgs={ "requests": latest, @@ -261,7 +264,7 @@ def select_pys(min_version: str = MIN_PYTHON_VERSION, max_version: str = MAX_PYT ), # Flask 3.x.x Venv( - pys=select_pys(min_version="3.8"), + pys=select_pys(min_version="3.8", max_version="3.12"), pkgs={ "flask": "~=3.0", "langchain": "==0.0.354", @@ -394,7 +397,7 @@ def select_pys(min_version: str = MIN_PYTHON_VERSION, max_version: str = MAX_PYT "msgpack": [latest], "pytest-randomly": latest, }, - pys=select_pys(), + pys=select_pys(max_version="3.12"), venvs=[ Venv( name="datastreams-latest", @@ -583,6 +586,7 @@ def select_pys(min_version: str = MIN_PYTHON_VERSION, max_version: str = MAX_PYT "vertica-python": ">=0.6.0,<0.7.0", "kombu": ">=4.2.0,<4.3.0", "pytest-randomly": latest, + "requests": latest, }, ), Venv( @@ -604,7 +608,7 @@ def select_pys(min_version: str = MIN_PYTHON_VERSION, max_version: str = MAX_PYT Venv( name="falcon", command="pytest {cmdargs} tests/contrib/falcon", - 
pys=select_pys(min_version="3.7"), + pys=select_pys(min_version="3.7", max_version="3.12"), pkgs={ "falcon": [ "~=3.0.0", @@ -826,7 +830,7 @@ def select_pys(min_version: str = MIN_PYTHON_VERSION, max_version: str = MAX_PYT ), Venv( # django started supporting psycopg3 in 4.2 for versions >3.1.8 - pys=select_pys(min_version="3.8"), + pys=select_pys(min_version="3.8", max_version="3.12"), pkgs={ "django": ["~=4.2"], "psycopg": latest, @@ -917,7 +921,7 @@ def select_pys(min_version: str = MIN_PYTHON_VERSION, max_version: str = MAX_PYT }, ), Venv( - pys=select_pys(min_version="3.12"), + pys="3.12", pkgs={ "sqlalchemy": latest, "django": latest, @@ -1206,7 +1210,7 @@ def select_pys(min_version: str = MIN_PYTHON_VERSION, max_version: str = MAX_PYT pkgs={"psycopg2-binary": "~=2.8.0"}, ), Venv( - pys=select_pys(min_version="3.7"), + pys=select_pys(min_version="3.7", max_version="3.12"), # psycopg2-binary added support for Python 3.9/3.10 in 2.9.1 # psycopg2-binary added support for Python 3.11 in 2.9.2 pkgs={"psycopg2-binary": ["~=2.9.2", latest]}, @@ -1230,7 +1234,7 @@ def select_pys(min_version: str = MIN_PYTHON_VERSION, max_version: str = MAX_PYT }, ), Venv( - pys=select_pys(min_version="3.12"), + pys=select_pys(min_version="3.12", max_version="3.12"), pkgs={ "pytest-asyncio": "==0.23.7", }, @@ -1310,7 +1314,7 @@ def select_pys(min_version: str = MIN_PYTHON_VERSION, max_version: str = MAX_PYT pkgs={"starlette": ["~=0.21.0", "~=0.33.0", latest]}, ), Venv( - pys=select_pys(min_version="3.12"), + pys="3.12", pkgs={"starlette": latest}, ), ], @@ -1333,7 +1337,7 @@ def select_pys(min_version: str = MIN_PYTHON_VERSION, max_version: str = MAX_PYT }, venvs=[ Venv( - pys=select_pys(min_version="3.7"), + pys=select_pys(min_version="3.7", max_version="3.12"), pkgs={ "sqlalchemy": ["~=1.3.0", latest], "psycopg2-binary": latest, @@ -1434,7 +1438,7 @@ def select_pys(min_version: str = MIN_PYTHON_VERSION, max_version: str = MAX_PYT }, venvs=[ Venv( - pys=select_pys(min_version="3.8"), 
+ pys=select_pys(min_version="3.8", max_version="3.12"), pkgs={"botocore": "==1.34.49", "boto3": "==1.34.49"}, ), ], @@ -1503,7 +1507,7 @@ def select_pys(min_version: str = MIN_PYTHON_VERSION, max_version: str = MAX_PYT pkgs={"pymysql": "~=0.10"}, ), Venv( - pys=select_pys(min_version="3.7"), + pys=select_pys(min_version="3.7", max_version="3.12"), pkgs={ "pymysql": [ "~=1.0", @@ -1559,7 +1563,7 @@ def select_pys(min_version: str = MIN_PYTHON_VERSION, max_version: str = MAX_PYT }, ), Venv( - pys=select_pys(min_version="3.12"), + pys=select_pys(min_version="3.12", max_version="3.12"), pkgs={"aiobotocore": latest}, ), ], @@ -1582,14 +1586,14 @@ def select_pys(min_version: str = MIN_PYTHON_VERSION, max_version: str = MAX_PYT ), Venv( # fastapi added support for Python 3.11 in 0.86.0 - pys=select_pys(min_version="3.11"), + pys=select_pys(min_version="3.11", max_version="3.12"), pkgs={"fastapi": ["~=0.86.0", latest], "anyio": ">=3.4.0,<4.0"}, ), ], ), Venv( name="aiomysql", - pys=select_pys(min_version="3.7"), + pys=select_pys(min_version="3.7", max_version="3.12"), command="pytest {cmdargs} tests/contrib/aiomysql", pkgs={ "pytest-randomly": latest, @@ -1636,7 +1640,7 @@ def select_pys(min_version: str = MIN_PYTHON_VERSION, max_version: str = MAX_PYT ], ), Venv( - pys=select_pys(min_version="3.10"), + pys=select_pys(min_version="3.10", max_version="3.12"), pkgs={ "pytest": [ "~=6.0", @@ -1693,9 +1697,6 @@ def select_pys(min_version: str = MIN_PYTHON_VERSION, max_version: str = MAX_PYT "more_itertools": "<8.11.0", "pytest-randomly": latest, }, - env={ - "DD_PYTEST_USE_NEW_PLUGIN_BETA": "0", - }, venvs=[ Venv( pys=select_pys(min_version="3.7", max_version="3.9"), @@ -1706,9 +1707,21 @@ def select_pys(min_version: str = MIN_PYTHON_VERSION, max_version: str = MAX_PYT ">=6.0,<6.1", ] }, + venvs=[ + Venv( + env={ + "DD_PYTEST_USE_NEW_PLUGIN_BETA": "0", + }, + ), + Venv( + env={ + "DD_PYTEST_USE_NEW_PLUGIN_BETA": "1", + }, + ), + ], ), Venv( - 
pys=select_pys(min_version="3.10"), + pys=select_pys(min_version="3.10", max_version="3.12"), pkgs={ "pytest-bdd": [ ">=4.0,<5.0", @@ -1716,31 +1729,49 @@ def select_pys(min_version: str = MIN_PYTHON_VERSION, max_version: str = MAX_PYT ">=6.0,<6.1", ] }, + venvs=[ + Venv( + env={ + "DD_PYTEST_USE_NEW_PLUGIN_BETA": "0", + }, + ), + Venv( + env={ + "DD_PYTEST_USE_NEW_PLUGIN_BETA": "1", + }, + ), + ], ), ], ), Venv( name="pytest-benchmark", + pys=select_pys(min_version="3.7", max_version="3.12"), command="pytest {cmdargs} --no-ddtrace --no-cov tests/contrib/pytest_benchmark/", pkgs={ "msgpack": latest, "pytest-randomly": latest, }, - env={ - "DD_PYTEST_USE_NEW_PLUGIN_BETA": "0", - }, venvs=[ Venv( - venvs=[ - Venv( - pys=select_pys(min_version="3.7", max_version="3.10"), - pkgs={ - "pytest-benchmark": [ - ">=3.1.0,<=4.0.0", - ] - }, - ) - ], + pkgs={ + "pytest-benchmark": [ + ">=3.1.0,<=4.0.0", + ] + }, + env={ + "DD_PYTEST_USE_NEW_PLUGIN_BETA": "0", + }, + ), + Venv( + pkgs={ + "pytest-benchmark": [ + ">=3.1.0,<=4.0.0", + ] + }, + env={ + "DD_PYTEST_USE_NEW_PLUGIN_BETA": "1", + }, ), ], ), @@ -1771,7 +1802,7 @@ def select_pys(min_version: str = MIN_PYTHON_VERSION, max_version: str = MAX_PYT ), Venv( # grpcio added support for Python 3.12 in 1.59 - pys=select_pys(min_version="3.12"), + pys=select_pys(min_version="3.12", max_version="3.12"), pkgs={ "grpcio": ["~=1.59.0", latest], "pytest-asyncio": "==0.23.7", @@ -1973,7 +2004,7 @@ def select_pys(min_version: str = MIN_PYTHON_VERSION, max_version: str = MAX_PYT }, venvs=[ Venv( - pys=select_pys(min_version="3.7"), + pys=select_pys(min_version="3.7", max_version="3.12"), pkgs={ "aiopg": ["~=1.0", "~=1.4.0"], }, @@ -2218,7 +2249,7 @@ def select_pys(min_version: str = MIN_PYTHON_VERSION, max_version: str = MAX_PYT }, ), Venv( - pys=select_pys(min_version="3.12"), + pys="3.12", pkgs={ "sanic": [latest], "sanic-testing": "~=22.3.0", @@ -2293,7 +2324,7 @@ def select_pys(min_version: str = MIN_PYTHON_VERSION, max_version: str 
= MAX_PYT pkgs={"asyncpg": ["~=0.27", latest]}, ), Venv( - pys=select_pys(min_version="3.12"), + pys=select_pys(min_version="3.12", max_version="3.12"), pkgs={"asyncpg": [latest]}, ), ], @@ -2326,7 +2357,7 @@ def select_pys(min_version: str = MIN_PYTHON_VERSION, max_version: str = MAX_PYT # sqlite3 is tied to the Python version and is not installable via pip # To test a range of versions without updating Python, we use Linux only pysqlite3-binary package # Remove pysqlite3-binary on Python 3.9+ locally on non-linux machines - Venv(pys=select_pys(min_version="3.9"), pkgs={"pysqlite3-binary": [latest]}), + Venv(pys=select_pys(min_version="3.9", max_version="3.12"), pkgs={"pysqlite3-binary": [latest]}), Venv(pys=select_pys(max_version="3.8"), pkgs={"importlib-metadata": latest}), ], ), @@ -2391,7 +2422,7 @@ def select_pys(min_version: str = MIN_PYTHON_VERSION, max_version: str = MAX_PYT ), Venv( name="consul", - pys=select_pys(), + pys=select_pys(max_version="3.12"), command="pytest {cmdargs} tests/contrib/consul", pkgs={ "python-consul": [ @@ -2505,8 +2536,8 @@ def select_pys(min_version: str = MIN_PYTHON_VERSION, max_version: str = MAX_PYT pkgs={"gevent": latest}, ), Venv( - pys=select_pys(min_version="3.12"), - pkgs={"gevent": latest}, + pys="3.12", + pkgs={"gevent": "~=23.9.0"}, ), ], ), @@ -2528,7 +2559,7 @@ def select_pys(min_version: str = MIN_PYTHON_VERSION, max_version: str = MAX_PYT ), Venv( # pyodbc added support for Python 3.11 in 4.0.35 - pys=select_pys(min_version="3.11"), + pys=select_pys(min_version="3.11", max_version="3.12"), pkgs={"pyodbc": [latest]}, ), ], @@ -2595,7 +2626,7 @@ def select_pys(min_version: str = MIN_PYTHON_VERSION, max_version: str = MAX_PYT ), Venv( # tornado added support for Python 3.10 in 6.2 - pys=select_pys(min_version="3.10"), + pys=select_pys(min_version="3.10", max_version="3.12"), pkgs={"tornado": ["==6.2", "==6.3.1"]}, ), ], @@ -2611,7 +2642,7 @@ def select_pys(min_version: str = MIN_PYTHON_VERSION, max_version: str = 
MAX_PYT ), Venv( # mysqlclient added support for Python 3.9/3.10 in 2.1 - pys=select_pys(min_version="3.9"), + pys=select_pys(min_version="3.9", max_version="3.12"), pkgs={"mysqlclient": ["~=2.1", latest]}, ), ], @@ -2680,7 +2711,7 @@ def select_pys(min_version: str = MIN_PYTHON_VERSION, max_version: str = MAX_PYT "cohere": latest, "anthropic": "==0.26.0", }, - pys=select_pys(min_version="3.9"), + pys=select_pys(min_version="3.9", max_version="3.12"), ), Venv( pkgs={ @@ -2698,14 +2729,14 @@ def select_pys(min_version: str = MIN_PYTHON_VERSION, max_version: str = MAX_PYT "botocore": latest, "cohere": latest, }, - pys=select_pys(min_version="3.9"), + pys=select_pys(min_version="3.9", max_version="3.12"), ), ], ), Venv( name="anthropic", command="pytest {cmdargs} tests/contrib/anthropic", - pys=select_pys(min_version="3.8"), + pys=select_pys(min_version="3.8", max_version="3.12"), pkgs={ "pytest-asyncio": latest, "vcrpy": latest, @@ -2715,7 +2746,7 @@ def select_pys(min_version: str = MIN_PYTHON_VERSION, max_version: str = MAX_PYT Venv( name="google_generativeai", command="pytest {cmdargs} tests/contrib/google_generativeai", - pys=select_pys(min_version="3.9"), + pys=select_pys(min_version="3.9", max_version="3.12"), pkgs={ "pytest-asyncio": latest, "google-generativeai": [latest], @@ -2727,7 +2758,7 @@ def select_pys(min_version: str = MIN_PYTHON_VERSION, max_version: str = MAX_PYT Venv( name="vertexai", command="pytest {cmdargs} tests/contrib/vertexai", - pys=select_pys(min_version="3.9"), + pys=select_pys(min_version="3.9", max_version="3.12"), pkgs={ "pytest-asyncio": latest, "vertexai": [latest], @@ -2791,7 +2822,7 @@ def select_pys(min_version: str = MIN_PYTHON_VERSION, max_version: str = MAX_PYT pkgs={"confluent-kafka": ["~=1.9.2", latest]}, ), # confluent-kafka added support for Python 3.11 in 2.0.2 - Venv(pys=select_pys(min_version="3.11"), pkgs={"confluent-kafka": latest}), + Venv(pys=select_pys(min_version="3.11", max_version="3.12"), 
pkgs={"confluent-kafka": latest}), ], ), ], @@ -2808,6 +2839,15 @@ def select_pys(min_version: str = MIN_PYTHON_VERSION, max_version: str = MAX_PYT "envier": "==0.5.2", }, ), + Venv( + name="azure_functions", + command="pytest {cmdargs} tests/contrib/azure_functions", + pys=select_pys(min_version="3.7", max_version="3.11"), + pkgs={ + "azure.functions": latest, + "requests": latest, + }, + ), Venv( name="sourcecode", command="pytest {cmdargs} tests/sourcecode", @@ -2820,7 +2860,7 @@ def select_pys(min_version: str = MIN_PYTHON_VERSION, max_version: str = MAX_PYT Venv( name="ci_visibility", command="pytest --no-ddtrace {cmdargs} tests/ci_visibility", - pys=select_pys(), + pys=select_pys(max_version="3.12"), pkgs={ "msgpack": latest, "coverage": latest, @@ -2965,7 +3005,7 @@ def select_pys(min_version: str = MIN_PYTHON_VERSION, max_version: str = MAX_PYT ), # Python 3.12 Venv( - pys=select_pys(min_version="3.12"), + pys="3.12", pkgs={"uwsgi": latest}, venvs=[ Venv( @@ -3105,7 +3145,7 @@ def select_pys(min_version: str = MIN_PYTHON_VERSION, max_version: str = MAX_PYT ), # Python 3.12 Venv( - pys=select_pys(min_version="3.12"), + pys="3.12", pkgs={"uwsgi": latest}, venvs=[ Venv( diff --git a/scripts/cformat.sh b/scripts/cformat.sh index b7d4fe46a2f..b12119e639a 100755 --- a/scripts/cformat.sh +++ b/scripts/cformat.sh @@ -1,43 +1,95 @@ -#!/bin/bash +#!/usr/bin/env bash set -e -# For more modern versions: -# clang-format --dry-run -Werror file.c -# would be enough… - -clean () -{ - rm -f "$CFORMAT_TMP" +clean() { + rm -f "$CFORMAT_TMP" 2>/dev/null || true } trap clean EXIT -if [[ "$1" == "update" ]] -then - THIS_PATH="$(realpath "$0")" - THIS_DIR="$(dirname $(dirname "$THIS_PATH"))" - # Find .c, , .h, .cpp, and .hpp files, excluding specified directories - find "$THIS_DIR" -type f \( -name '*.c' -o -name '*.h' -o -name '*.cpp' -o -name '*.hpp' \) \ - | grep -v '.eggs/' \ - | grep -v 'dd-trace-py/build/' \ - | grep -v '_taint_tracking/CMakeFiles' \ - | grep -v 
'_taint_tracking/_deps/' \ - | grep -v '.riot/' \ - | grep -v 'ddtrace/vendor/' \ - | grep -v '_taint_tracking/_vendor/' \ - | grep -v 'ddtrace/appsec/_iast/_taint_tracking/cmake-build-debug/' \ - | grep -v '^ddtrace/appsec/_iast/_taint_tracking/_vendor/' \ - | while IFS= read -r file; do - clang-format -i $file - echo "Formatting $file" +# Exclude patterns applied to file list +exclude_patterns() { + local patterns=( + '^ddtrace/vendor/' + '^ddtrace/appsec/_iast/_taint_tracking/_vendor/' + '.eggs/' + 'dd-trace-py/build/' + '_taint_tracking/CMakeFiles' + '_taint_tracking/_deps/' + '.riot/' + '_taint_tracking/_vendor/' + 'ddtrace/appsec/_iast/_taint_tracking/cmake-build-debug/' + ) + + # Join all patterns with '|' + local joined="$(IFS='|'; echo "${patterns[*]}")" + + grep -vE "${joined}" +} + +# Function to enumerate files depending on mode +enumerate_files() { + local extensions=( + '*.c' + '*.h' + '*.cpp' + '*.hpp' + ) + + if [[ "$ENUM_ALL" == "true" ]]; then + local find_conditions=() + for ext in "${extensions[@]}"; do + find_conditions+=("-o" "-name" "$ext") + done + unset 'find_conditions[-1]' + find "$BASE_DIR" -type f \( "${find_conditions[@]}" \) + else + git ls-files "${extensions[@]}" + fi +} + +# Script defaults +UPDATE_MODE=false +ENUM_ALL=false +BASE_DIR=$(dirname "$(realpath "$0")") +CLANG_FORMAT=clang-format + +# NB: consumes the arguments +while (( "$#" )); do + case "$1" in + --fix|-fix|fix) + UPDATE_MODE="true" + ;; + --all|-all|all) + ENUM_ALL="true" + ;; + *) + ;; + esac done + +# Environment variable overrides +[[ -n "${CFORMAT_FIX:-}" ]] && UPDATE_MODE=true +[[ -n "${CFORMAT_ALL:-}" ]] && ENUM_ALL=true +[[ -n "${CFORMAT_BIN:-}" ]] && CLANG_FORMAT="$CLANG_FORMAT_BIN" + +if [[ "$UPDATE_MODE" == "true" ]]; then + # Update mode: Format files in-place + enumerate_files \ + | exclude_patterns \ + | while IFS= read -r file; do + ${CLANG_FORMAT} -i "$file" + echo "Formatting $file" + done else - git ls-files '*.c' '*.h' '*.cpp' '*.hpp' | grep -v 
'^ddtrace/vendor/' | grep -v '^ddtrace/appsec/_iast/_taint_tracking/_vendor/' | while read filename - do - CFORMAT_TMP=`mktemp` - clang-format "$filename" > "$CFORMAT_TMP" - diff -u "$filename" "$CFORMAT_TMP" - rm -f "$CFORMAT_TMP" - done + # Check mode: Compare formatted output to existing files + enumerate_files \ + | exclude_patterns \ + | while IFS= read -r filename; do + CFORMAT_TMP=$(mktemp) + ${CLANG_FORMAT} "$filename" > "$CFORMAT_TMP" + diff -u "$filename" "$CFORMAT_TMP" || true + rm -f "$CFORMAT_TMP" + done fi diff --git a/scripts/cmake-format.sh b/scripts/cmake-format.sh index 401f4d07fd5..0c272e4eb32 100755 --- a/scripts/cmake-format.sh +++ b/scripts/cmake-format.sh @@ -1,26 +1,50 @@ -#!/bin/bash +#!/usr/bin/env bash set -e -# Navigate to the root of the repository, which is one level up from the directory containing this script. -SCRIPT_ROOT="$(cd -- "$(dirname -- "${BASH_SOURCE[0]}")" &> /dev/null && pwd)" -cd "$SCRIPT_ROOT/.." - -# Set some options for cmake-format -# If --update is passed as first arg, or if CMAKE_FORMAT_FIX_ALL is set, update the files in place +# Script defaults +UPDATE_MODE=false +ENUM_ALL=false CMD_OPT="--check" -if [[ "${1:-}" == "--update" || -n "${CMAKE_FORMAT_FIX_ALL:-}" ]]; then - CMD_OPT="--in-place" -fi +BASE_DIR=$(dirname "$(realpath "$0")") +CMAKE_FORMAT="cmake-format" + +# NB: consumes the arguments +while (( "$#" )); do + case "$1" in + --fix|-fix|fix) + UPDATE_MODE=true + ;; + --all|-all|all) + ENUM_ALL=true + ;; + *) + ;; + esac + shift +done + +# Environment variable overrides +[[ -n "${CMAKE_FORMAT_FIX:-}" || "$UPDATE_MODE" == "true" ]] && CMD_OPT="--in-place" +[[ -n "${CMAKE_FORMAT_ALL:-}" ]] && ENUM_ALL=true +[[ -n "${CMAKE_FORMAT_BIN:-}" ]] && CMAKE_FORMAT="$CMAKE_FORMAT_BIN" -# If the CMAKE_FORMAT_CHECK_ALL environnment variable is truthy, check all files -# else, just check the files that have been modified -if [[ -n "${CMAKE_FORMAT_CHECK_ALL:-}" ]]; then - FILES=$(find . 
-name '*.cmake' -o -name 'CMakeLists.txt' | grep -vE '^./build/' | grep -vE '_vendor/') -else - FILES=$(git diff --name-only HEAD | grep -E '\.cmake$|CMakeLists.txt' | grep -vE '^build/' | grep -vE '_vendor/' || true) -fi +# Enumerate files function +enumerate_files() { + if [[ "$ENUM_ALL" == true ]]; then + find $BASE_DIR \( -name '*.cmake' -o -name 'CMakeLists.txt' \) + else + git ls-files \ + | grep -E '\.cmake$|CMakeLists.txt' || true + fi +} + +# Enumerate and filter files +FILES=$(enumerate_files | grep -vE '^(\./)?build/' | grep -vE '_vendor/') # Run cmake-format on all files -for file in $FILES; do - cmake-format -c "scripts/.cmake-format" $CMD_OPT "$file" -done +# Use a process substitution to allow iterating safely +while IFS= read -r file; do + [[ -n "$file" ]] || continue + ${CMAKE_FORMAT} -c "scripts/.cmake-format" $CMD_OPT "$file" +done <<< "$FILES" + diff --git a/scripts/freshvenvs.py b/scripts/freshvenvs.py index 61a381d8fa6..13cd81a6fcc 100644 --- a/scripts/freshvenvs.py +++ b/scripts/freshvenvs.py @@ -4,6 +4,7 @@ from http.client import HTTPSConnection from io import StringIO import json +from operator import itemgetter import os import pathlib import sys @@ -21,7 +22,9 @@ CONTRIB_ROOT = pathlib.Path("ddtrace/contrib") LATEST = "" +excluded = {"coverage"} suite_to_package = { + "kafka": "confluent-kafka", "consul": "python-consul", "snowflake": "snowflake-connector-python", "flask_cache": "flask-caching", @@ -30,11 +33,35 @@ "asyncio": "pytest-asyncio", "sqlite3": "pysqlite3-binary", "grpc": "grpcio", + "google_generativeai": "google-generativeai", "psycopg2": "psycopg2-binary", "cassandra": "cassandra-driver", "rediscluster": "redis-py-cluster", + "dogpile_cache": "dogpile-cache", + "vertica": "vertica_python", } + +# mapping the name of the module to the name of the package (on pypi and as defined in lockfiles) +mapping_module_to_package = { + "confluent_kafka": "confluent-kafka", + "snowflake": "snowflake-connector-python", + "cassandra": 
"cassandra-driver", + "rediscluster": "redis-py-cluster", + "vertica_python": "vertica-python", + "flask_cache": "flask-cache", + "flask_caching": "flask-caching", + "consul": "python-consul", + "grpc": "grpcio", + "graphql": "graphql-core", + "mysql": "pymysql", +} + + +supported_versions = [] # list of dicts +pinned_packages = set() + + class Capturing(list): def __enter__(self): self._stdout = sys.stdout @@ -77,14 +104,16 @@ def _get_riot_envs_including_any(modules: typing.Set[str]) -> typing.Set[str]: with open(f".riot/requirements/{item}", "r") as lockfile: lockfile_content = lockfile.read() for module in modules: - if module in lockfile_content: + if module in lockfile_content or ( + module in suite_to_package and suite_to_package[module] in lockfile_content + ): envs |= {item.split(".")[0]} break return envs def _get_updatable_packages_implementing(modules: typing.Set[str]) -> typing.Set[str]: - """Return all packages that can be updated and have contribs implemented for them""" + """Return all packages have contribs implemented for them""" all_venvs = riotfile.venv.venvs for v in all_venvs: @@ -92,12 +121,18 @@ def _get_updatable_packages_implementing(modules: typing.Set[str]) -> typing.Set if package not in modules: continue if not _venv_sets_latest_for_package(v, package): - modules.remove(package) + pinned_packages.add(package) packages = {m for m in modules if "." not in m} return packages +def _get_all_modules(modules: typing.Set[str]) -> typing.Set[str]: + """Return all packages have contribs implemented for them""" + contrib_modules = {m for m in modules if "." 
not in m} + return contrib_modules + + def _get_version_extremes(package_name: str) -> typing.Tuple[Optional[str], Optional[str]]: """Return the (earliest, latest) supported versions of a given package""" with Capturing() as output: @@ -134,16 +169,27 @@ def _get_version_extremes(package_name: str) -> typing.Tuple[Optional[str], Opti def _get_package_versions_from(env: str, packages: typing.Set[str]) -> typing.List[typing.Tuple[str, str]]: - """Return the list of package versions that are tested""" + """Return the list of package versions that are tested, related to the modules""" + # Returns [(package, version), (package, versions)] lockfile_content = pathlib.Path(f".riot/requirements/{env}.txt").read_text().splitlines() lock_packages = [] for line in lockfile_content: package, _, versions = line.partition("==") + # remap the package -> module name if package in packages: lock_packages.append((package, versions)) + return lock_packages +def _is_module_autoinstrumented(module: str) -> bool: + import importlib + + _monkey = importlib.import_module("ddtrace._monkey") + PATCH_MODULES = getattr(_monkey, "PATCH_MODULES") + + return module in PATCH_MODULES and PATCH_MODULES[module] + def _versions_fully_cover_bounds(bounds: typing.Tuple[str, str], versions: typing.List[str]) -> bool: """Return whether the tested versions cover the full range of supported versions""" if not versions: @@ -173,12 +219,25 @@ def _venv_sets_latest_for_package(venv: riotfile.Venv, suite_name: str) -> bool: return False -def main(): - all_required_modules = _get_integrated_modules() - all_required_packages = _get_updatable_packages_implementing(all_required_modules) - envs = _get_riot_envs_including_any(all_required_modules) +def _get_all_used_versions(envs, packages) -> dict: + # Returns dict(module, set(versions)) for a venv, as defined in riotfiles. 
+ all_used_versions = defaultdict(set) + for env in envs: + versions_used = _get_package_versions_from(env, packages) # returns list of (package, versions) + for package, version in versions_used: + all_used_versions[package].add(version) + return all_used_versions + +def _get_version_bounds(packages) -> dict: + # Return dict(module: (earliest, latest)) of the module on PyPI bounds = dict() + for package in packages: + earliest, latest = _get_version_extremes(package) + bounds[package] = (earliest, latest) + return bounds + +def output_outdated_packages(all_required_packages, envs, bounds): for package in all_required_packages: earliest, latest = _get_version_extremes(package) bounds[package] = (earliest, latest) @@ -194,10 +253,55 @@ def main(): if not ordered: continue if not _versions_fully_cover_bounds(bounds[package], ordered): - print( - f"{package}: policy supports version {bounds[package][0]} through {bounds[package][1]} " - f"but only these versions are used: {[str(v) for v in ordered]}" - ) + print(f"{package}") + +def generate_supported_versions(contrib_packages, all_used_versions, patched): + for mod in mapping_module_to_package: + contrib_packages.remove(mod) + contrib_packages.add(mapping_module_to_package[mod]) + patched[mapping_module_to_package[mod]] = _is_module_autoinstrumented(mod) + + # Generate supported versions + for package in contrib_packages: + ordered = sorted([Version(v) for v in all_used_versions[package]], reverse=True) + if not ordered: + continue + json_format = { + "integration": package, + "minimum_tracer_supported": str(ordered[-1]), + "max_tracer_supported": str(ordered[0]), + } + + if package in pinned_packages: + json_format["pinned"] = "true" + + if package not in patched: + patched[package] = _is_module_autoinstrumented(package) + json_format["auto-instrumented"] = patched[package] + supported_versions.append(json_format) + + supported_versions_output = sorted(supported_versions, key=itemgetter("integration")) + with 
open("supported_versions_output.json", "w") as file: + json.dump(supported_versions_output, file, indent=4) + +def main(): + all_required_modules = _get_integrated_modules() + all_required_packages = _get_updatable_packages_implementing(all_required_modules) # these are MODULE names + contrib_modules = _get_all_modules(all_required_modules) + envs = _get_riot_envs_including_any(all_required_modules) + patched = {} + + contrib_packages = contrib_modules + all_used_versions = _get_all_used_versions(envs, contrib_packages) + bounds = _get_version_bounds(contrib_packages) + + if len(sys.argv) != 2: + print("usage: python scripts/freshvenvs.py or ") + return + if sys.argv[1] == "output": + output_outdated_packages(all_required_packages, envs, bounds) + if sys.argv[1] == "generate": + generate_supported_versions(contrib_packages, all_used_versions, patched) if __name__ == "__main__": diff --git a/scripts/gen_circleci_config.py b/scripts/gen_circleci_config.py index 0c7c8344e58..627a3715427 100644 --- a/scripts/gen_circleci_config.py +++ b/scripts/gen_circleci_config.py @@ -17,10 +17,9 @@ def gen_required_suites(template: dict) -> None: required_suites = template["requires_tests"]["requires"] = [] for_each_testrun_needed( suites=sorted( - set(n.rpartition("::")[-1] for n, s in get_suites().items() if not s.get("skip", False)) - & set(template["jobs"].keys()) + set(n for n, s in get_suites().items() if not s.get("skip", False)) & set(template["jobs"].keys()) ), - action=lambda suite: required_suites.append(suite), + action=lambda suite: required_suites.append(suite.rpartition("::")[-1]), git_selections=extract_git_commit_selections(os.getenv("GIT_COMMIT_DESC", "")), ) @@ -52,7 +51,7 @@ def check(name: str, command: str, paths: t.Set[str]) -> None: check( name="Style", command="hatch run lint:style", - paths={"docker*", "*.py", "*.pyi", "hatch.toml", "pyproject.toml", "*.cpp", "*.h"}, + paths={"docker*", "*.py", "*.pyi", "hatch.toml", "pyproject.toml", "*.cpp", "*.h", 
"CMakeLists.txt"}, ) check( name="Typing", diff --git a/scripts/gen_gitlab_config.py b/scripts/gen_gitlab_config.py index 2b139ce798d..8dc9e5b178f 100644 --- a/scripts/gen_gitlab_config.py +++ b/scripts/gen_gitlab_config.py @@ -15,7 +15,7 @@ class JobSpec: runner: str pattern: t.Optional[str] = None snapshot: bool = False - services: t.Optional[t.Set[str]] = None + services: t.Optional[t.List[str]] = None env: t.Optional[t.Dict[str, str]] = None parallelism: t.Optional[int] = None retry: t.Optional[int] = None @@ -32,16 +32,25 @@ def __str__(self) -> str: lines.append(f"{self.name}:") lines.append(f" extends: {base}") - if self.services: + services = set(self.services or []) + if services: lines.append(" services:") - _services = [f"!reference [.services, {_}]" for _ in self.services] + _services = [f"!reference [.services, {_}]" for _ in services] if self.snapshot: _services.insert(0, f"!reference [{base}, services]") for service in _services: lines.append(f" - {service}") + wait_for: t.Set[str] = services.copy() + if self.snapshot: + wait_for.add("testagent") + if wait_for: + lines.append(" before_script:") + lines.append(f" - !reference [{base}, before_script]") + lines.append(f" - riot -v run -s --pass-env wait -- {' '.join(wait_for)}") + env = self.env if not env or "SUITE_NAME" not in env: env = env or {} @@ -86,10 +95,7 @@ def gen_required_suites() -> None: circleci_jobs = set(circleci_config["jobs"].keys()) # Copy the template file - TESTS_GEN.write_text( - (GITLAB / "tests.yml").read_text().replace(r"{{services.yml}}", (GITLAB / "services.yml").read_text()) - ) - + TESTS_GEN.write_text((GITLAB / "tests.yml").read_text()) # Generate the list of suites to run with TESTS_GEN.open("a") as f: for suite in required_suites: @@ -114,7 +120,7 @@ def check(name: str, command: str, paths: t.Set[str]) -> None: with TESTS_GEN.open("a") as f: print(f'"{name}":', file=f) print(" extends: .testrunner", file=f) - print(" stage: tests", file=f) + print(" stage: precheck", 
file=f) print(" needs: []", file=f) print(" script:", file=f) print(f" - {command}", file=f) @@ -154,16 +160,6 @@ def check(name: str, command: str, paths: t.Set[str]) -> None: command="hatch run lint:suitespec-check", paths={"*"}, ) - check( - name="conftest", - command="hatch run meta-testing:meta-testing", - paths={"**conftest.py"}, - ) - check( - name="slotscheck", - command="hatch run slotscheck:_", - paths={"**.py"}, - ) # ----------------------------------------------------------------------------- diff --git a/scripts/generate_table.py b/scripts/generate_table.py new file mode 100644 index 00000000000..1d7569b3e63 --- /dev/null +++ b/scripts/generate_table.py @@ -0,0 +1,24 @@ +import csv +import json + + +print("Reading supported_versions_output.json") + +with open("supported_versions_output.json", "r") as json_file: + data = json.load(json_file) + +columns = ["integration", "minimum_tracer_supported", "max_tracer_supported", "auto-instrumented"] +csv_rows = [] + +for entry in data: + integration_name = entry.get("integration", "") + if entry.get("pinned", "").lower() == "true": + integration_name += " *" + entry["integration"] = integration_name + csv_rows.append({col: entry.get(col, "") for col in columns}) + +with open("supported_versions_table.csv", "w", newline="") as csv_file: + print("Wrote to supported_versions_table.csv") + writer = csv.DictWriter(csv_file, fieldnames=columns) + writer.writeheader() + writer.writerows(csv_rows) diff --git a/scripts/needs_testrun.py b/scripts/needs_testrun.py index 99ebba2c18c..01ba87299bd 100755 --- a/scripts/needs_testrun.py +++ b/scripts/needs_testrun.py @@ -8,7 +8,6 @@ import logging import os from pathlib import Path -import re from subprocess import check_output import sys import typing as t @@ -16,6 +15,8 @@ from urllib.request import Request from urllib.request import urlopen +from lxml import html + sys.path.insert(0, str(Path(__file__).parents[1])) @@ -26,20 +27,34 @@ LOGGER = logging.getLogger(__name__) 
-BASE_BRANCH_PATTERN = re.compile(r':([^<]+)') - @cache def get_base_branch(pr_number: int) -> str: """Get the base branch of a PR - >>> get_base_branch(6412) + >>> import vcr + >>> with vcr.use_cassette( + ... "scripts/vcr/needs_testrun.yaml", + ... filter_headers=["authorization", "user-agent"], + ... record_mode="none"): + ... get_base_branch(6412) + ... get_base_branch(11534) + ... get_base_branch(11690) '1.x' + '2.15' + 'main' """ pr_page_content = urlopen(f"https://github.com/DataDog/dd-trace-py/pull/{pr_number}").read().decode("utf-8") - return BASE_BRANCH_PATTERN.search(pr_page_content).group(1) + tree = html.fromstring(pr_page_content) + base_ref = tree.find_class("base-ref") + if base_ref: + ref = base_ref[0].text_content().strip() + # We might have `DataDog:1.x` or `DataDog:main` so we need to strip the prefix + _, _, ref = ref.rpartition(":") + return ref.strip() + return "main" @cache @@ -116,7 +131,12 @@ def get_changed_files(pr_number: int, sha: t.Optional[str] = None) -> t.Set[str] or if there is a specific SHA given, use the less accurate method of diffing against a base commit, either the given SHA or the merge-base. - >>> sorted(get_changed_files(6388)) # doctest: +NORMALIZE_WHITESPACE + >>> import vcr + >>> with vcr.use_cassette( + ... "scripts/vcr/needs_testrun.yaml", + ... filter_headers=["authorization", "user-agent"], + ... record_mode="none"): + ... sorted(get_changed_files(6388)) # doctest: +NORMALIZE_WHITESPACE ['ddtrace/debugging/_expressions.py', 'releasenotes/notes/fix-debugger-expressions-none-literal-30f3328d2e386f40.yaml', 'tests/debugging/test_expressions.py'] @@ -141,12 +161,19 @@ def get_changed_files(pr_number: int, sha: t.Optional[str] = None) -> t.Set[str] def needs_testrun(suite: str, pr_number: int, sha: t.Optional[str] = None) -> bool: """Check if a testrun is needed for a suite and PR - >>> needs_testrun("debugger", 6485) + >>> import vcr + >>> with vcr.use_cassette( + ... "scripts/vcr/needs_testrun.yaml", + ... 
filter_headers=["authorization", "user-agent"], + ... record_mode="none"): + ... needs_testrun("debugger", 6485) + ... needs_testrun("debugger", 6388) + ... needs_testrun("foobar", 6412) + ... needs_testrun("profile", 11690) True - >>> needs_testrun("debugger", 6388) True - >>> needs_testrun("foobar", 6412) True + False """ if "itr:noskip" in get_latest_commit_message().lower(): return True @@ -243,8 +270,13 @@ def extract_git_commit_selections(git_commit_message: str) -> t.Set[str]: def main() -> bool: argp = ArgumentParser() + try: + default_pr_number = _get_pr_number() + except RuntimeError: + default_pr_number = None + argp.add_argument("suite", help="The suite to use", type=str) - argp.add_argument("--pr", help="The PR number", type=int, default=_get_pr_number()) + argp.add_argument("--pr", help="The PR number", type=int, default=default_pr_number) argp.add_argument( "--sha", help="Commit hash to use as diff base (defaults to PR merge root)", type=lambda v: v or None ) @@ -255,6 +287,9 @@ def main() -> bool: if args.verbose: LOGGER.setLevel(logging.INFO) + if not args.pr: + raise RuntimeError("Could not determine PR number") + return needs_testrun(args.suite, args.pr, sha=args.sha) diff --git a/scripts/regenerate-riot-latest.sh b/scripts/regenerate-riot-latest.sh index f0e68938a27..423a0524891 100755 --- a/scripts/regenerate-riot-latest.sh +++ b/scripts/regenerate-riot-latest.sh @@ -3,7 +3,7 @@ set -e DDTEST_CMD=scripts/ddtest -pkgs=$(python scripts/freshvenvs.py | cut -d':' -f1) +pkgs=$(python scripts/freshvenvs.py output) echo $pkgs if ! 
$DDTEST_CMD; then @@ -20,7 +20,8 @@ for pkg in ${pkgs[*]}; do echo "No riot hashes found for pattern: $VENV_NAME" else echo "VENV_NAME=$VENV_NAME" >> $GITHUB_ENV - for h in ${RIOT_HASHES[@]}; do + for h in ${RIOT_HASHES[@]}; do + echo "Removing riot lockfiles" rm ".riot/requirements/${h}.txt" done scripts/compile-and-prune-test-requirements diff --git a/scripts/run-test-suite b/scripts/run-test-suite index cca6bb5262e..8664decde25 100755 --- a/scripts/run-test-suite +++ b/scripts/run-test-suite @@ -47,7 +47,7 @@ set -e if ! [[ -v CIRCLECI && $CIRCLE_BRANCH =~ main ]]; then if [[ -f "$CHECKPOINT_FILENAME" ]]; then latest_success_commit=$(cat $CHECKPOINT_FILENAME) - if ! ./scripts/needs_testrun.py $CIRCLE_JOB --sha $latest_success_commit; then + if ! hatch run scripts:needs_testrun $CIRCLE_JOB --sha $latest_success_commit; then echo "The $CIRCLE_JOB job succeeded at commit $latest_success_commit." echo "None of the changes on this branch since that commit affect the $CIRCLE_JOB job." echo "Skipping this job." diff --git a/scripts/run-test-suite-hatch b/scripts/run-test-suite-hatch index 0a8d1c1d765..38754de9ac9 100755 --- a/scripts/run-test-suite-hatch +++ b/scripts/run-test-suite-hatch @@ -33,7 +33,7 @@ set -e if ! [[ -v CIRCLECI && $CIRCLE_BRANCH =~ main ]]; then if [[ -f "$CHECKPOINT_FILENAME" ]]; then latest_success_commit=$(cat $CHECKPOINT_FILENAME) - if ! ./scripts/needs_testrun.py $CIRCLE_JOB --sha $latest_success_commit; then + if ! hatch run scripts:needs_testrun $CIRCLE_JOB --sha $latest_success_commit; then echo "The $CIRCLE_JOB job succeeded at commit $latest_success_commit." echo "None of the changes on this branch since that commit affect the $CIRCLE_JOB job." echo "Skipping this job." 
diff --git a/scripts/vcr/needs_testrun.yaml b/scripts/vcr/needs_testrun.yaml new file mode 100644 index 00000000000..f68dca107eb --- /dev/null +++ b/scripts/vcr/needs_testrun.yaml @@ -0,0 +1,23996 @@ +interactions: +- request: + body: null + headers: + Connection: + - close + Host: + - github.com + method: GET + uri: https://github.com/DataDog/dd-trace-py/pull/6412 + response: + body: + string: "\n\n\n\n\n\n\n\n\n\n\n\n \n \n + \ \n \n \n \n + \ \n + \ \n\n + \ \n\n \n\n \n \n \n \n \n\n\n \n\n\n \n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n + \ \n \n\n\n\n\n\n\n\n\n\n\n\n\n ci: run the debugger suite only if necessary by P403n1x87 + \xB7 Pull Request #6412 \xB7 DataDog/dd-trace-py \xB7 GitHub\n\n\n\n + \ \n \n \n\n \n \n\n\n + \ \n\n\n \n\n\n \n \n\n \n \n\n + \ \n\n\n\n \n\n \n\n\n\n\n \n\n \n\n \n\n + \ \n\n \n\n \n\n \n \n \n\n \n \n \n\n\n\n\n \n\n\n\n + \ \n\n\n \n \n \n \n\n \n\n \n + \ \n\n + \ \n\n\n\n \n\n \n\n\n \n\n \n\n \n \n + \ \n\n\n\n\n\n \n\n + \ \n\n \n
\n \n\n\n
\n Skip to content\n\n + \ \n \n + \ \n \n \n\n\n\n\n\n\n\n\n\n \n \n + \
\n\n\n\n\n\n + \ \n\n \n\n \n\n\n
\n

Navigation Menu

\n\n \n\n + \
\n
\n
\n + \ \n
\n\n \n + \ \n + \ \n\n + \ \n\n
\n \n Sign in\n \n
\n
\n\n\n + \
\n
\n + \ \n\n
\n \n\n\n\n \n \n
\n \n \n\n + \
\n Search + or jump to...\n
\n + \ \n\n + \
\n \n\n \n\n \n
\n \n + \

Search + code, repositories, users, issues, pull requests...

\n
\n \n
+ \
\n
\n \n
\n \n \n \n \n \n\n \n
\n
\n
\n
\n + \ \n
\n + \
\n Clear\n + \ \n\n + \
\n \n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n + \
\n \n + \
\n + \
\n
\n\n \n + \
\n
\n\n
\n
\n
\n \n
\n + \ \n\n \n
\n + \
\n
\n + \

\n Provide + feedback\n

\n \n
\n
\n + \ \n
\n
\n + \ \n
\n \n + \
\n

We read every piece of feedback, and take your input very + seriously.

\n \n \n + \ \n
\n
\n + \ \n
\n\n \n \n\n + \ \n
\n
\n + \
\n

\n Saved searches\n

\n + \

Use + saved searches to filter your results more quickly

\n
\n
\n \n + \
\n
\n \n
\n \n + \
\n\n \n\n
\n + \
\n
\n\n
\n + \
\n \n
\n + \
\n
\n\n\n
\n \n Sign in\n \n + \
\n\n \n Sign + up\n \n \n
\n + \
\n
\n \n\n\n \n \n\n + \
\n\n\n\n\n\n\n\n\n + \
\n\n\n + \ \n\n\n + \ \n
\n\n\n + \ \n\n\n\n\n\n\n \n
\n
\n \n \n\n\n\n + \ \n \n\n \n\n\n\n\n\n\n \n
\n\n
\n\n + \
\n \n
\n + \ \n \n\n + \ \n \n + \ \n DataDog\n + \ \n /\n + \ \n dd-trace-py\n \n\n Public\n
\n\n\n + \
\n\n
\n \n\n + \
\n
\n\n
\n
\n\n\n \n\n + \
\n\n \n\n\n\n\n
\n \n\n\n\n \n \n
\n \n\n
\n \n \n \n\n
\n
\n
\n\n \n
\n \n \n New issue\n \n \n + \
\n
\n \n \n\n
\n\n
\n

\n Have a question + about this project? Sign up for a free GitHub account to open an + issue and contact its maintainers and the community.\n

\n\n \n\n

By + clicking “Sign up for GitHub”, you agree to our terms of service + and\n privacy statement. We\u2019ll occasionally send you + account related emails.

\n\n

\n + \ Already on GitHub?\n Sign + in\n to your account\n

\n
\n\n
\n
\n
\n + \ \n + \
\n\n

\n ci: + run the debugger suite only if necessary\n #6412\n

\n
\n
\n\n
\n
\n \n + \ Merged\n\n
\n\n\n\n\n + \
\n P403n1x87\n + \ merged 7 commits into\n\n\n DataDog:1.x\n\nfrom\n\nP403n1x87:ci/debugger-suitespec\n \n \n \n\n \n \n\n + \
\n
\n\n\n + \ Jul 25, + 2023\n\n\n
\n
\n\n\n \n\n\n\n
\n
\n
\n
\n + \
\n \n Merged\n\n + \
\n\n\n\n\n
\n + \

\n \n ci: run the debugger suite only if necessary\n \n + \ #6412\n

\n\n + \
\n P403n1x87\n merged 7 commits into\n\n\n DataDog:1.x\n\nfrom\n\nP403n1x87:ci/debugger-suitespec\n \n \n \n\n \n \n\n + \
\n
\n\n\n + \ Jul 25, + 2023\n\n\n
\n
\n
\n + \
\n
\n
\n
\n
\n\n\n\n + \ \n + \ \n\n\n + \ \n\n\n
\n + \
\n

Conversation

\n + \ \n \n\n\n \n\n
\n\n
\n \"P403n1x87\"\n + \ \n \n
\n + \
\n
\n
\n
\n \n \n \n\n \n\n\n \n \n \n \n Copy + link\n\n
\n
\n + \
\n
\n\n
\n \n\n + \ \n + \ Contributor\n\n\n + \ \n\n
\n\n

\n + \
\n \"@P403n1x87\"\n\n \n + \ P403n1x87\n \n\n \n\n \n\n commented\n\n\n + \ Jul + 20, 2023\n\n\n \n + \ \n\n
\n + \ \n
\n \n edited by majorgreys\n + \ \n \n \n \n\n
\n
\n \n \n \n + \ \n \n + \ \n Loading\n\n \n \n + \
\n
\n\n
\n\n

\n
\n\n
\n + \
\n \n \n + \
\n

We introduce the concept + of suitespec as a way of describing how sources affect test runs. We use it + to ensure that the debugger tests run only if sources that the suite depends + on are modified by the current commit.

\n

Suitespec Implementation + Details

\n

The suitespec solution is based on a manual + configuration of of test suites. To simplify the declaration of file patterns + for test suites, one can make use of components, which essentially + are a logic collection of patterns. Test suite can then be declared as a list + of components to reflect their dependencies on these logic parts, and to DRY + the declaration itself by avoiding repetitions.

\n

Notes

\n
    \n
  • When the script fails for any reason, tests are run.
  • \n
  • It + is important that path patterns are listed correctly, or some tests might + not run when they are in fact supposed to.
  • \n
  • Best effort to determine + the correct list of changed files via the GitHub REST API. When that fails, + we fall back to the less accurate git diff + against the target branch.
  • \n
\n

Checklist

\n
    \n
  • Change(s) + are motivated and described in the PR description.
  • \n
  • Testing strategy is described if automated tests are not included + in the PR.
  • \n
  • Risk is outlined + (performance impact, potential for breakage, maintainability, etc).
  • \n
  • Change is maintainable (easy to change, telemetry, documentation).
  • \n
  • Library release note guidelines are followed. If no release + note is required, add label changelog/no-changelog.
  • \n
  • Documentation is included (in-code, generated user docs, public corp docs).
  • \n
  • Backport labels are set (if applicable)
  • \n
\n

Reviewer Checklist

\n
    \n
  • Title + is accurate.
  • \n
  • No unnecessary + changes are introduced.
  • \n
  • Description + motivates each change.
  • \n
  • Avoids + breaking API changes unless absolutely necessary.
  • \n
  • Testing strategy adequately addresses listed risk(s).
  • \n
  • Change is maintainable (easy to change, telemetry, documentation).
  • \n
  • Release note makes sense to a user of the library.
  • \n
  • Reviewer has explicitly acknowledged and discussed the performance + implications of this PR as reported in the benchmarks PR comment.
  • \n
  • Backport labels are set in a manner that is consistent with + the release branch maintenance policy
  • \n
\n
\n + \
\n \n
\n\n
\n\n
\n \n + \

\n \n + \ \n \n \n\n

\n \n\n\n\n + \
\n
\n
\n \n
\n
\n + \ \n
\n
\n
\n + \
\n\n
\n
\n
\n\n\n \n\n \n
\n\n\n
\n \n
\n + \
\n \n \n\n
\n
\n\n + \ \n\n \"@P403n1x87\"\nP403n1x87\n\n\n\n\n added\n the \n\n changelog/no-changelog\n\n A changelog + entry is not required for this PR.\n label\n\n\n Jul 20, 2023\n\n
\n
\n\n\n\n\n
\n\n + \
\n \n
\n \n
\n \"P403n1x87\"\n + \
\n \n
\n + \
\n + \ \n P403n1x87\n + \ \n\n \n\n commented\n\n\n \n \n + \ Jul + 20, 2023\n \n \n \n + \
\n\n \n
\n
\n\n + \
\n + \ \n \n
+ \ \n + \
\n + \
\n \n riotfile.py\n\n + \ \n Outdated\n \n \n \nShow + resolved\n \n \nHide resolved\n + \
\n
\n
\n + \ \n \n \n \n\n \n
\n
\n\n\n\n\n
\n
+ \
\n
\n\n\n
\n\n
\n \n
\n + \ \n
\n \n
\n + \
\"@P403n1x87\"\n P403n1x87\n\n\n force-pushed\n + \ the\n \n \n ci/debugger-suitespec\n\n\n + \ \n branch\n 4 times, most recently\n from\n 8953a58 + \ to\n 575d15e + \ \n + \ Compare\n \n\n\n\n July 20, 2023 13:13 \n + \ \n
\n
\n\n\n
\n\n
\n \n
\n \n
\n \"emmettbutler\"\n + \
\n \n
\n + \
\n + \ \n emmettbutler\n + \ \n\n \n\n reviewed\n\n\n \n \n + \ Jul + 20, 2023\n \n \n \n + \
\n\n \n
\n
\n\n + \
\n + \ \n \n
+ \ \n + \
\n
\n + \ \n scripts/needs_testrun.py\n\n + \ \n Outdated\n \n \n \nShow + resolved\n \n \nHide resolved\n + \
\n
\n
\n + \ \n \n \n \n\n \n
\n
\n\n\n\n\n
\n
+ \
\n
\n\n\n
\n\n
\n \n \n
\n\n
\n + \ \"@emmettbutler\"\n\n
\n\n\n
\n\n
\n + \
\n + \
\n + \
\n + \ \n + \ \n \n\n \n\n\n + \ \n \n \n \n Copy + link\n\n
\n
\n + \
\n
\n\n
\n \n\n + \ \n + \ Collaborator\n\n\n + \ \n\n
\n\n

\n + \
\n \n\n \n emmettbutler\n + \ \n\n \n\n \n\n commented\n\n\n Jul 20, 2023\n\n\n + \ \n
\n\n

\n
\n\n\n
\n\n + \ \n\n \n \n + \ \n \n + \ \n
\n + \

I love this idea!

\n
\n
\n\n\n
\n\n + \ \n\n
\n
\n + \
\n \n \n
\n
\n \n
\n
\n
\n
\n + \
\n
\n\n
\n \n + \

\n \n + \ \n \n \n\n

\n \n\n\n + \
\n
\n\n
\n\n\n
\n\n\n
\n + \ \n
\n \n
\n \"brettlangdon\"\n + \
\n \n
\n + \
\n + \ \n brettlangdon\n + \ \n\n \n\n reviewed\n\n\n \n \n + \ Jul + 20, 2023\n \n \n \n + \
\n\n \n
\n
\n + \
\n + \ \n \n
\n
\n
\n
\n \n \n \n\n \n\n\n \n \n \n \n Copy + link\n\n
\n
\n + \
\n
\n\n
\n \n\n + \ \n Member\n\n\n \n\n
\n\n

\n
\n \"@brettlangdon\"\n\n \n brettlangdon\n \n\n \n\n \n\n + \ left a comment\n\n\n\n\n \n
\n\n

\n
\n \n\n
\n
\n + \ \n
\n \n \n\n

Choose a reason for hiding this comment

\n\n + \

\n The reason will be displayed to describe this + comment to others. Learn more.\n + \

\n\n
\n \n \n
\n\n + \ \n
\n\n \n
\n

I know @gnufede was trying to get CI Visibility + running for this repo, if we go that route, we might be able to ITR ?

\n + \
\n
\n \n
\n\n\n
\n \n + \

\n \n + \ \n \n \n\n

\n \n\n\n\n + \
\n
\n + \
\n \n
\n
\n + \ \n
\n
\n
\n + \
\n\n
\n
\n
\n
\n \n \n
\n
\n
\n + \ \n tests/.suitespec.json\n\n + \ \n \n \nShow resolved\n + \ \n \nHide resolved\n + \
\n
\n
\n + \ \n \n \n \n\n \n
\n
\n\n\n\n\n
\n
+ \
\n
\n\n\n
\n\n
\n \n \n
\n\n
\n + \ \"@P403n1x87\"\n\n
\n\n\n
\n\n
\n + \
\n + \
\n + \
\n + \ \n + \ \n \n\n \n\n\n + \ \n \n \n \n Copy + link\n\n
\n
\n + \
\n
\n\n
\n \n\n + \ \n + \ Contributor\n\n\n + \ \n\n Author\n\n\n
\n\n

\n
\n \n\n \n + \ P403n1x87\n \n\n \n\n \n\n commented\n\n\n + \ Jul 20, 2023\n\n\n \n
\n\n + \

\n
\n\n\n
\n\n \n\n + \ \n \n \n \n + \ \n
\n
\n

I know @gnufede + was trying to get CI Visibility running for this repo, if we go that route, + we might be able to ITR ?

\n
\n

My understanding + is that ITR is a per-test rather than per-test-suite. So I see ITR improving + this even further rather than an alternative?

\n
\n
\n\n\n
\n\n + \ \n\n
\n
\n
\n \n \n
\n + \ emmettbutler reacted with thumbs up emoji\n + \
\n \n + \
\n
\n
\n
\n
\n + \
\n\n
\n \n + \

\n \n + \ \n \n \n\n

\n \n\n\n + \
\n
\n\n
\n\n\n
\n\n\n
\n + \ \n
\n \n
\n \n
\n + \
\"@P403n1x87\"\n P403n1x87\n\n\n force-pushed\n + \ the\n \n \n ci/debugger-suitespec\n\n\n + \ \n branch\n 3 times, most recently\n from\n 713167a + \ to\n e8c3ecc + \ \n + \ Compare\n \n\n\n\n July 20, 2023 17:15 \n + \ \n
\n
\n
\n \n
\n \n
\n + \
\"@emmettbutler\"\n emmettbutler\n\n\n self-requested a review\n\n\n + \ July 20, 2023 21:23 \n + \ \n
\n
\n\n\n
\n\n
\n \n
\n \n
\n \"emmettbutler\"\n + \
\n \n
\n + \
\n + \ \n emmettbutler\n + \ \n\n \n\n previously approved these changes\n\n\n + \ \n \n + \ Jul + 20, 2023\n \n \n \n + \
\n\n \n
\n
\n\n + \
\n + \ \n \n
\n
\n
+ \
\n
\n\n\n
\n\n
\n \n
\n + \ \n
\n \n
\n
\"@P403n1x87\"\n P403n1x87\n\n\n dismissed\n emmettbutler\u2019s stale review\n\n\n + \ via\n \n 4e53e79\n + \ \n\n July + 21, 2023 09:41 \n \n
\n
\n
\n + \ \n
\n \n
\n + \
\"@P403n1x87\"\n P403n1x87\n\n\n force-pushed\n + \ the\n \n \n ci/debugger-suitespec\n\n\n + \ \n branch\n from\n e8c3ecc + \ to\n 4e53e79 + \ \n + \ Compare\n \n\n\n\n July 21, 2023 09:41 \n + \ \n
\n
\n\n\n
\n\n
\n \n
\n \n
\n \"P403n1x87\"\n + \
\n \n
\n + \
\n + \ \n P403n1x87\n + \ \n\n \n\n commented\n\n\n \n \n + \ Jul + 21, 2023\n \n \n \n + \
\n\n \n
\n
\n\n + \
\n + \ \n \n
+ \ \n + \
\n
\n + \ \n .circleci/config.yml\n\n + \ \n Outdated\n \n \n \nShow + resolved\n \n \nHide resolved\n + \
\n
\n
\n + \ \n \n \n \n\n \n
\n
\n\n\n\n\n
\n
+ \
\n
\n\n\n
\n\n
\n \n
\n + \ \n
\n \n
\n + \
\"@P403n1x87\"\n P403n1x87\n\n\n force-pushed\n + \ the\n \n \n ci/debugger-suitespec\n\n\n + \ \n branch\n 3 times, most recently\n from\n d2671c5 + \ to\n 19b0da0 + \ \n + \ Compare\n \n\n\n\n July 21, 2023 10:35 \n + \ \n
\n
\n
\n \n
\n \n
\n + \
\"@P403n1x87\"\n P403n1x87\n\n\n marked this pull request as + ready for review\n\n July + 21, 2023 10:41 \n \n
\n
\n
\n + \ \n
\n \n
\n + \
\"@P403n1x87\"\n P403n1x87\n\n\n requested review from\n a team\n\n as code owners\n\n\n + \ July 21, 2023 10:41 \n + \ \n
\n
\n
\n \n
\n \n
\n + \
\"@P403n1x87\"\n P403n1x87\n\n\n requested review from\n majorgreys, + \n jbertran, + \n brettlangdon, + \n emmettbutler + and \n a team\n\n\n\n July 21, 2023 10:41 \n + \ \n
\n
\n
\n \n
\n \n
\n + \
\"@P403n1x87\"\n P403n1x87\n\n\n force-pushed\n + \ the\n \n \n ci/debugger-suitespec\n\n\n + \ \n branch\n from\n af236d7 + \ to\n a4c0000 + \ \n + \ Compare\n \n\n\n\n July 21, 2023 15:26 \n + \ \n
\n
\n\n\n
\n\n\n
\n + \
\n
\n
\n \n \n
\n
\n + \
\n\n
\n \n
\n + \ \n
\n \n
\n + \
\"@P403n1x87\"\n P403n1x87\n\n\n force-pushed\n + \ the\n \n \n ci/debugger-suitespec\n\n\n + \ \n branch\n 2 times, most recently\n from\n c50870c + \ to\n e812418 + \ \n + \ Compare\n \n\n\n\n July 24, 2023 12:52 \n + \ \n
\n
\n\n\n
\n\n
\n + \ \n
\n \n
\n \"P403n1x87\"\n + \
\n \n
\n + \
\n + \ \n P403n1x87\n + \ \n\n \n\n commented\n\n\n \n \n + \ Jul + 24, 2023\n \n \n \n + \
\n\n \n
\n
\n\n + \
\n + \ \n \n
+ \ \n + \
\n + \
\n \n .circleci/config.templ.yml\n\n + \ \n \n \nShow resolved\n + \ \n \nHide resolved\n + \
\n
\n
\n + \ \n \n \n \n\n \n
\n
\n\n\n\n\n
\n
+ \
\n
\n\n\n
\n\n
\n \n
\n \n
\n \"P403n1x87\"\n + \
\n \n
\n + \
\n + \ \n P403n1x87\n + \ \n\n \n\n commented\n\n\n \n \n + \ Jul + 24, 2023\n \n \n \n + \
\n\n \n
\n
\n\n + \
\n + \ \n \n
+ \ \n + \
\n + \
\n \n .circleci/config.templ.yml\n\n + \ \n \n \nShow resolved\n + \ \n \nHide resolved\n + \
\n
\n
\n + \ \n \n \n \n\n \n
\n
\n\n\n\n\n
\n
+ \
\n
\n\n\n\n\n
\n \n
\n \n
\n \"P403n1x87\"\n + \
\n \n
\n + \
\n + \ \n P403n1x87\n + \ \n\n \n\n commented\n\n\n \n \n + \ Jul + 24, 2023\n \n \n \n + \
\n\n \n
\n
\n\n + \
\n + \ \n \n
+ \ \n + \
\n + \
\n \n .circleci/config.templ.yml\n\n + \ \n \n \nShow resolved\n + \ \n \nHide resolved\n + \
\n
\n
\n + \ \n \n \n \n\n \n
\n
\n\n\n\n\n
\n
+ \
\n
\n\n\n\n\n
\n \n
\n \n
\n \"emmettbutler\"\n + \
\n \n
\n + \
\n + \ \n emmettbutler\n + \ \n\n \n\n previously approved these changes\n\n\n + \ \n \n + \ Jul + 24, 2023\n \n \n \n + \
\n\n \n
\n
\n\n + \
\n + \ \n \n
\n
\n
+ \
\n
\n\n\n\n\n
\n \n
\n + \ \n
\n \n
\n
\"@brettlangdon\"\n brettlangdon\n\n\n dismissed\n emmettbutler\u2019s stale review\n\n\n + \ via\n \n cdb1444\n + \ \n\n July + 24, 2023 16:44 \n \n
\n
\n\n\n
\n\n + \
\n \n
\n \n
\n \"brettlangdon\"\n + \
\n \n
\n + \
\n + \ \n brettlangdon\n + \ \n\n \n\n reviewed\n\n\n \n \n + \ Jul + 24, 2023\n \n \n \n + \
\n\n \n
\n
\n\n + \
\n + \ \n \n
+ \ \n + \
\n + \
\n \n .circleci/config.templ.yml\n\n + \ \n Outdated\n \n \n \nShow + resolved\n \n \nHide resolved\n + \
\n
\n
\n + \ \n \n \n \n\n \n
\n
\n\n\n\n\n
\n
+ \
\n
\n\n\n\n\n
\n \n
\n + \ \n
\n \n
\n + \
\"@P403n1x87\"\n P403n1x87\n\n\n requested review from\n brettlangdon + and \n emmettbutler\n\n\n\n + \ July 24, 2023 18:57 \n + \ \n
\n
\n\n\n
\n\n
\n + \ \n
\n \n
\n \"brettlangdon\"\n + \
\n \n
\n + \
\n + \ \n brettlangdon\n + \ \n\n \n\n previously approved these changes\n\n\n + \ \n \n + \ Jul + 24, 2023\n \n \n \n + \
\n\n \n
\n
\n\n + \
\n + \ \n \n
\n
\n
+ \
\n
\n\n\n\n\n
\n \n
\n \n
\n \"emmettbutler\"\n + \
\n \n
\n + \
\n + \ \n emmettbutler\n + \ \n\n \n\n previously approved these changes\n\n\n + \ \n \n + \ Jul + 24, 2023\n \n \n \n + \
\n\n \n
\n
\n\n + \
\n + \ \n \n
\n
\n
+ \
\n
\n\n\n\n\n
\n + \ \n
\n
\n \n
\n \n
\n + \
P403n1x87\n \n\n added 5 commits\n + \ July 24, 2023 22:13
\n
+ \
\n
\n + \ \n
\n \n
\n
\n
\n
\n \n
\n
\n \n \"@P403n1x87\"\n
\n
\n\n + \ \n\n
\n \n\n \n \n \n\n \n\n \n\n\n + \
\n\n
\n + \
\n + \ \n\n + \ \n \n \n\n \n\n
\n \n
\n\n
\n + \
\n
\n\n
\n \n + \ 0d844de\n \n
\n
\n + \
\n
\n
We introduce the concept of suitespec as a way of describing
+        how\nsources affect test runs. We use it to ensure that the debugger\ntests
+        run only if sources that the suite depends on are modified\nby the current
+        commit.
\n
\n
\n\n
\n
\n \n
\n \n
\n
\n
\n
\n \n
\n
\n \n \"@P403n1x87\"\n
\n
\n\n + \ \n\n
\n \n\n \n \n \n\n \n\n \n\n\n + \
\n\n
\n + \
\n + \ \n\n + \ \n \n \n\n \n\n
\n \n
\n\n
\n + \
\n
\n\n
\n \n + \ 1ffab15\n \n
\n
\n + \
\n
\n\n
\n
\n \n
\n \n
\n
\n
\n
\n \n
\n
\n \n \"@P403n1x87\"\n
\n
\n\n + \
\n \n web + scraping FTW\n \n\n
\n\n
\n \n\n \n \n \n\n \n\n \n\n\n + \
\n\n
\n + \
\n + \ \n\n + \ \n \n \n\n \n\n
\n \n
\n\n
\n + \
\n
\n\n
\n \n + \ a115763\n \n
\n
\n + \
\n
\n\n
\n
\n \n
\n \n
\n
\n
\n
\n \n
\n
\n \n \"@P403n1x87\"\n
\n
\n\n + \
\n \n add + doctests\n \n\n
\n\n
\n \n\n + \ \n \n \n\n \n\n \n\n\n + \
\n\n
\n + \
\n + \ \n\n + \ \n \n \n\n \n\n
\n \n
\n\n
\n + \
\n
\n\n
\n \n + \ 4d0fb2e\n \n
\n
\n + \
\n
\n\n
\n
\n \n
\n \n
\n
\n
\n
\n \n
\n
\n \n \"@P403n1x87\"\n
\n
\n\n + \
\n \n use + dynamic config\n \n\n
\n\n
\n \n\n \n \n \n\n \n\n \n\n\n + \
\n\n
\n + \
\n + \ \n\n + \ \n \n \n\n \n\n
\n \n
\n\n
\n + \
\n
\n\n
\n \n + \ 690a7b1\n \n
\n
\n + \
\n
\n\n
\n
\n
\n\n\n
\n\n
\n + \ \n
\n \n
\n \n
\n
\"@P403n1x87\"\n P403n1x87\n\n\n dismissed stale reviews from + emmettbutler + and brettlangdon\n\n\n + \ via\n \n 690a7b1\n + \ \n\n July + 24, 2023 21:17 \n \n
\n
\n
\n + \ \n
\n \n
\n + \
\"@P403n1x87\"\n P403n1x87\n\n\n force-pushed\n + \ the\n \n \n ci/debugger-suitespec\n\n\n + \ \n branch\n from\n 5f1daca + \ to\n 690a7b1 + \ \n + \ Compare\n \n\n\n\n July 24, 2023 21:17 \n + \ \n
\n
\n
\n \n
\n \n
\n + \
\"@P403n1x87\"\n P403n1x87\n\n\n requested review from\n emmettbutler + and \n brettlangdon\n\n\n\n + \ July 24, 2023 21:17 \n + \ \n
\n
\n\n\n
\n\n
\n + \ \n
\n \n
\n \"brettlangdon\"\n + \
\n \n
\n + \
\n + \ \n brettlangdon\n + \ \n\n \n\n approved these changes\n\n\n \n \n + \ Jul + 24, 2023\n \n \n \n + \
\n\n \n
\n
\n\n
\n \n \n
\n
\n
\n
\n\n\n\n\n + \
\n + \ \n
\n
\n \n
\n \n
\n + \
P403n1x87\n \n\n added 2 commits\n + \ July 25, 2023 09:06
\n
+ \
\n
\n + \ \n
\n \n
\n
\n
\n
\n \n
\n
\n \n \"@P403n1x87\"\n
\n
\n\n + \ \n\n + \
\n \n\n \n \n \n\n \n\n \n\n\n + \
\n\n
\n + \
\n + \ \n\n + \ \n \n \n\n \n\n
\n \n
\n\n
\n + \
\n
\n\n
\n \n + \ f421ece\n \n
\n
\n + \
\n
\n\n
\n
\n \n
\n \n
\n
\n
\n
\n \n
\n
\n \n \"@P403n1x87\"\n
\n
\n\n + \ \n\n + \
\n \n\n \n \n \n\n \n\n \n\n\n + \
\n\n
\n + \
\n + \ \n\n + \ \n \n \n\n \n\n
\n \n
\n\n
\n + \
\n
\n\n
\n \n + \ 3eacc26\n \n
\n
\n + \
\n
\n\n
\n
\n
\n\n\n
\n\n
\n + \ \n
\n
\n + \ \n + \ \n\n + \
\n
\n\n\n \"@P403n1x87\"\n P403n1x87\n\n\n\n merged commit f441242\n into\n\n \n \n DataDog:1.x\n + \ \n\n\n Jul 25, 2023\n\n
\n
\n\n
\n\n
\n
\n \n \n\n
\n\n
\n
\n \"@Yun-Kim\"\nYun-Kim\n\n\n\n mentioned this pull request\n \n Jul 26, 2023\n + \ \n
\n\n\n\n\n \n
\n \n \n \n\n \n \n\n + \ \n \n \n \n\n\n 16 + tasks\n
\n
\n\n\n\n
\n
\n\n + \ \n
\n \n + \ \n + \ \n\n \n
\n \n Yun-Kim \n\n added a commit\n that referenced\n + \ this pull request\n\n \n + \ Jul + 26, 2023\n \n
\n \n
\n + \
\n
\n \n
\n
\n \n \"@Yun-Kim\"\n + \
\n
\n\n\n \n\n + \
\n \n\n \n \n \n\n \n\n \n\n\n + \
\n\n
\n + \
\n\n \n + \
\n \n 43497d1\n \n
\n
\n + \
\n
\n
#6412
+        changed our circleci configuration setup to be dynamic, but this\ninadvertently
+        removed the `coverage` and `riot_run_latest` circleci\npipeline parameters
+        from the main `.circleci/config.yml` file, which\nbreaks our nightly 1.x coverage
+        pipeline runs. This PR re-adds those\nparameters back and re-enables coverage
+        reporting.\n\nNote that `datastreams`, `langchain`, `elasticsearch`,\n`integration-snapshot`
+        test suites are still failing on 1.x nightly\ncoverage runs and will need
+        to be fixed.\n\n## Checklist\n\n- [x] Change(s) are motivated and described
+        in the PR description.\n- [x] Testing strategy is described if automated tests
+        are not included\nin the PR.\n- [x] Risk is outlined (performance impact,
+        potential for breakage,\nmaintainability, etc).\n- [x] Change is maintainable
+        (easy to change, telemetry, documentation).\n- [x] [Library release note\nguidelines](https://ddtrace.readthedocs.io/en/stable/releasenotes.html)\nare
+        followed. If no release note is required, add label\n`changelog/no-changelog`.\n-
+        [x] Documentation is included (in-code, generated user docs, [public\ncorp
+        docs](https://github.com/DataDog/documentation/)).\n-
+        [x] Backport labels are set (if\n[applicable](https://ddtrace.readthedocs.io/en/latest/contributing.html#backporting))\n\n##
+        Reviewer Checklist\n\n- [x] Title is accurate.\n- [x] No unnecessary changes
+        are introduced.\n- [x] Description motivates each change.\n- [x] Avoids breaking\n[API](https://ddtrace.readthedocs.io/en/stable/versioning.html#interfaces)\nchanges
+        unless absolutely necessary.\n- [x] Testing strategy adequately addresses
+        listed risk(s).\n- [x] Change is maintainable (easy to change, telemetry,
+        documentation).\n- [x] Release note makes sense to a user of the library.\n-
+        [x] Reviewer has explicitly acknowledged and discussed the performance\nimplications
+        of this PR as reported in the benchmarks PR comment.\n- [x] Backport labels
+        are set in a manner that is consistent with the\n[release branch maintenance\npolicy](https://ddtrace.readthedocs.io/en/latest/contributing.html#backporting)
\n + \
\n
\n\n
\n
\n
\n\n \n
\n \n \n \n\n \n
\n + \ \n romainkomorndatadog + \n\n pushed a commit\n that referenced\n this pull request\n\n + \ \n Aug 8, 2023\n + \ \n
\n \n
\n + \
\n
\n \n
\n
\n \n \"@P403n1x87\"\n + \ \n \"@romainkomorndatadog\"\n + \
\n
\n\n\n
\n + \ \n ci: + run the debugger suite only if necessary (#6412)\n + \ \n\n \n + \ \n \n\n
\n\n + \
\n \n\n \n \n \n\n \n\n
\n\n
\n
\n\n \n
\n + \ \n 6838e4b\n \n
\n
\n + \
\n
\n
We introduce the concept of suitespec as a way of describing
+        how sources\naffect test runs. We use it to ensure that the debugger tests
+        run only\nif sources that the suite depends on are modified by the current
+        commit.\n\n## Suitespec Implementation Details\n\nThe suitespec solution is
+        based on a manual configuration of of test\nsuites. To simplify the declaration
+        of file patterns for test suites,\none can make use of _components_, which
+        essentially are a logic\ncollection of patterns. Test suite can then be declared
+        as a list of\ncomponents to reflect their dependencies on these logic parts,
+        and to\nDRY the declaration itself by avoiding repetitions.\n\n## Notes\n\n-
+        When the script fails for any reason, tests are run.\n- It is important that
+        path patterns are listed correctly, or some tests\nmight not run when they
+        are in fact supposed to.\n- Best effort to determine the correct list of changed
+        files via the\nGitHub REST API. When that fails, we fall back to the less
+        accurate `git\ndiff` against the target branch.\n\n## Checklist\n\n- [x] Change(s)
+        are motivated and described in the PR description.\n- [x] Testing strategy
+        is described if automated tests are not included\nin the PR.\n- [x] Risk is
+        outlined (performance impact, potential for breakage,\nmaintainability, etc).\n-
+        [x] Change is maintainable (easy to change, telemetry, documentation).\n-
+        [x] [Library release note\nguidelines](https://ddtrace.readthedocs.io/en/stable/releasenotes.html)\nare
+        followed. If no release note is required, add label\n`changelog/no-changelog`.\n-
+        [x] Documentation is included (in-code, generated user docs, [public\ncorp
+        docs](https://github.com/DataDog/documentation/)).\n-
+        [x] Backport labels are set (if\n[applicable](https://ddtrace.readthedocs.io/en/latest/contributing.html#backporting))\n\n##
+        Reviewer Checklist\n\n- [ ] Title is accurate.\n- [ ] No unnecessary changes
+        are introduced.\n- [ ] Description motivates each change.\n- [ ] Avoids breaking\n[API](https://ddtrace.readthedocs.io/en/stable/versioning.html#interfaces)\nchanges
+        unless absolutely necessary.\n- [ ] Testing strategy adequately addresses
+        listed risk(s).\n- [ ] Change is maintainable (easy to change, telemetry,
+        documentation).\n- [ ] Release note makes sense to a user of the library.\n-
+        [ ] Reviewer has explicitly acknowledged and discussed the performance\nimplications
+        of this PR as reported in the benchmarks PR comment.\n- [ ] Backport labels
+        are set in a manner that is consistent with the\n[release branch maintenance\npolicy](https://ddtrace.readthedocs.io/en/latest/contributing.html#backporting)
\n + \
\n
\n\n
\n
\n
\n\n \n
\n \n \n \n\n \n
\n + \ \n romainkomorndatadog + \n\n pushed a commit\n that referenced\n this pull request\n\n + \ \n Aug 8, 2023\n + \ \n
\n \n
\n + \
\n
\n \n
\n
\n \n \"@Yun-Kim\"\n + \ \n \"@romainkomorndatadog\"\n + \
\n
\n\n\n \n\n + \
\n \n\n \n \n \n\n \n\n
\n\n
\n
\n\n \n
\n + \ \n b38e5ce\n \n
\n
\n + \
\n
\n
#6412
+        changed our circleci configuration setup to be dynamic, but this\ninadvertently
+        removed the `coverage` and `riot_run_latest` circleci\npipeline parameters
+        from the main `.circleci/config.yml` file, which\nbreaks our nightly 1.x coverage
+        pipeline runs. This PR re-adds those\nparameters back and re-enables coverage
+        reporting.\n\nNote that `datastreams`, `langchain`, `elasticsearch`,\n`integration-snapshot`
+        test suites are still failing on 1.x nightly\ncoverage runs and will need
+        to be fixed.\n\n## Checklist\n\n- [x] Change(s) are motivated and described
+        in the PR description.\n- [x] Testing strategy is described if automated tests
+        are not included\nin the PR.\n- [x] Risk is outlined (performance impact,
+        potential for breakage,\nmaintainability, etc).\n- [x] Change is maintainable
+        (easy to change, telemetry, documentation).\n- [x] [Library release note\nguidelines](https://ddtrace.readthedocs.io/en/stable/releasenotes.html)\nare
+        followed. If no release note is required, add label\n`changelog/no-changelog`.\n-
+        [x] Documentation is included (in-code, generated user docs, [public\ncorp
+        docs](https://github.com/DataDog/documentation/)).\n-
+        [x] Backport labels are set (if\n[applicable](https://ddtrace.readthedocs.io/en/latest/contributing.html#backporting))\n\n##
+        Reviewer Checklist\n\n- [x] Title is accurate.\n- [x] No unnecessary changes
+        are introduced.\n- [x] Description motivates each change.\n- [x] Avoids breaking\n[API](https://ddtrace.readthedocs.io/en/stable/versioning.html#interfaces)\nchanges
+        unless absolutely necessary.\n- [x] Testing strategy adequately addresses
+        listed risk(s).\n- [x] Change is maintainable (easy to change, telemetry,
+        documentation).\n- [x] Release note makes sense to a user of the library.\n-
+        [x] Reviewer has explicitly acknowledged and discussed the performance\nimplications
+        of this PR as reported in the benchmarks PR comment.\n- [x] Backport labels
+        are set in a manner that is consistent with the\n[release branch maintenance\npolicy](https://ddtrace.readthedocs.io/en/latest/contributing.html#backporting)
\n + \
\n
\n\n
\n
\n
\n\n\n\n
\n\n\n\n \n
\n
\n \n
+ \
\n\n\n\n \n\n
\n + \
\n
\n \n Sign up for free\n to join + this conversation on GitHub.\n Already have an account?\n Sign + in to comment\n\n\n \n
\n\n
\n
\n \n\n\n + \
\n
\n\n\n \n
\n \n
\n \n
\n Reviewers\n
\n\n \n\n\n + \

\n \n\n \n \"@brettlangdon\"\n \n brettlangdon\n\n\n\n + \ \n + \ \n \n + \ \n\n \n \n brettlangdon approved these changes\n\n + \

\n

\n \n\n \n \"@majorgreys\"\n \n majorgreys\n\n\n + \ Awaiting requested review from majorgreys\n\n + \ majorgreys is a code owner automatically + assigned from DataDog/apm-core-python\n\n \n

\n + \

\n \n\n \n \"@jbertran\"\n \n jbertran\n\n\n + \ Awaiting requested review from jbertran\n\n + \ jbertran was automatically assigned from + DataDog/apm-framework-integrations-reviewers-py\n\n \n

\n + \

\n \n\n \n \"@emmettbutler\"\n \n emmettbutler\n\n\n + \ Awaiting requested review from emmettbutler\n\n\n + \ \n

\n\n \n
\n\n
\n\n\n
\n
\n\n \n
\n Assignees\n + \
\n\n\n \n\n + \ No one assigned\n\n\n\n
\n\n\n \n\n \n\n\n
\n Labels\n
\n\n\n
\n \n\n changelog/no-changelog\n\n + \ A changelog entry is not required for + this PR.\n\n
\n\n
\n\n\n \n\n
\n
\n
\n Projects\n + \
\n\n
\n
\n\n None yet\n\n\n\n
\n\n\n + \ \n
\n
\n \n
\n Milestone\n + \
\n\n No milestone\n\n
\n\n\n \n \n \n
\n
\n \n
\n \n
\n Development\n + \
\n\n\n \n\n

Successfully merging this pull request may + close these issues.

\n\n\n \n\n
+ \
\n
\n
\n\n \n \n\n + \ \n\n \n
\n + \
\n
\n 4 participants\n
\n \n
\n
\n\n\n\n + \ \n\n \n\n\n\n\n \n\n\n\n\n\n \n \n \n + \ \n\n\n + \ \n\n\n \n\n\n\n\n \n \n\n + \ \n\n
\n

Footer

\n\n \n\n\n
\n
\n \n \n \n\n\n + \ \n © 2024 GitHub, Inc.\n \n
\n\n + \ \n
\n
\n\n\n\n\n \n\n\n \n\n + \ \n\n
\n + \
\n
\n
\n\n \n\n\n\n\n\n \n\n
\n + \
\n \n\n\n" + headers: + Accept-Ranges: + - bytes + Cache-Control: + - no-cache + Content-Security-Policy: + - 'default-src ''none''; base-uri ''self''; child-src github.com/assets-cdn/worker/ + github.com/webpack/ github.com/assets/ gist.github.com/assets-cdn/worker/; + connect-src ''self'' uploads.github.com www.githubstatus.com collector.github.com + raw.githubusercontent.com api.github.com github-cloud.s3.amazonaws.com github-production-repository-file-5c1aeb.s3.amazonaws.com + github-production-upload-manifest-file-7fdce7.s3.amazonaws.com github-production-user-asset-6210df.s3.amazonaws.com + *.rel.tunnels.api.visualstudio.com wss://*.rel.tunnels.api.visualstudio.com + objects-origin.githubusercontent.com copilot-proxy.githubusercontent.com proxy.individual.githubcopilot.com + proxy.business.githubcopilot.com proxy.enterprise.githubcopilot.com *.actions.githubusercontent.com + wss://*.actions.githubusercontent.com productionresultssa0.blob.core.windows.net/ + productionresultssa1.blob.core.windows.net/ productionresultssa2.blob.core.windows.net/ + productionresultssa3.blob.core.windows.net/ productionresultssa4.blob.core.windows.net/ + productionresultssa5.blob.core.windows.net/ productionresultssa6.blob.core.windows.net/ + productionresultssa7.blob.core.windows.net/ productionresultssa8.blob.core.windows.net/ + productionresultssa9.blob.core.windows.net/ productionresultssa10.blob.core.windows.net/ + productionresultssa11.blob.core.windows.net/ productionresultssa12.blob.core.windows.net/ + productionresultssa13.blob.core.windows.net/ productionresultssa14.blob.core.windows.net/ + productionresultssa15.blob.core.windows.net/ productionresultssa16.blob.core.windows.net/ + productionresultssa17.blob.core.windows.net/ productionresultssa18.blob.core.windows.net/ + productionresultssa19.blob.core.windows.net/ github-production-repository-image-32fea6.s3.amazonaws.com + github-production-release-asset-2e65be.s3.amazonaws.com insights.github.com + 
wss://alive.github.com api.githubcopilot.com api.individual.githubcopilot.com + api.business.githubcopilot.com api.enterprise.githubcopilot.com; font-src + github.githubassets.com; form-action ''self'' github.com gist.github.com copilot-workspace.githubnext.com + objects-origin.githubusercontent.com; frame-ancestors ''none''; frame-src + viewscreen.githubusercontent.com notebooks.githubusercontent.com; img-src + ''self'' data: blob: github.githubassets.com media.githubusercontent.com camo.githubusercontent.com + identicons.github.com avatars.githubusercontent.com private-avatars.githubusercontent.com + github-cloud.s3.amazonaws.com objects.githubusercontent.com secured-user-images.githubusercontent.com/ + user-images.githubusercontent.com/ private-user-images.githubusercontent.com + opengraph.githubassets.com github-production-user-asset-6210df.s3.amazonaws.com + customer-stories-feed.github.com spotlights-feed.github.com objects-origin.githubusercontent.com + *.githubusercontent.com; manifest-src ''self''; media-src github.com user-images.githubusercontent.com/ + secured-user-images.githubusercontent.com/ private-user-images.githubusercontent.com + github-production-user-asset-6210df.s3.amazonaws.com gist.github.com; script-src + github.githubassets.com; style-src ''unsafe-inline'' github.githubassets.com; + upgrade-insecure-requests; worker-src github.com/assets-cdn/worker/ github.com/webpack/ + github.com/assets/ gist.github.com/assets-cdn/worker/' + Content-Type: + - text/html; charset=utf-8 + Date: + - Thu, 12 Dec 2024 14:42:36 GMT + Referrer-Policy: + - no-referrer-when-downgrade + Server: + - GitHub.com + Set-Cookie: + - 
_gh_sess=fQw%2BTZMS4QYZ%2FTA4MNOPSJubEJj6%2B4YAbvZcDJw6R8TTK%2BMVMvH7EZQtu30ktX%2By%2FA6TPcH4dFe9WAR0%2B6WdXM5LeWe7eUOeosO%2FKdcYGMtaudvPV7Tjrv8NPxefhK8GYTzCAI0TN6iQR7CC7S4bKt21Me3zMtaqQlfrbOvexXVbatPyfKM1pSwdQDSYNgXgZpvz6FpudZu8Ito5%2FSqD%2F6P%2B%2Foq57qdkGtm98SrAr1VET3ZWzxV9a2jYhwXfpCKzqaZa4CrIRiFSjs65m6mZvg%3D%3D--qdQ52Vjfe00bTqax--8KxHEOVAdiZ0ixENBtHaQg%3D%3D; + Path=/; HttpOnly; Secure; SameSite=Lax + - _octo=GH1.1.1954075846.1734014555; Path=/; Domain=github.com; Expires=Fri, + 12 Dec 2025 14:42:35 GMT; Secure; SameSite=Lax + - logged_in=no; Path=/; Domain=github.com; Expires=Fri, 12 Dec 2025 14:42:35 + GMT; HttpOnly; Secure; SameSite=Lax + Strict-Transport-Security: + - max-age=31536000; includeSubdomains; preload + Transfer-Encoding: + - chunked + Vary: + - X-PJAX, X-PJAX-Container, Turbo-Visit, Turbo-Frame, Accept-Encoding, Accept, + X-Requested-With + X-Content-Type-Options: + - nosniff + X-Frame-Options: + - deny + X-GitHub-Request-Id: + - ED1F:3C75F0:23D0577:323E7BD:675AF65B + X-XSS-Protection: + - '0' + connection: + - close + server-timing: + - pull_request_layout-fragment;desc="pull_request_layout fragment";dur=259.185408,conversation_content-fragment;desc="conversation_content + fragment";dur=1167.36918,conversation_sidebar-fragment;desc="conversation_sidebar + fragment";dur=278.203377,nginx;desc="NGINX";dur=1.232025,glb;desc="GLB";dur=3.090931 + x-voltron-version: + - 69a2227 + status: + code: 200 + message: OK +- request: + body: null + headers: + Connection: + - close + Host: + - github.com + method: GET + uri: https://github.com/DataDog/dd-trace-py/pull/11534 + response: + body: + string: "\n\n\n\n\n\n\n\n\n\n\n\n \n \n + \ \n \n \n \n + \ \n + \ \n\n + \ \n\n \n\n \n \n \n \n \n\n\n \n\n\n \n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n + \ \n \n\n\n\n\n\n\n\n\n\n\n\n\n fix(asm): add global states to ensure patching once [backport + 2.15] by christophe-papazian \xB7 Pull Request #11534 \xB7 
DataDog/dd-trace-py + \xB7 GitHub\n\n\n\n \n \n \n\n \n \n\n\n + \ \n\n\n \n\n\n \n \n\n \n \n\n + \ \n\n\n\n \n\n \n\n\n\n\n \n\n \n\n \n\n + \ \n\n \n\n \n\n \n \n \n\n \n \n \n\n\n\n\n \n\n\n\n + \ \n\n\n \n \n \n \n\n \n\n \n + \ \n\n + \ \n\n\n\n \n\n \n\n\n \n\n \n\n \n \n + \ \n\n\n\n\n\n \n\n + \ \n\n \n
\n \n\n\n
\n Skip to content\n\n + \ \n \n + \ \n \n \n\n\n\n\n\n\n\n\n\n \n \n + \
\n\n\n\n\n\n + \ \n\n \n\n \n\n\n
\n

Navigation Menu

\n\n \n\n + \
\n
\n
\n + \ \n
\n\n \n + \ \n + \ \n\n + \ \n\n
\n \n Sign in\n \n
\n
\n\n\n + \
\n
\n + \ \n\n
\n \n\n\n\n \n \n
\n \n \n\n + \
\n Search + or jump to...\n
\n + \ \n\n + \
\n \n\n \n\n \n
\n \n + \

Search + code, repositories, users, issues, pull requests...

\n
\n \n
+ \
\n
\n \n
\n \n \n \n \n \n\n \n
\n
\n
\n
\n + \ \n
\n + \
\n Clear\n + \ \n\n + \
\n \n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n + \
\n \n + \
\n + \
\n
\n\n \n + \
\n
\n\n
\n
\n
\n \n
\n + \ \n\n \n
\n + \
\n
\n + \

\n Provide + feedback\n

\n \n
\n
\n + \ \n
\n
\n + \ \n
\n \n + \
\n

We read every piece of feedback, and take your input very + seriously.

\n \n \n + \ \n
\n
\n + \ \n
\n\n \n \n\n + \ \n
\n
\n + \
\n

\n Saved searches\n

\n + \

Use + saved searches to filter your results more quickly

\n
\n
\n \n + \
\n
\n \n
\n \n + \
\n\n \n\n
\n + \
\n
\n\n
\n + \
\n \n
\n + \
\n
\n\n\n
\n \n Sign in\n \n + \
\n\n \n Sign + up\n \n \n
\n + \
\n
\n \n\n\n \n \n\n + \
\n\n\n\n\n\n\n\n\n + \
\n\n\n + \ \n\n\n + \ \n
\n\n\n + \ \n\n\n\n\n\n\n \n
\n
\n \n \n\n\n\n + \ \n \n\n \n\n\n\n\n\n\n \n
\n\n
\n\n + \
\n \n
\n + \ \n \n\n + \ \n \n + \ \n DataDog\n + \ \n /\n + \ \n dd-trace-py\n \n\n Public\n
\n\n\n + \
\n\n
\n \n\n + \
\n
\n\n
\n
\n\n\n \n\n + \
\n\n \n\n\n\n\n
\n \n\n\n\n \n \n
\n \n\n
\n \n \n \n\n
\n
\n
\n\n \n
\n \n \n New issue\n \n \n + \
\n
\n \n \n\n
\n\n
\n

\n Have a question + about this project? Sign up for a free GitHub account to open an + issue and contact its maintainers and the community.\n

\n\n \n\n

By + clicking “Sign up for GitHub”, you agree to our terms of service + and\n privacy statement. We\u2019ll occasionally send you + account related emails.

\n\n

\n + \ Already on GitHub?\n Sign + in\n to your account\n

\n
\n\n
\n
\n
\n + \ \n + \
\n\n

\n fix(asm): + add global states to ensure patching once [backport 2.15]\n #11534\n

\n
\n
\n\n + \
\n + \
\n + \ \n Merged\n\n + \
\n\n\n\n\n
\n gnufede\n merged 3 commits into\n\n\n 2.15\n\nfrom\n\nbackport-11522-to-2.15\n \n \n \n\n \n \n\n + \
\n
\n\n\n + \ Nov 26, + 2024\n\n\n
\n
\n\n\n \n\n\n\n
\n
\n
\n
\n + \
\n \n Merged\n\n + \
\n\n\n\n\n
\n + \

\n \n fix(asm): add global states to ensure patching once [backport + 2.15]\n \n #11534\n

\n\n + \
\n gnufede\n merged 3 commits into\n\n\n 2.15\n\nfrom\n\nbackport-11522-to-2.15\n \n \n \n\n \n \n\n + \
\n
\n\n\n + \ Nov 26, + 2024\n\n\n
\n
\n
\n + \
\n
\n
\n
\n
\n\n\n\n + \ \n
\n
\n \n \n +74\n + \ \n \n \u221210\n + \ \n \n \n + \ \n \n
\n\n \n
\n\n\n\n
\n + \
\n

Conversation

\n + \ \n \n\n\n \n\n
\n\n
\n \"christophe-papazian\"\n + \ \n \n
\n + \
\n
\n
\n
\n \n \n \n\n \n\n\n \n \n \n \n Copy + link\n\n
\n
\n + \
\n
\n\n
\n \n\n + \ \n + \ Contributor\n\n\n + \ \n\n
\n\n

\n + \
\n \"@christophe-papazian\"\n\n \n + \ christophe-papazian\n \n\n \n\n + \ \n\n commented\n\n\n Nov 25, 2024\n\n\n \n + \ \n\n
\n + \ \n
\n \n edited\n \n + \ \n \n \n\n
\n
\n \n \n \n + \ \n \n + \ \n Loading\n\n \n \n + \
\n
\n\n
\n\n

\n
\n\n
\n + \
\n \n \n + \
\n

Backport 81824b8 + from #11522 to 2.15.

\n

Ensure common patches for SCA and Exploit Prevention are loaded..

\n

only once
\nonly if exploit prevention is active or sca is + active
\nChanges:

\n

factorize load_common_modules logic + in ddtrace.appsec
\nboolean state for patch_common_module and enable_iast_propagation + to ensure they are only called once.
\nensure it's loaded after one click + activation
\nensure it's properly loaded in unit tests if required
\nadd + some failsafe for iast in wrap_open for importerror
\nupdate an iast test + to reflect that common_modules is loaded in the test by default.
\nAPPSEC-55997

\n

Checklist

\n
    \n
  • PR author has checked that all the criteria below are met
  • \n
  • The + PR description includes an overview of the change
  • \n
  • The PR description + articulates the motivation for the change
  • \n
  • The change includes tests + OR the PR description describes a testing strategy
  • \n
  • The PR description + notes risks associated with the change, if any
  • \n
  • Newly-added code + is easy to change
  • \n
  • The change follows the library release note guidelines
  • \n
  • The change + includes or references documentation updates if necessary
  • \n
  • Backport + labels are set (if applicable)
  • \n
\n

Reviewer Checklist

\n
    \n
  • Reviewer + has checked that all the criteria below are met
  • \n
  • Title is accurate
  • \n
  • All + changes are related to the pull request's stated goal
  • \n
  • Avoids breaking + API changes
  • \n
  • Testing strategy adequately addresses + listed risks
  • \n
  • Newly-added code is easy to change
  • \n
  • Release + note makes sense to a user of the library
  • \n
  • If necessary, author has + acknowledged and discussed the performance implications of this PR as reported + in the benchmarks PR comment
  • \n
  • Backport labels are set in a manner + that is consistent with the release branch maintenance policy
  • \n
\n
\n + \
\n \n
\n\n
\n\n
\n \n + \

\n \n + \ \n \n \n\n

\n \n\n\n\n + \
\n
\n
\n \n
\n
\n + \ \n
\n
\n
\n + \
\n\n
\n
\n
\n\n\n \n\n \n
\n\n\n
\n + \ \n
\n
\n
\n \n
\n \n
\n
\n
\n
\n \n
\n
\n \n \"@christophe-papazian\"\n + \
\n
\n\n
\n \n + \ fix(asm): + add global states to ensure patching once (#11522)\n + \ \n\n \n + \ + \ \n
\n\n
\n \n\n + \ \n \n \n\n \n\n \n\n\n + \
\n\n
\n + \
\n + \ \n\n + \ \n \n \n\n \n\n
\n \n
\n\n
\n + \
\n
\n\n
\n \n + \ cd59645\n \n
\n
\n + \
\n
\n
Ensure common patches for SCA and Exploit Prevention are loaded..\n-
+        only once\n- only if exploit prevention is active or sca is active\n\nChanges:\n-
+        factorize load_common_modules logic in ddtrace.appsec\n- boolean state for
+        patch_common_module and enable_iast_propagation to\nensure they are only called
+        once.\n- ensure it's loaded after one click activation\n- ensure it's properly
+        loaded in unit tests if required\n- add some failsafe for iast in wrap_open
+        for importerror\n- update an iast test to reflect that common_modules is loaded
+        in the\ntest by default.\n\nAPPSEC-55997\n\n- [x] PR author has checked that
+        all the criteria below are met\n- The PR description includes an overview
+        of the change\n- The PR description articulates the motivation for the change\n-
+        The change includes tests OR the PR description describes a testing\nstrategy\n-
+        The PR description notes risks associated with the change, if any\n- Newly-added
+        code is easy to change\n- The change follows the [library release note\nguidelines](https://ddtrace.readthedocs.io/en/stable/releasenotes.html)\n-
+        The change includes or references documentation updates if necessary\n- Backport
+        labels are set (if\n[applicable](https://ddtrace.readthedocs.io/en/latest/contributing.html#backporting))\n\n-
+        [x] Reviewer has checked that all the criteria below are met\n- Title is accurate\n-
+        All changes are related to the pull request's stated goal\n- Avoids breaking\n[API](https://ddtrace.readthedocs.io/en/stable/versioning.html#interfaces)\nchanges\n-
+        Testing strategy adequately addresses listed risks\n- Newly-added code is
+        easy to change\n- Release note makes sense to a user of the library\n- If
+        necessary, author has acknowledged and discussed the performance\nimplications
+        of this PR as reported in the benchmarks PR comment\n- Backport labels are
+        set in a manner that is consistent with the\n[release branch maintenance\npolicy](https://ddtrace.readthedocs.io/en/latest/contributing.html#backporting)\n\n(cherry
+        picked from commit 81824b8)
\n + \
\n
\n\n
\n
\n
\n\n\n
\n\n
\n + \ \n
\n \n
\n \n
\n + \
\"@christophe-papazian\"\n christophe-papazian\n\n\n marked + this pull request as ready for review\n\n November + 25, 2024 16:51 \n \n
\n
\n
\n + \ \n
\n \n
\n + \
\"@christophe-papazian\"\n christophe-papazian\n\n\n requested + review from\n a team\n\n + \ as code owners\n\n\n + \ November 25, 2024 16:51 \n + \ \n
\n
\n
\n \n
\n \n
\n + \
\"@christophe-papazian\"\n christophe-papazian\n\n\n requested + review from\n gnufede + and \n emmettbutler\n\n\n\n + \ November 25, 2024 16:51 \n + \ \n
\n
\n\n\n
\n\n
\n \n \n
\n\n
\n + \ \"@github-actions\"\n\n \n + \ \"GitHub\n \n
\n\n\n
\n\n
\n + \
\n + \
\n + \
\n + \ \n + \ \n \n\n \n\n\n + \ \n \n \n \n Copy + link\n\n
\n
\n + \
\n
\n\n
\n \n\n + \ \n + \ Contributor\n\n\n + \ \n\n
\n\n

\n + \
\n \n\n \n github-actions\n + \ bot\n\n \n\n + \ \n\n commented\n\n\n Nov 25, 2024\n\n\n + \ \n
\n\n

\n
\n\n\n
\n\n + \ \n\n \n \n + \ \n \n \n
\n + \

CODEOWNERS have + been resolved as:

\n
releasenotes/notes/exploit_prevention_patch_fix-1bdd7540e1d085d8.yaml
+        \  @DataDog/apm-python\nddtrace/_monkey.py                                                      @DataDog/apm-core-python\nddtrace/appsec/__init__.py
+        \                                             @DataDog/asm-python\nddtrace/appsec/_common_module_patches.py
+        \                               @DataDog/asm-python\nddtrace/appsec/_iast/__init__.py
+        \                                       @DataDog/asm-python\nddtrace/appsec/_remoteconfiguration.py
+        \                                 @DataDog/asm-python\ntests/appsec/integrations/test_flask_telemetry.py
+        \                      @DataDog/asm-python\ntests/utils.py                                                          @DataDog/python-guild\n
\n\n + \
\n
\n\n\n
\n\n \n\n
\n + \
\n
\n \n \n
\n
\n \n
\n
\n
\n
\n + \
\n
\n\n
\n \n + \

\n \n + \ \n \n \n\n

\n \n\n\n + \
\n
\n\n
\n\n\n
\n\n\n
\n + \ \n \n
\n\n
\n + \ \"@datadog-dd-trace-py-rkomorn\"\n\n
\n\n\n + \
\n\n
\n + \
\n + \
\n + \
\n + \ \n + \ \n \n\n \n\n\n + \ \n \n \n \n Copy + link\n\n
\n
\n + \
\n
\n\n
\n \n\n\n\n + \ \n\n
\n\n

\n + \
\n \n\n \n datadog-dd-trace-py-rkomorn\n + \ bot\n\n \n\n + \ \n\n commented\n\n\n Nov 25, 2024\n\n\n + \ \n \n\n
\n \n
\n + \ \n edited\n \n \n \n + \ \n\n
\n + \
\n \n \n \n + \ \n \n + \ \n Loading\n\n \n \n + \
\n
\n\n
\n\n

\n
\n\n\n
\n\n + \ \n\n \n \n + \ \n \n \n
\n + \

Datadog Report

\n

Branch report: + backport-11522-to-2.15
\nCommit + report: c476a58
\nTest + service: dd-trace-py

\n

\u2705 + 0 Failed, 592 Passed, 694 Skipped, 19m 30.54s Total duration (15m 23.31s time + saved)

\n
\n
\n\n\n + \
\n\n \n\n
\n + \
\n
\n \n \n
\n
\n \n
\n
\n
\n
\n + \
\n
\n\n
\n \n + \

\n \n + \ \n \n \n\n

\n \n\n\n + \
\n
\n\n
\n\n\n
\n\n\n
\n + \ \n
\n \n
\n \"gnufede\"\n + \
\n \n
\n + \
\n + \ \n gnufede\n \n\n + \ \n\n approved these changes\n\n\n \n \n + \ Nov + 25, 2024\n \n \n \n + \
\n\n \n
\n
\n\n
\n \n \n
\n
\n
\n
\n\n\n
\n\n + \
\n \n
\n + \ \n
\n + \ \n
\n
\n \"@gnufede\"\n gnufede\n\nenabled + auto-merge (squash)\n\n November + 25, 2024 17:32 \n \n
\n
\n\n\n
\n\n + \
\n \n \n
\n\n
\n + \ \"@pr-commenter\"\n\n
\n\n\n
\n\n
\n + \
\n + \
\n + \
\n + \ \n + \ \n \n\n \n\n\n + \ \n \n \n \n Copy + link\n\n
\n
\n + \
\n
\n\n
\n \n\n\n\n + \ \n\n
\n\n

\n + \
\n \n\n \n pr-commenter\n + \ bot\n\n \n\n + \ \n\n commented\n\n\n Nov 25, 2024\n\n\n + \ \n \n\n
\n \n
\n + \ \n edited\n \n \n \n + \ \n\n
\n + \
\n \n \n \n + \ \n \n + \ \n Loading\n\n \n \n + \
\n
\n\n
\n\n

\n
\n\n\n
\n\n + \ \n\n \n \n + \ \n \n \n
\n + \

Benchmarks

\n

Benchmark execution + time: 2024-11-26 21:13:50

\n

Comparing candidate commit + c476a58 + in PR branch backport-11522-to-2.15 with + baseline commit b462888 + in branch 2.15.

\n

Found + 0 performance improvements and 0 performance regressions! Performance is the + same for 371 metrics, 53 unstable metrics.

\n
\n
\n\n\n + \
\n\n \n\n
\n + \
\n
\n \n \n
\n
\n \n
\n
\n
\n
\n + \
\n
\n\n
\n \n + \

\n \n + \ \n \n \n\n

\n \n\n\n + \
\n
\n\n
\n\n\n
\n\n\n
\n + \ \n
\n \n
\n \"erikayasuda\"\n + \
\n \n
\n + \
\n + \ \n erikayasuda\n + \ \n\n \n\n approved these changes\n\n\n \n \n + \ Nov + 26, 2024\n \n \n \n + \
\n\n \n
\n
\n\n
\n \n \n
\n
\n
\n
\n\n\n
\n\n + \
\n + \ \n
\n
\n \n
\n \n
\n + \
christophe-papazian\n \nand others\n + \ added 2 commits\n November + 26, 2024 18:39
\n
\n
\n \n
\n \n
\n
\n
\n
\n \n
\n
\n \n \"@christophe-papazian\"\n + \
\n
\n\n \n\n + \
\n \n\n \n \n \n\n \n\n \n\n\n + \
\n\n
\n + \
\n + \ \n\n + \ \n \n \n\n \n\n
\n \n
\n\n
\n + \
\n
\n\n
\n \n + \ 3ac9ef8\n \n
\n
\n + \
\n
\n\n
\n
\n \n
\n \n
\n
\n
\n
\n \n
\n
\n \n \"@erikayasuda\"\n
\n
\n\n + \ \n\n + \
\n \n\n \n \n \n\n \n\n \n\n\n + \
\n\n
\n + \
\n + \ \n\n + \ \n \n \n\n \n\n
\n \n
\n\n
\n + \
\n
\n\n
\n \n + \ c476a58\n \n
\n
\n + \
\n
\n\n
\n
\n
\n\n\n
\n\n
\n + \ \n
\n
\n + \ \n + \ \n\n + \
\n
\n\n \n + \ \"@gnufede\"\n gnufede\n\n\n\n + \ merged commit 2d6800f\n into\n\n \n \n 2.15\n \n\n\n Nov 26, 2024\n\n
\n 584 checks passed\n
\n\n
\n + \ \n \n + \ \n \n + \ \n\n + \ \n
\n
\n
\n\n
\n\n + \
\n \n
\n \n
\n
\"@gnufede\"\n gnufede\n\n\n + \ \n deleted the\n \n + \ \n backport-11522-to-2.15\n \n branch\n\n + \ November 26, 2024 21:16 \n + \ \n
\n
\n\n\n
\n\n\n\n\n\n \n
\n
\n \n
+ \
\n\n\n\n
\n\n
\n + \
\n
\n \n Sign up for free\n to join + this conversation on GitHub.\n Already have an account?\n Sign + in to comment\n\n\n \n
\n\n
\n
\n
\n\n
\n + \
\n
\n\n\n \n
\n \n
\n \n
\n Reviewers\n
\n\n \n\n\n + \

\n \n\n \n \"@erikayasuda\"\n \n erikayasuda\n\n\n\n + \ \n + \ \n \n + \ \n\n \n \n erikayasuda approved these changes\n\n + \

\n

\n \n\n \n \"@gnufede\"\n \n gnufede\n\n\n\n \n + \ \n \n + \ \n\n \n \n gnufede approved these changes\n\n + \

\n

\n \n\n \n \"@emmettbutler\"\n \n emmettbutler\n\n\n + \ Awaiting requested review from emmettbutler\n\n + \ emmettbutler is a code owner automatically + assigned from DataDog/apm-python\n\n \n

\n\n \n
\n\n
\n\n\n + \
\n
\n\n \n
\n Assignees\n + \
\n\n\n \n\n + \ No one assigned\n\n\n\n
\n\n\n \n\n \n\n\n
\n Labels\n
\n\n\n
\n None yet\n
\n\n
\n\n\n \n\n
\n
\n
\n Projects\n + \
\n\n
\n
\n\n None yet\n\n\n\n
\n\n\n + \ \n
\n
\n \n
\n Milestone\n + \
\n\n No milestone\n\n
\n\n\n \n \n \n
\n
\n \n
\n \n
\n Development\n + \
\n\n\n \n\n

Successfully merging this pull request may + close these issues.

\n\n\n \n\n
+ \
\n
\n
\n\n \n \n\n + \ \n\n \n
\n + \
\n
\n 3 participants\n
\n \n
\n
\n\n\n\n + \ \n\n \n\n\n\n\n \n\n
\n\n\n\n \n \n \n + \ \n\n\n + \ \n\n\n \n\n\n\n\n \n \n\n + \ \n\n
\n

Footer

\n\n \n\n\n
\n
\n \n \n \n\n\n + \ \n © 2024 GitHub, Inc.\n \n
\n\n + \ \n
\n
\n\n\n\n\n \n\n\n \n\n + \ \n\n
\n + \
\n
\n
\n\n \n\n\n\n\n\n \n\n
\n + \
\n \n\n\n" + headers: + Accept-Ranges: + - bytes + Cache-Control: + - no-cache + Content-Security-Policy: + - 'default-src ''none''; base-uri ''self''; child-src github.com/assets-cdn/worker/ + github.com/webpack/ github.com/assets/ gist.github.com/assets-cdn/worker/; + connect-src ''self'' uploads.github.com www.githubstatus.com collector.github.com + raw.githubusercontent.com api.github.com github-cloud.s3.amazonaws.com github-production-repository-file-5c1aeb.s3.amazonaws.com + github-production-upload-manifest-file-7fdce7.s3.amazonaws.com github-production-user-asset-6210df.s3.amazonaws.com + *.rel.tunnels.api.visualstudio.com wss://*.rel.tunnels.api.visualstudio.com + objects-origin.githubusercontent.com copilot-proxy.githubusercontent.com proxy.individual.githubcopilot.com + proxy.business.githubcopilot.com proxy.enterprise.githubcopilot.com *.actions.githubusercontent.com + wss://*.actions.githubusercontent.com productionresultssa0.blob.core.windows.net/ + productionresultssa1.blob.core.windows.net/ productionresultssa2.blob.core.windows.net/ + productionresultssa3.blob.core.windows.net/ productionresultssa4.blob.core.windows.net/ + productionresultssa5.blob.core.windows.net/ productionresultssa6.blob.core.windows.net/ + productionresultssa7.blob.core.windows.net/ productionresultssa8.blob.core.windows.net/ + productionresultssa9.blob.core.windows.net/ productionresultssa10.blob.core.windows.net/ + productionresultssa11.blob.core.windows.net/ productionresultssa12.blob.core.windows.net/ + productionresultssa13.blob.core.windows.net/ productionresultssa14.blob.core.windows.net/ + productionresultssa15.blob.core.windows.net/ productionresultssa16.blob.core.windows.net/ + productionresultssa17.blob.core.windows.net/ productionresultssa18.blob.core.windows.net/ + productionresultssa19.blob.core.windows.net/ github-production-repository-image-32fea6.s3.amazonaws.com + github-production-release-asset-2e65be.s3.amazonaws.com insights.github.com + 
wss://alive.github.com api.githubcopilot.com api.individual.githubcopilot.com + api.business.githubcopilot.com api.enterprise.githubcopilot.com; font-src + github.githubassets.com; form-action ''self'' github.com gist.github.com copilot-workspace.githubnext.com + objects-origin.githubusercontent.com; frame-ancestors ''none''; frame-src + viewscreen.githubusercontent.com notebooks.githubusercontent.com; img-src + ''self'' data: blob: github.githubassets.com media.githubusercontent.com camo.githubusercontent.com + identicons.github.com avatars.githubusercontent.com private-avatars.githubusercontent.com + github-cloud.s3.amazonaws.com objects.githubusercontent.com secured-user-images.githubusercontent.com/ + user-images.githubusercontent.com/ private-user-images.githubusercontent.com + opengraph.githubassets.com github-production-user-asset-6210df.s3.amazonaws.com + customer-stories-feed.github.com spotlights-feed.github.com objects-origin.githubusercontent.com + *.githubusercontent.com; manifest-src ''self''; media-src github.com user-images.githubusercontent.com/ + secured-user-images.githubusercontent.com/ private-user-images.githubusercontent.com + github-production-user-asset-6210df.s3.amazonaws.com gist.github.com; script-src + github.githubassets.com; style-src ''unsafe-inline'' github.githubassets.com; + upgrade-insecure-requests; worker-src github.com/assets-cdn/worker/ github.com/webpack/ + github.com/assets/ gist.github.com/assets-cdn/worker/' + Content-Type: + - text/html; charset=utf-8 + Date: + - Thu, 12 Dec 2024 14:42:37 GMT + Referrer-Policy: + - no-referrer-when-downgrade + Server: + - GitHub.com + Set-Cookie: + - 
_gh_sess=LtiHLNx8mstCD1%2F8GdlLK3Ek4%2FUx0Fe2Z5G%2BgyD3AJIfkjlnrgBVvR4nRGY7DTatKP%2Bou1B2HQOEbvPrmsRQSzNr4QrkXD%2B%2BoelH3OrGoVb5p8iCoqQMgEy0wWGa1LZNg6ElbtORrY%2BOTZc3pcswIwJXzwyf5B41ot6LyczBcI7LxdQXLwION06Cw9M4GChczVf00HfGJq85K%2FijVuPAL%2BSNpc0CpSymS4zbxOOTeM85%2BMUXqmgfjypU8Hdl1TUYqKHqDF25MpY1LOSlKhlLLw%3D%3D--xSnv%2BlNibojh5RSX--pzM3%2Fm4gngMObk6H3%2FTOfw%3D%3D; + Path=/; HttpOnly; Secure; SameSite=Lax + - _octo=GH1.1.1210734268.1734014556; Path=/; Domain=github.com; Expires=Fri, + 12 Dec 2025 14:42:36 GMT; Secure; SameSite=Lax + - logged_in=no; Path=/; Domain=github.com; Expires=Fri, 12 Dec 2025 14:42:36 + GMT; HttpOnly; Secure; SameSite=Lax + Strict-Transport-Security: + - max-age=31536000; includeSubdomains; preload + Transfer-Encoding: + - chunked + Vary: + - X-PJAX, X-PJAX-Container, Turbo-Visit, Turbo-Frame, Accept-Encoding, Accept, + X-Requested-With + X-Content-Type-Options: + - nosniff + X-Frame-Options: + - deny + X-GitHub-Request-Id: + - ED26:1035B4:234099A:312F92C:675AF65C + X-XSS-Protection: + - '0' + connection: + - close + server-timing: + - pull_request_layout-fragment;desc="pull_request_layout fragment";dur=450.768495,conversation_content-fragment;desc="conversation_content + fragment";dur=576.513283,conversation_sidebar-fragment;desc="conversation_sidebar + fragment";dur=305.288275,nginx;desc="NGINX";dur=1.093278,glb;desc="GLB";dur=4.679312 + x-voltron-version: + - 69a2227 + status: + code: 200 + message: OK +- request: + body: null + headers: + Connection: + - close + Host: + - github.com + method: GET + uri: https://github.com/DataDog/dd-trace-py/pull/11690 + response: + body: + string: "\n\n\n\n\n\n\n\n\n\n\n\n \n \n + \ \n \n \n \n + \ \n + \ \n\n + \ \n\n \n\n \n \n \n \n \n\n\n \n\n\n \n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n + \ \n \n\n\n\n\n\n\n\n\n\n\n\n\n ci: store fake DD_API_KEY as a secret by brettlangdon \xB7 + Pull Request #11690 \xB7 DataDog/dd-trace-py \xB7 GitHub\n\n\n\n \n 
\n \n\n \n \n\n\n + \ \n\n\n \n\n\n \n \n\n \n \n\n + \ \n\n\n\n \n\n \n\n\n\n\n \n\n \n\n \n\n + \ \n\n \n\n \n\n \n \n \n\n \n \n \n\n\n\n\n \n\n\n\n + \ \n\n\n \n \n \n \n\n \n\n \n + \ \n\n + \ \n\n\n\n \n\n \n\n\n \n\n \n\n \n \n + \ \n\n\n\n\n\n \n\n + \ \n\n \n
\n \n\n\n
\n Skip to content\n\n + \ \n \n + \ \n \n \n\n\n\n\n \n \n + \
\n\n\n\n\n\n + \ \n\n \n\n \n\n\n
\n

Navigation Menu

\n\n \n\n + \
\n
\n
\n + \ \n
\n\n \n + \ \n + \ \n\n + \ \n\n
\n \n Sign in\n \n
\n
\n\n\n + \
\n
\n + \ \n\n
\n \n\n\n\n \n \n
\n \n \n\n + \
\n Search + or jump to...\n
\n + \ \n\n + \
\n \n\n \n\n \n
\n \n + \

Search + code, repositories, users, issues, pull requests...

\n
\n \n
+ \
\n
\n \n
\n \n \n \n \n \n\n \n
\n
\n
\n
\n + \ \n
\n + \
\n Clear\n + \ \n\n + \
\n \n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n + \
\n \n + \
\n + \
\n
\n\n \n + \
\n
\n\n
\n
\n
\n \n
\n + \ \n\n \n
\n + \
\n
\n + \

\n Provide + feedback\n

\n \n
\n
\n + \ \n
\n
\n + \ \n
\n \n + \
\n

We read every piece of feedback, and take your input very + seriously.

\n \n \n + \ \n
\n
\n + \ \n
\n\n \n \n\n + \ \n
\n
\n + \
\n

\n Saved searches\n

\n + \

Use + saved searches to filter your results more quickly

\n
\n
\n \n + \
\n
\n \n
\n \n + \
\n\n \n\n
\n + \
\n
\n\n
\n + \
\n \n
\n + \
\n
\n\n\n
\n \n Sign in\n \n + \
\n\n \n Sign + up\n \n \n
\n + \
\n
\n \n\n\n \n \n\n + \
\n\n\n\n\n\n\n\n\n + \
\n\n\n + \ \n\n\n + \ \n
\n\n\n + \ \n\n\n\n\n\n\n \n
\n
\n \n \n\n\n\n + \ \n \n\n \n\n\n\n\n\n\n \n
\n\n
\n\n + \
\n \n
\n + \ \n \n\n + \ \n \n + \ \n DataDog\n + \ \n /\n + \ \n dd-trace-py\n \n\n Public\n
\n\n\n + \
\n\n
\n \n\n + \
\n
\n\n
\n
\n\n\n \n\n + \
\n\n \n\n\n\n\n
\n \n\n\n\n \n \n
\n \n\n
\n \n \n \n\n
\n
\n
\n\n \n
\n \n \n New issue\n \n \n + \
\n
\n \n \n\n
\n\n
\n

\n Have a question + about this project? Sign up for a free GitHub account to open an + issue and contact its maintainers and the community.\n

\n\n \n\n

By + clicking “Sign up for GitHub”, you agree to our terms of service + and\n privacy statement. We\u2019ll occasionally send you + account related emails.

\n\n

\n + \ Already on GitHub?\n Sign + in\n to your account\n

\n
\n\n
\n
\n
\n + \ \n + \
\n\n

\n ci: + store fake DD_API_KEY as a secret\n #11690\n + \

\n
\n
\n\n
\n
\n \n + Open\n\n
\n\n\n\n\n
\n brettlangdon\n\n wants to merge\n 1\n + \ commit into\n\n\n main\n\n + \
\n
\n + \ \n base:\n + \ main\n \n + \ \n \n + \ \n
\n
\n + \
\n Choose + a base branch\n \n
\n\n + \ \n
\n + \ \n
\n\n \n \n\n
\n \n\n \n\n \n\n\n
\n
\n \n + \ \n \n \n Loading\n\n + \
\n
\n\n \n\n\n \n\n + \
\n\n \n
\n + \
\n
\n
\n\n \n + \
\n
\n\n
\n \n
\n\nfrom\n\nbrettlangdon-patch-3\n \n \n \n\n \n \n\n + \
\n
\n\n\n\n + \ \n \n\n\n\n\n\n\n\n\n \n \n + \
\n\n\n + \
\n\n
\n
\n\n\n \n\n\n\n
\n
\n + \
\n + \
\n + \
\n \n Open\n\n
\n\n\n\n\n + \
\n

\n \n + \ ci: store fake DD_API_KEY as a secret\n \n #11690\n

\n\n
\n brettlangdon\n\n + \ wants to merge\n 1\n + \ commit into\n\n\n main\n\nfrom\n\nbrettlangdon-patch-3\n \n \n \n\n \n \n\n + \
\n
\n\n\n\n\n + \
\n
\n
\n
\n + \
\n
\n
\n
\n\n\n\n \n
\n
\n \n \n +2\n \n \n \u22122\n \n \n + \ \n \n \n + \
\n\n \n
\n\n\n\n
\n + \
\n

Conversation

\n + \ \n \n\n\n \n\n
\n\n
\n \"brettlangdon\"\n + \ \n \n
\n + \
\n
\n
\n
\n \n \n \n\n \n\n\n \n \n \n \n Copy + link\n\n
\n
\n + \
\n
\n\n
\n \n\n + \ \n Member\n\n\n \n\n
\n\n

\n
\n \"@brettlangdon\"\n\n \n brettlangdon\n \n\n \n\n \n\n + \ commented\n\n\n Dec 12, 2024\n\n\n \n + \ \n\n
\n + \ \n
\n \n edited\n \n + \ \n \n \n\n
\n
\n \n \n \n + \ \n \n + \ \n Loading\n\n \n \n + \
\n
\n\n
\n\n

\n
\n\n
\n + \
\n \n \n + \
\n

Checklist

\n
    \n
  • PR author + has checked that all the criteria below are met
  • \n
  • The PR description + includes an overview of the change
  • \n
  • The PR description articulates + the motivation for the change
  • \n
  • The change includes tests OR the PR + description describes a testing strategy
  • \n
  • The PR description notes + risks associated with the change, if any
  • \n
  • Newly-added code is easy + to change
  • \n
  • The change follows the library release note guidelines
  • \n
  • The change + includes or references documentation updates if necessary
  • \n
  • Backport + labels are set (if applicable)
  • \n
\n

Reviewer Checklist

\n
    \n
  • Reviewer + has checked that all the criteria below are met
  • \n
  • Title is accurate
  • \n
  • All + changes are related to the pull request's stated goal
  • \n
  • Avoids breaking + API changes
  • \n
  • Testing strategy adequately addresses + listed risks
  • \n
  • Newly-added code is easy to change
  • \n
  • Release + note makes sense to a user of the library
  • \n
  • If necessary, author has + acknowledged and discussed the performance implications of this PR as reported + in the benchmarks PR comment
  • \n
  • Backport labels are set in a manner + that is consistent with the release branch maintenance policy
  • \n
\n
\n + \
\n \n
\n\n
\n\n
\n \n + \

\n \n + \ \n \n \n\n

\n \n\n\n\n + \
\n
\n
\n \n
\n
\n + \ \n
\n
\n
\n + \
\n\n
\n
\n
\n\n\n \n\n \n
\n\n\n
\n + \ \n
\n
\n
\n \n
\n \n
\n
\n
\n
\n \n
\n
\n \n \"@brettlangdon\"\n
\n
\n\n + \ \n\n + \
\n \n\n \n \n \n\n \n\n \n\n\n + \
\n\n
\n + \
\n + \ \n\n + \ \n \n \n\n \n\n
\n \n
\n\n
\n + \
\n
\n\n
\n \n + \ a6675d3\n \n
\n
\n + \
\n
\n\n
\n
\n
\n\n\n
\n\n
\n + \ \n
\n
\n \n + \ \n\n
\n + \
\n\n \n\n \"@brettlangdon\"\nbrettlangdon\n\n\n\n\n added\n the \n\n changelog/no-changelog\n\n A changelog + entry is not required for this PR.\n label\n\n\n Dec 12, 2024\n\n
\n
\n\n\n + \
\n \n
\n \n
\n + \
\"@brettlangdon\"\n brettlangdon\n\n\n requested review from\n + \ a team\n\n as code owners\n\n\n + \ December 12, 2024 13:39 \n + \ \n
\n
\n
\n \n
\n \n
\n + \
\"@brettlangdon\"\n brettlangdon\n\n\n requested review from\n + \ avara1986 + and \n erikayasuda\n\n\n\n + \ December 12, 2024 13:39 \n + \ \n
\n
\n\n\n
\n\n
\n \n \n
\n\n
\n + \ \"@github-actions\"\n\n \n + \ \"GitHub\n \n
\n\n\n
\n\n
\n + \
\n + \
\n + \
\n + \ \n + \ \n \n\n \n\n\n + \ \n \n \n \n Copy + link\n\n
\n
\n + \
\n
\n\n
\n \n\n + \ \n + \ Contributor\n\n\n + \ \n\n
\n\n

\n + \
\n \n\n \n github-actions\n + \ bot\n\n \n\n + \ \n\n commented\n\n\n Dec 12, 2024\n\n\n + \ \n
\n\n

\n
\n\n\n
\n\n + \ \n\n \n \n + \ \n \n \n
\n + \

CODEOWNERS have + been resolved as:

\n
.github/workflows/system-tests.yml
+        \                                     @DataDog/python-guild @DataDog/apm-core-python\n
\n\n + \
\n
\n\n\n
\n\n \n\n
\n + \
\n
\n \n \n
\n
\n \n
\n
\n
\n
\n + \
\n
\n\n
\n \n + \

\n \n + \ \n \n \n\n

\n \n\n\n + \
\n
\n\n
\n\n\n
\n\n\n
\n + \ \n
\n \n
\n \"romainkomorndatadog\"\n + \
\n \n
\n + \
\n + \ \n romainkomorndatadog\n + \ \n\n \n\n approved these changes\n\n\n \n \n + \ Dec + 12, 2024\n \n \n \n + \
\n\n \n
\n
\n\n
\n \n \n
\n
\n
\n
\n\n\n
\n\n + \
\n \n \n
\n\n
\n + \ \"@datadog-dd-trace-py-rkomorn\"\n\n
\n\n\n + \
\n\n
\n + \
\n + \
\n + \
\n + \ \n + \ \n \n\n \n\n\n + \ \n \n \n \n Copy + link\n\n
\n
\n + \
\n
\n\n
\n \n\n\n\n + \ \n\n
\n\n

\n + \
\n \n\n \n datadog-dd-trace-py-rkomorn\n + \ bot\n\n \n\n + \ \n\n commented\n\n\n Dec 12, 2024\n\n\n + \ \n
\n\n

\n
\n\n\n
\n\n + \ \n\n \n \n + \ \n \n \n
\n + \

Datadog Report

\n

Branch report: + brettlangdon-patch-3
\nCommit + report: a6675d3
\nTest + service: dd-trace-py

\n

\u2705 + 0 Failed, 55 Passed, 1413 Skipped, 1m 29.81s Total duration (35m 20.17s time + saved)

\n
\n
\n\n\n + \
\n\n \n\n
\n + \
\n
\n \n \n
\n
\n \n
\n
\n
\n
\n + \
\n
\n\n
\n \n + \

\n \n + \ \n \n \n\n

\n \n\n\n + \
\n
\n\n
\n\n\n
\n\n\n
\n + \ \n \n
\n\n
\n + \ \"@brettlangdon\"\n\n
\n\n\n
\n\n
\n + \
\n + \
\n + \
\n + \ \n + \ \n \n\n \n\n\n + \ \n \n \n \n Copy + link\n\n
\n
\n + \
\n
\n\n
\n \n\n + \ \n Member\n\n\n \n\n Author\n\n\n + \
\n\n

\n
\n + \ \n\n \n brettlangdon\n + \ \n\n \n\n \n\n commented\n\n\n Dec 12, 2024\n\n\n + \ \n
\n\n

\n
\n\n\n
\n\n + \ \n\n \n \n + \ \n \n \n
\n + \

/merge

\n
\n
\n\n\n + \
\n\n \n\n
\n + \
\n
\n \n \n
\n
\n \n
\n
\n
\n
\n + \
\n
\n\n
\n \n + \

\n \n + \ \n \n \n\n

\n \n\n\n + \
\n
\n\n
\n\n\n
\n\n\n
\n + \ \n \n
\n\n
\n + \ \"@dd-devflow\"\n\n
\n\n\n
\n\n
\n + \
\n + \
\n + \
\n + \ \n + \ \n \n\n \n\n\n + \ \n \n \n \n Copy + link\n\n
\n
\n + \
\n
\n\n
\n \n\n\n\n + \ \n\n
\n\n

\n + \
\n \n\n \n dd-devflow\n + \ bot\n\n \n\n + \ \n\n commented\n\n\n Dec 12, 2024\n\n\n + \ \n \n\n
\n \n
\n + \ \n edited\n \n \n \n + \ \n\n
\n + \
\n \n \n \n + \ \n \n + \ \n Loading\n\n \n \n + \
\n
\n\n
\n\n

\n
\n\n\n
\n\n + \ \n\n \n \n + \ \n \n \n
\n + \ \n

Devflow running: /merge

\n

View all feedbacks in Devflow UI.

\n
\n

2024-12-12 13:54:30 UTC \u2139\uFE0F MergeQueue: + waiting for PR to be ready

\n

This merge request is not + mergeable yet, because of pending checks/missing approvals. It will be added + to the queue as soon as checks pass and/or get approvals.
\nNote: + if you pushed new commits since the last approval, you may need additional + approval.
\nYou can remove it from the waiting list with /remove + command.

\n

Use /merge -c + to cancel this operation!

\n
\n

2024-12-12 + 14:26:14 UTC \u2139\uFE0F MergeQueue: merge request + added to the queue

\n

The median merge time in main + is 34m.

\n

Use /merge -c + to cancel this operation!

\n
\n

\u23F3 + command still in progress ...

\n
\n
\n\n\n + \
\n\n \n\n
\n + \
\n
\n \n \n
\n
\n \n
\n
\n
\n
\n + \
\n
\n\n
\n \n + \

\n \n + \ \n \n \n\n

\n \n\n\n + \
\n
\n\n
\n\n\n
\n\n\n
\n + \ \n
\n
\n \n + \ \n\n
\n + \
\n\n \n\n \"@dd-devflow\"\ndd-devflow\nbot\n\n\n\n added\n the \n\n mergequeue-status: waiting\n\n label\n\n\n Dec 12, 2024\n\n
\n
\n\n\n\n\n
\n\n + \
\n \n \n
\n\n
\n + \ \"@pr-commenter\"\n\n
\n\n\n
\n\n
\n + \
\n + \
\n + \
\n + \ \n + \ \n \n\n \n\n\n + \ \n \n \n \n Copy + link\n\n
\n
\n + \
\n
\n\n
\n \n\n\n\n + \ \n\n
\n\n

\n + \
\n \n\n \n pr-commenter\n + \ bot\n\n \n\n + \ \n\n commented\n\n\n Dec 12, 2024\n\n\n + \ \n
\n\n

\n
\n\n\n
\n\n + \ \n\n \n \n + \ \n \n \n
\n + \

Benchmarks

\n

Benchmark execution + time: 2024-12-12 14:24:20

\n

Comparing candidate commit + a6675d3 + in PR branch brettlangdon-patch-3 with + baseline commit 385d8e0 + in branch main.

\n

Found + 0 performance improvements and 0 performance regressions! Performance is the + same for 394 metrics, 2 unstable metrics.

\n
\n
\n\n\n + \
\n\n \n\n
\n + \
\n
\n \n \n
\n
\n \n
\n
\n
\n
\n + \
\n
\n\n
\n \n + \

\n \n + \ \n \n \n\n

\n \n\n\n + \
\n
\n\n
\n\n\n
\n\n\n
\n + \ \n
\n
\n \n + \ \n\n
\n + \ \n
\n\n\n\n\n
\n\n\n\n\n\n + \ \n
\n
\n \n
+ \
\n\n\n\n
\n\n
\n + \
\n
\n \n Sign up for free\n to join + this conversation on GitHub.\n Already have an account?\n Sign + in to comment\n\n\n \n
\n\n
\n
\n
\n\n
\n + \
\n
\n\n\n \n
\n \n
\n \n
\n Reviewers\n
\n\n \n\n\n + \

\n \n\n \n \"@romainkomorndatadog\"\n \n romainkomorndatadog\n\n\n\n + \ \n + \ \n \n + \ \n\n \n \n romainkomorndatadog approved these changes\n\n + \

\n

\n \n\n \n \"@avara1986\"\n \n avara1986\n\n\n + \ Awaiting requested review from avara1986\n\n + \ avara1986 is a code owner automatically + assigned from DataDog/python-guild\n\n \n

\n

\n \n\n \n \"@erikayasuda\"\n \n erikayasuda\n\n\n + \ Awaiting requested review from erikayasuda\n\n + \ erikayasuda is a code owner automatically + assigned from DataDog/apm-core-python\n\n \n

\n\n + \ \n
\n\n
\n\n\n
\n
\n\n \n
\n Assignees\n + \
\n\n\n \n\n + \ No one assigned\n\n\n\n
\n\n\n \n\n \n\n\n
\n Labels\n
\n\n\n
\n \n\n changelog/no-changelog\n\n + \ A changelog entry is not required for + this PR.\n \n\n mergequeue-status: + in_progress\n\n\n
\n\n
\n\n\n \n\n
\n
\n
\n Projects\n + \
\n\n
\n
\n\n None yet\n\n\n\n
\n\n\n + \ \n
\n
\n \n
\n Milestone\n + \
\n\n No milestone\n\n
\n\n\n \n \n \n
\n
\n \n
\n \n
\n Development\n + \
\n\n\n \n\n

Successfully merging this pull request may + close these issues.

\n\n\n \n\n
+ \
\n
\n
\n\n \n \n\n + \ \n\n \n
\n + \
\n
\n 2 participants\n
\n \n
\n
\n\n\n\n + \ \n\n \n\n\n\n\n \n\n
\n\n\n
\n \n \n \n + \ \n\n\n + \ \n\n\n \n\n\n\n\n \n \n\n + \ \n\n
\n

Footer

\n\n \n\n\n
\n
\n \n \n \n\n\n + \ \n © 2024 GitHub, Inc.\n \n
\n\n + \ \n
\n
\n\n\n\n\n \n\n\n \n\n + \ \n\n
\n + \
\n
\n
\n\n \n\n\n\n\n\n \n\n
\n + \
\n \n\n\n" + headers: + Accept-Ranges: + - bytes + Cache-Control: + - no-cache + Content-Security-Policy: + - 'default-src ''none''; base-uri ''self''; child-src github.com/assets-cdn/worker/ + github.com/webpack/ github.com/assets/ gist.github.com/assets-cdn/worker/; + connect-src ''self'' uploads.github.com www.githubstatus.com collector.github.com + raw.githubusercontent.com api.github.com github-cloud.s3.amazonaws.com github-production-repository-file-5c1aeb.s3.amazonaws.com + github-production-upload-manifest-file-7fdce7.s3.amazonaws.com github-production-user-asset-6210df.s3.amazonaws.com + *.rel.tunnels.api.visualstudio.com wss://*.rel.tunnels.api.visualstudio.com + objects-origin.githubusercontent.com copilot-proxy.githubusercontent.com proxy.individual.githubcopilot.com + proxy.business.githubcopilot.com proxy.enterprise.githubcopilot.com *.actions.githubusercontent.com + wss://*.actions.githubusercontent.com productionresultssa0.blob.core.windows.net/ + productionresultssa1.blob.core.windows.net/ productionresultssa2.blob.core.windows.net/ + productionresultssa3.blob.core.windows.net/ productionresultssa4.blob.core.windows.net/ + productionresultssa5.blob.core.windows.net/ productionresultssa6.blob.core.windows.net/ + productionresultssa7.blob.core.windows.net/ productionresultssa8.blob.core.windows.net/ + productionresultssa9.blob.core.windows.net/ productionresultssa10.blob.core.windows.net/ + productionresultssa11.blob.core.windows.net/ productionresultssa12.blob.core.windows.net/ + productionresultssa13.blob.core.windows.net/ productionresultssa14.blob.core.windows.net/ + productionresultssa15.blob.core.windows.net/ productionresultssa16.blob.core.windows.net/ + productionresultssa17.blob.core.windows.net/ productionresultssa18.blob.core.windows.net/ + productionresultssa19.blob.core.windows.net/ github-production-repository-image-32fea6.s3.amazonaws.com + github-production-release-asset-2e65be.s3.amazonaws.com insights.github.com + 
wss://alive.github.com api.githubcopilot.com api.individual.githubcopilot.com + api.business.githubcopilot.com api.enterprise.githubcopilot.com; font-src + github.githubassets.com; form-action ''self'' github.com gist.github.com copilot-workspace.githubnext.com + objects-origin.githubusercontent.com; frame-ancestors ''none''; frame-src + viewscreen.githubusercontent.com notebooks.githubusercontent.com; img-src + ''self'' data: blob: github.githubassets.com media.githubusercontent.com camo.githubusercontent.com + identicons.github.com avatars.githubusercontent.com private-avatars.githubusercontent.com + github-cloud.s3.amazonaws.com objects.githubusercontent.com secured-user-images.githubusercontent.com/ + user-images.githubusercontent.com/ private-user-images.githubusercontent.com + opengraph.githubassets.com github-production-user-asset-6210df.s3.amazonaws.com + customer-stories-feed.github.com spotlights-feed.github.com objects-origin.githubusercontent.com + *.githubusercontent.com; manifest-src ''self''; media-src github.com user-images.githubusercontent.com/ + secured-user-images.githubusercontent.com/ private-user-images.githubusercontent.com + github-production-user-asset-6210df.s3.amazonaws.com gist.github.com; script-src + github.githubassets.com; style-src ''unsafe-inline'' github.githubassets.com; + upgrade-insecure-requests; worker-src github.com/assets-cdn/worker/ github.com/webpack/ + github.com/assets/ gist.github.com/assets-cdn/worker/' + Content-Type: + - text/html; charset=utf-8 + Date: + - Thu, 12 Dec 2024 14:42:38 GMT + Referrer-Policy: + - no-referrer-when-downgrade + Server: + - GitHub.com + Set-Cookie: + - 
_gh_sess=goPCFokfo9CoHjAGnWH6245viFzykZOSTQZe2I4w0VI8O%2FBqLC9Xv8AW%2F6ZjmrAmWBiSwR%2BJfSgAxkI4KR6iJ7iP7KTOza9Z%2Fx3f69HoNCXVVOHyocDogP%2Bkm1AiUdpG5y74PTCPFqrxrAFXC27mPRlmZoEWOfCSWgl4YRkTZv70BAdIcjfmqhFa%2BtQhB0TltjWeDdF8qyOXZzTY7EorwqYP%2BPT%2FJYz2v61wLYsHH22O6rrrwLYlwr2P3x6Yb3Bx2aKM6eK975vB0hXOQtNMug%3D%3D--y2NTdNqEEkcwaCVD--Ce4x%2FRlMrMinpyEeKuACLQ%3D%3D; + Path=/; HttpOnly; Secure; SameSite=Lax + - _octo=GH1.1.2015969099.1734014557; Path=/; Domain=github.com; Expires=Fri, + 12 Dec 2025 14:42:37 GMT; Secure; SameSite=Lax + - logged_in=no; Path=/; Domain=github.com; Expires=Fri, 12 Dec 2025 14:42:37 + GMT; HttpOnly; Secure; SameSite=Lax + Strict-Transport-Security: + - max-age=31536000; includeSubdomains; preload + Transfer-Encoding: + - chunked + Vary: + - X-PJAX, X-PJAX-Container, Turbo-Visit, Turbo-Frame, Accept-Encoding, Accept, + X-Requested-With + X-Content-Type-Options: + - nosniff + X-Frame-Options: + - deny + X-GitHub-Request-Id: + - ED29:27C835:21B19AA:2F4A603:675AF65D + X-XSS-Protection: + - '0' + connection: + - close + server-timing: + - pull_request_layout-fragment;desc="pull_request_layout fragment";dur=412.175919,conversation_content-fragment;desc="conversation_content + fragment";dur=448.910543,conversation_sidebar-fragment;desc="conversation_sidebar + fragment";dur=302.334653,nginx;desc="NGINX";dur=1.331055,glb;desc="GLB";dur=3.067062 + x-voltron-version: + - 69a2227 + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - application/vnd.github+json + Connection: + - close + Host: + - api.github.com + method: GET + uri: https://api.github.com/repos/datadog/dd-trace-py/pulls/6388/files?page=1 + response: + body: + string: 
'[{"sha":"1325b0864ebc6d4c40970f698018ac2524fe4e33","filename":"ddtrace/debugging/_expressions.py","status":"modified","additions":2,"deletions":2,"changes":4,"blob_url":"https://github.com/DataDog/dd-trace-py/blob/2eb060881fdd94f4f717ae19549b598317b74d30/ddtrace%2Fdebugging%2F_expressions.py","raw_url":"https://github.com/DataDog/dd-trace-py/raw/2eb060881fdd94f4f717ae19549b598317b74d30/ddtrace%2Fdebugging%2F_expressions.py","contents_url":"https://api.github.com/repos/DataDog/dd-trace-py/contents/ddtrace%2Fdebugging%2F_expressions.py?ref=2eb060881fdd94f4f717ae19549b598317b74d30","patch":"@@ + -292,8 +292,8 @@ def _compile_operation(ast):\n \n def _compile_literal(ast):\n # + type: (DDASTType) -> Optional[List[Instr]]\n- # literal => | + true | false | \"string\"\n- if not isinstance(ast, (str, int, float, bool)):\n+ # + literal => | true | false | \"string\" | null\n+ if not (isinstance(ast, + (str, int, float, bool)) or ast is None):\n return None\n \n return + [Instr(\"LOAD_CONST\", ast)]"},{"sha":"b4517ad79f67a2b362360ae8e7e0b0b3fa2e4ea8","filename":"releasenotes/notes/fix-debugger-expressions-none-literal-30f3328d2e386f40.yaml","status":"added","additions":4,"deletions":0,"changes":4,"blob_url":"https://github.com/DataDog/dd-trace-py/blob/2eb060881fdd94f4f717ae19549b598317b74d30/releasenotes%2Fnotes%2Ffix-debugger-expressions-none-literal-30f3328d2e386f40.yaml","raw_url":"https://github.com/DataDog/dd-trace-py/raw/2eb060881fdd94f4f717ae19549b598317b74d30/releasenotes%2Fnotes%2Ffix-debugger-expressions-none-literal-30f3328d2e386f40.yaml","contents_url":"https://api.github.com/repos/DataDog/dd-trace-py/contents/releasenotes%2Fnotes%2Ffix-debugger-expressions-none-literal-30f3328d2e386f40.yaml?ref=2eb060881fdd94f4f717ae19549b598317b74d30","patch":"@@ + -0,0 +1,4 @@\n+---\n+fixes:\n+ - |\n+ dynamic instrumentation: handle + null literal in conditions and 
expressions."},{"sha":"3c4d96fe66b871238c02651af82d43a1ad8085c3","filename":"tests/debugging/test_expressions.py","status":"modified","additions":1,"deletions":0,"changes":1,"blob_url":"https://github.com/DataDog/dd-trace-py/blob/2eb060881fdd94f4f717ae19549b598317b74d30/tests%2Fdebugging%2Ftest_expressions.py","raw_url":"https://github.com/DataDog/dd-trace-py/raw/2eb060881fdd94f4f717ae19549b598317b74d30/tests%2Fdebugging%2Ftest_expressions.py","contents_url":"https://api.github.com/repos/DataDog/dd-trace-py/contents/tests%2Fdebugging%2Ftest_expressions.py?ref=2eb060881fdd94f4f717ae19549b598317b74d30","patch":"@@ + -72,6 +72,7 @@ def __getitem__(self, name):\n # Test argument predicates + and operations\n ({\"contains\": [{\"ref\": \"payload\"}, \"hello\"]}, + {\"payload\": \"hello world\"}, True),\n ({\"eq\": [{\"ref\": \"hits\"}, + True]}, {\"hits\": True}, True),\n+ ({\"eq\": [{\"ref\": \"hits\"}, + None]}, {\"hits\": None}, True),\n ({\"substring\": [{\"ref\": \"payload\"}, + 4, 7]}, {\"payload\": \"hello world\"}, \"hello world\"[4:7]),\n ({\"any\": + [{\"ref\": \"collection\"}, {\"isEmpty\": {\"ref\": \"@it\"}}]}, {\"collection\": + [\"foo\", \"bar\", \"\"]}, True),\n ({\"startsWith\": [{\"ref\": \"local_string\"}, + \"hello\"]}, {\"local_string\": \"hello world!\"}, True),"}]' + headers: + Access-Control-Allow-Origin: + - '*' + Access-Control-Expose-Headers: + - ETag, Link, Location, Retry-After, X-GitHub-OTP, X-RateLimit-Limit, X-RateLimit-Remaining, + X-RateLimit-Used, X-RateLimit-Resource, X-RateLimit-Reset, X-OAuth-Scopes, + X-Accepted-OAuth-Scopes, X-Poll-Interval, X-GitHub-Media-Type, X-GitHub-SSO, + X-GitHub-Request-Id, Deprecation, Sunset + Cache-Control: + - private, max-age=60, s-maxage=60 + Content-Length: + - '3264' + Content-Security-Policy: + - default-src 'none' + Content-Type: + - application/json; charset=utf-8 + Date: + - Thu, 12 Dec 2024 14:42:38 GMT + ETag: + - '"85a10accfc7f3330efa4961171936a0e3ea39a94e59a1811461b17a9a610bdb4"' + 
Last-Modified: + - Sun, 08 Dec 2024 16:19:43 GMT + Referrer-Policy: + - origin-when-cross-origin, strict-origin-when-cross-origin + Server: + - github.com + Strict-Transport-Security: + - max-age=31536000; includeSubdomains; preload + Vary: + - Accept, Authorization, Cookie, X-GitHub-OTP,Accept-Encoding, Accept, X-Requested-With + X-Accepted-OAuth-Scopes: + - '' + X-Content-Type-Options: + - nosniff + X-Frame-Options: + - deny + X-GitHub-Media-Type: + - github.v3; format=json + X-GitHub-Request-Id: + - ED30:1C31C4:1AEE9EF:3582AA6:675AF65E + X-OAuth-Scopes: + - delete:packages, gist, read:org, read:packages, repo, workflow + X-RateLimit-Limit: + - '5000' + X-RateLimit-Remaining: + - '4898' + X-RateLimit-Reset: + - '1734015073' + X-RateLimit-Resource: + - core + X-RateLimit-Used: + - '102' + X-XSS-Protection: + - '0' + connection: + - close + x-github-api-version-selected: + - '2022-11-28' + x-oauth-client-id: + - 178c6fc778ccc68e1d6a + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - application/vnd.github+json + Connection: + - close + Host: + - api.github.com + method: GET + uri: https://api.github.com/repos/datadog/dd-trace-py/pulls/6388/files?page=2 + response: + body: + string: '[]' + headers: + Access-Control-Allow-Origin: + - '*' + Access-Control-Expose-Headers: + - ETag, Link, Location, Retry-After, X-GitHub-OTP, X-RateLimit-Limit, X-RateLimit-Remaining, + X-RateLimit-Used, X-RateLimit-Resource, X-RateLimit-Reset, X-OAuth-Scopes, + X-Accepted-OAuth-Scopes, X-Poll-Interval, X-GitHub-Media-Type, X-GitHub-SSO, + X-GitHub-Request-Id, Deprecation, Sunset + Cache-Control: + - private, max-age=60, s-maxage=60 + Content-Length: + - '2' + Content-Security-Policy: + - default-src 'none' + Content-Type: + - application/json; charset=utf-8 + Date: + - Thu, 12 Dec 2024 14:42:38 GMT + ETag: + - '"4acd3c336ca9625e24fba0a2ea9cad06cf4693ace7e76d92c8a9a05f03c7b0cd"' + Last-Modified: + - Sun, 08 Dec 2024 16:19:43 GMT + Link: + - ; rel="prev", 
+ ; rel="last", + ; rel="first" + Referrer-Policy: + - origin-when-cross-origin, strict-origin-when-cross-origin + Server: + - github.com + Strict-Transport-Security: + - max-age=31536000; includeSubdomains; preload + Vary: + - Accept, Authorization, Cookie, X-GitHub-OTP,Accept-Encoding, Accept, X-Requested-With + X-Accepted-OAuth-Scopes: + - '' + X-Content-Type-Options: + - nosniff + X-Frame-Options: + - deny + X-GitHub-Media-Type: + - github.v3; format=json + X-GitHub-Request-Id: + - ED34:27B12E:1B9E8A9:36ED322:675AF65E + X-OAuth-Scopes: + - delete:packages, gist, read:org, read:packages, repo, workflow + X-RateLimit-Limit: + - '5000' + X-RateLimit-Remaining: + - '4897' + X-RateLimit-Reset: + - '1734015073' + X-RateLimit-Resource: + - core + X-RateLimit-Used: + - '103' + X-XSS-Protection: + - '0' + connection: + - close + x-github-api-version-selected: + - '2022-11-28' + x-oauth-client-id: + - 178c6fc778ccc68e1d6a + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - application/vnd.github+json + Connection: + - close + Host: + - api.github.com + method: GET + uri: https://api.github.com/repos/datadog/dd-trace-py/pulls/11690/files?page=1 + response: + body: + string: '[{"sha":"ce795db4fe24584e0a3c105f6f130071b1292cbe","filename":".github/workflows/system-tests.yml","status":"modified","additions":2,"deletions":2,"changes":4,"blob_url":"https://github.com/DataDog/dd-trace-py/blob/a6675d3799af44382bd5b677c56a94843a6433aa/.github%2Fworkflows%2Fsystem-tests.yml","raw_url":"https://github.com/DataDog/dd-trace-py/raw/a6675d3799af44382bd5b677c56a94843a6433aa/.github%2Fworkflows%2Fsystem-tests.yml","contents_url":"https://api.github.com/repos/DataDog/dd-trace-py/contents/.github%2Fworkflows%2Fsystem-tests.yml?ref=a6675d3799af44382bd5b677c56a94843a6433aa","patch":"@@ + -54,7 +54,7 @@ jobs:\n # system-tests requires an API_KEY, but it does + not have to be a valid key, as long as we don''t run a scenario\n # + that make assertion on backend 
data. Using a fake key allow to run system + tests on PR originating from forks.\n # If ever it''s needed, a valid + key exists in the repo, using ${{ secrets.DD_API_KEY }}\n- DD_API_KEY: + 1234567890abcdef1234567890abcdef\n+ DD_API_KEY: ${{ secrets.FAKE_DD_API_KEY + }}\n CMAKE_BUILD_PARALLEL_LEVEL: 12\n SYSTEM_TESTS_AWS_ACCESS_KEY_ID: + ${{ secrets.IDM_AWS_ACCESS_KEY_ID }}\n SYSTEM_TESTS_AWS_SECRET_ACCESS_KEY: + ${{ secrets.IDM_AWS_SECRET_ACCESS_KEY }}\n@@ -106,7 +106,7 @@ jobs:\n # + system-tests requires an API_KEY, but it does not have to be a valid key, + as long as we don''t run a scenario\n # that make assertion on backend + data. Using a fake key allow to run system tests on PR originating from forks.\n # + If ever it''s needed, a valid key exists in the repo, using ${{ secrets.DD_API_KEY + }}\n- DD_API_KEY: 1234567890abcdef1234567890abcdef\n+ DD_API_KEY: + ${{ secrets.FAKE_DD_API_KEY }}\n CMAKE_BUILD_PARALLEL_LEVEL: 12\n SYSTEM_TESTS_AWS_ACCESS_KEY_ID: + ${{ secrets.IDM_AWS_ACCESS_KEY_ID }}\n SYSTEM_TESTS_AWS_SECRET_ACCESS_KEY: + ${{ secrets.IDM_AWS_SECRET_ACCESS_KEY }}"}]' + headers: + Access-Control-Allow-Origin: + - '*' + Access-Control-Expose-Headers: + - ETag, Link, Location, Retry-After, X-GitHub-OTP, X-RateLimit-Limit, X-RateLimit-Remaining, + X-RateLimit-Used, X-RateLimit-Resource, X-RateLimit-Reset, X-OAuth-Scopes, + X-Accepted-OAuth-Scopes, X-Poll-Interval, X-GitHub-Media-Type, X-GitHub-SSO, + X-GitHub-Request-Id, Deprecation, Sunset + Cache-Control: + - private, max-age=60, s-maxage=60 + Content-Length: + - '1930' + Content-Security-Policy: + - default-src 'none' + Content-Type: + - application/json; charset=utf-8 + Date: + - Thu, 12 Dec 2024 14:42:39 GMT + ETag: + - '"e91026bdc9aa216ff163739444e03dfcf4e719131166fd717d6e5a7eafbd54fe"' + Last-Modified: + - Thu, 12 Dec 2024 14:26:20 GMT + Referrer-Policy: + - origin-when-cross-origin, strict-origin-when-cross-origin + Server: + - github.com + Strict-Transport-Security: + - max-age=31536000; 
includeSubdomains; preload + Vary: + - Accept, Authorization, Cookie, X-GitHub-OTP,Accept-Encoding, Accept, X-Requested-With + X-Accepted-OAuth-Scopes: + - '' + X-Content-Type-Options: + - nosniff + X-Frame-Options: + - deny + X-GitHub-Media-Type: + - github.v3; format=json + X-GitHub-Request-Id: + - ED39:1C31C4:1AEEBE6:3582E96:675AF65E + X-OAuth-Scopes: + - delete:packages, gist, read:org, read:packages, repo, workflow + X-RateLimit-Limit: + - '5000' + X-RateLimit-Remaining: + - '4896' + X-RateLimit-Reset: + - '1734015073' + X-RateLimit-Resource: + - core + X-RateLimit-Used: + - '104' + X-XSS-Protection: + - '0' + connection: + - close + x-github-api-version-selected: + - '2022-11-28' + x-oauth-client-id: + - 178c6fc778ccc68e1d6a + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - application/vnd.github+json + Connection: + - close + Host: + - api.github.com + method: GET + uri: https://api.github.com/repos/datadog/dd-trace-py/pulls/11690/files?page=2 + response: + body: + string: '[]' + headers: + Access-Control-Allow-Origin: + - '*' + Access-Control-Expose-Headers: + - ETag, Link, Location, Retry-After, X-GitHub-OTP, X-RateLimit-Limit, X-RateLimit-Remaining, + X-RateLimit-Used, X-RateLimit-Resource, X-RateLimit-Reset, X-OAuth-Scopes, + X-Accepted-OAuth-Scopes, X-Poll-Interval, X-GitHub-Media-Type, X-GitHub-SSO, + X-GitHub-Request-Id, Deprecation, Sunset + Cache-Control: + - private, max-age=60, s-maxage=60 + Content-Length: + - '2' + Content-Security-Policy: + - default-src 'none' + Content-Type: + - application/json; charset=utf-8 + Date: + - Thu, 12 Dec 2024 14:42:39 GMT + ETag: + - '"4acd3c336ca9625e24fba0a2ea9cad06cf4693ace7e76d92c8a9a05f03c7b0cd"' + Last-Modified: + - Thu, 12 Dec 2024 14:26:20 GMT + Link: + - ; rel="prev", + ; rel="last", + ; rel="first" + Referrer-Policy: + - origin-when-cross-origin, strict-origin-when-cross-origin + Server: + - github.com + Strict-Transport-Security: + - max-age=31536000; 
includeSubdomains; preload + Vary: + - Accept, Authorization, Cookie, X-GitHub-OTP,Accept-Encoding, Accept, X-Requested-With + X-Accepted-OAuth-Scopes: + - '' + X-Content-Type-Options: + - nosniff + X-Frame-Options: + - deny + X-GitHub-Media-Type: + - github.v3; format=json + X-GitHub-Request-Id: + - ED3C:38B93F:1B68A83:36861E8:675AF65F + X-OAuth-Scopes: + - delete:packages, gist, read:org, read:packages, repo, workflow + X-RateLimit-Limit: + - '5000' + X-RateLimit-Remaining: + - '4895' + X-RateLimit-Reset: + - '1734015073' + X-RateLimit-Resource: + - core + X-RateLimit-Used: + - '105' + X-XSS-Protection: + - '0' + connection: + - close + x-github-api-version-selected: + - '2022-11-28' + x-oauth-client-id: + - 178c6fc778ccc68e1d6a + status: + code: 200 + message: OK +- request: + body: null + headers: + Connection: + - close + Host: + - github.com + method: GET + uri: https://github.com/DataDog/dd-trace-py/pull/6412 + response: + body: + string: "\n\n\n\n\n\n\n\n\n\n\n\n \n \n + \ \n \n \n \n + \ \n + \ \n\n + \ \n\n \n\n \n \n \n \n \n\n\n \n\n\n \n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n + \ \n \n\n\n\n\n\n\n\n\n\n\n\n\n ci: run the debugger suite only if necessary by P403n1x87 + \xB7 Pull Request #6412 \xB7 DataDog/dd-trace-py \xB7 GitHub\n\n\n\n + \ \n \n \n\n \n \n\n\n + \ \n\n\n \n\n\n \n \n\n \n \n\n + \ \n\n\n\n \n\n \n\n\n\n\n \n\n \n\n \n\n + \ \n\n \n\n \n\n \n \n \n\n \n \n \n\n\n\n\n \n\n\n\n + \ \n\n\n \n \n \n \n\n \n\n \n + \ \n\n + \ \n\n\n\n \n\n \n\n\n \n\n \n\n \n \n + \ \n\n\n\n\n\n \n\n + \ \n\n \n
\n \n\n\n
\n Skip to content\n\n + \ \n \n + \ \n \n \n\n\n\n\n\n\n\n\n\n \n \n + \
\n\n\n\n\n\n + \ \n\n \n\n \n\n\n
\n

Navigation Menu

\n\n \n\n + \
\n
\n
\n + \ \n
\n\n \n + \ \n + \ \n\n + \ \n\n
\n \n Sign in\n \n
\n
\n\n\n + \
\n
\n + \ \n\n
\n \n\n\n\n \n \n
\n \n \n\n + \
\n Search + or jump to...\n
\n + \ \n\n + \
\n \n\n \n\n \n
\n \n + \

Search + code, repositories, users, issues, pull requests...

\n
\n \n
+ \
\n
\n \n
\n \n \n \n \n \n\n \n
\n
\n
\n
\n + \ \n
\n + \
\n Clear\n + \ \n\n + \
\n \n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n + \
\n \n + \
\n + \
\n
\n\n \n + \
\n
\n\n
\n
\n
\n \n
\n + \ \n\n \n
\n + \
\n
\n + \

\n Provide + feedback\n

\n \n
\n
\n + \ \n
\n
\n + \ \n
\n \n + \
\n

We read every piece of feedback, and take your input very + seriously.

\n \n \n + \ \n
\n
\n + \ \n
\n\n \n \n\n + \ \n
\n
\n + \
\n

\n Saved searches\n

\n + \

Use + saved searches to filter your results more quickly

\n
\n
\n \n + \
\n
\n \n
\n \n + \
\n\n \n\n
\n + \
\n
\n\n
\n + \
\n \n
\n + \
\n
\n\n\n
\n \n Sign in\n \n + \
\n\n \n Sign + up\n \n \n
\n + \
\n
\n \n\n\n \n \n\n + \
\n\n\n\n\n\n\n\n\n + \
\n\n\n + \ \n\n\n + \ \n
\n\n\n + \ \n\n\n\n\n\n\n \n
\n
\n \n \n\n\n\n + \ \n \n\n \n\n\n\n\n\n\n \n
\n\n
\n\n + \
\n \n
\n + \ \n \n\n + \ \n \n + \ \n DataDog\n + \ \n /\n + \ \n dd-trace-py\n \n\n Public\n
\n\n\n + \
\n\n
\n \n\n + \
\n
\n\n
\n
\n\n\n \n\n + \
\n\n \n\n\n\n\n
\n \n\n\n\n \n \n
\n \n\n
\n \n \n \n\n
\n
\n
\n\n \n
\n \n \n New issue\n \n \n + \
\n
\n \n \n\n
\n\n
\n

\n Have a question + about this project? Sign up for a free GitHub account to open an + issue and contact its maintainers and the community.\n

\n\n \n\n

By + clicking “Sign up for GitHub”, you agree to our terms of service + and\n privacy statement. We\u2019ll occasionally send you + account related emails.

\n\n

\n + \ Already on GitHub?\n Sign + in\n to your account\n

\n
\n\n
\n
\n
\n + \ \n + \
\n\n

\n ci: + run the debugger suite only if necessary\n #6412\n

\n
\n
\n\n
\n
\n \n + \ Merged\n\n
\n\n\n\n\n + \
\n P403n1x87\n + \ merged 7 commits into\n\n\n DataDog:1.x\n\nfrom\n\nP403n1x87:ci/debugger-suitespec\n \n \n \n\n \n \n\n + \
\n
\n\n\n + \ Jul 25, + 2023\n\n\n
\n
\n\n\n \n\n\n\n
\n
\n
\n
\n + \
\n \n Merged\n\n + \
\n\n\n\n\n
\n + \

\n \n ci: run the debugger suite only if necessary\n \n + \ #6412\n

\n\n + \
\n P403n1x87\n merged 7 commits into\n\n\n DataDog:1.x\n\nfrom\n\nP403n1x87:ci/debugger-suitespec\n \n \n \n\n \n \n\n + \
\n
\n\n\n + \ Jul 25, + 2023\n\n\n
\n
\n
\n + \
\n
\n
\n
\n
\n\n\n\n + \ \n + \ \n\n\n + \ \n\n\n
\n + \
\n

Conversation

\n + \ \n \n\n\n \n\n
\n\n
\n \"P403n1x87\"\n + \ \n \n
\n + \
\n
\n
\n
\n \n \n \n\n \n\n\n \n \n \n \n Copy + link\n\n
\n
\n + \
\n
\n\n
\n \n\n + \ \n + \ Contributor\n\n\n + \ \n\n
\n\n

\n + \
\n \"@P403n1x87\"\n\n \n + \ P403n1x87\n \n\n \n\n \n\n commented\n\n\n + \ Jul + 20, 2023\n\n\n \n + \ \n\n
\n + \ \n
\n \n edited by majorgreys\n + \ \n \n \n \n\n
\n
\n \n \n \n + \ \n \n + \ \n Loading\n\n \n \n + \
\n
\n\n
\n\n

\n
\n\n
\n + \
\n \n \n + \
\n

We introduce the concept + of suitespec as a way of describing how sources affect test runs. We use it + to ensure that the debugger tests run only if sources that the suite depends + on are modified by the current commit.

\n

Suitespec Implementation + Details

\n

The suitespec solution is based on a manual + configuration of of test suites. To simplify the declaration of file patterns + for test suites, one can make use of components, which essentially + are a logic collection of patterns. Test suite can then be declared as a list + of components to reflect their dependencies on these logic parts, and to DRY + the declaration itself by avoiding repetitions.

\n

Notes

\n
    \n
  • When the script fails for any reason, tests are run.
  • \n
  • It + is important that path patterns are listed correctly, or some tests might + not run when they are in fact supposed to.
  • \n
  • Best effort to determine + the correct list of changed files via the GitHub REST API. When that fails, + we fall back to the less accurate git diff + against the target branch.
  • \n
\n

Checklist

\n
    \n
  • Change(s) + are motivated and described in the PR description.
  • \n
  • Testing strategy is described if automated tests are not included + in the PR.
  • \n
  • Risk is outlined + (performance impact, potential for breakage, maintainability, etc).
  • \n
  • Change is maintainable (easy to change, telemetry, documentation).
  • \n
  • Library release note guidelines are followed. If no release + note is required, add label changelog/no-changelog.
  • \n
  • Documentation is included (in-code, generated user docs, public corp docs).
  • \n
  • Backport labels are set (if applicable)
  • \n
\n

Reviewer Checklist

\n
    \n
  • Title + is accurate.
  • \n
  • No unnecessary + changes are introduced.
  • \n
  • Description + motivates each change.
  • \n
  • Avoids + breaking API changes unless absolutely necessary.
  • \n
  • Testing strategy adequately addresses listed risk(s).
  • \n
  • Change is maintainable (easy to change, telemetry, documentation).
  • \n
  • Release note makes sense to a user of the library.
  • \n
  • Reviewer has explicitly acknowledged and discussed the performance + implications of this PR as reported in the benchmarks PR comment.
  • \n
  • Backport labels are set in a manner that is consistent with + the release branch maintenance policy
  • \n
\n
\n + \
\n \n
\n\n
\n\n
\n \n + \

\n \n + \ \n \n \n\n

\n \n\n\n\n + \
\n
\n
\n \n
\n
\n + \ \n
\n
\n
\n + \
\n\n
\n
\n
\n\n\n \n\n \n
\n\n\n
\n \n
\n + \
\n \n \n\n
\n
\n\n + \ \n\n \"@P403n1x87\"\nP403n1x87\n\n\n\n\n added\n the \n\n changelog/no-changelog\n\n A changelog + entry is not required for this PR.\n label\n\n\n Jul 20, 2023\n\n
\n
\n\n\n\n\n
\n\n + \
\n \n
\n \n
\n \"P403n1x87\"\n + \
\n \n
\n + \
\n + \ \n P403n1x87\n + \ \n\n \n\n commented\n\n\n \n \n + \ Jul + 20, 2023\n \n \n \n + \
\n\n \n
\n
\n\n + \
\n + \ \n \n
+ \ \n + \
\n + \
\n \n riotfile.py\n\n + \ \n Outdated\n \n \n \nShow + resolved\n \n \nHide resolved\n + \
\n
\n
\n + \ \n \n \n \n\n \n
\n
\n\n\n\n\n
\n
+ \
\n
\n\n\n
\n\n
\n \n
\n + \ \n
\n \n
\n + \
\"@P403n1x87\"\n P403n1x87\n\n\n force-pushed\n + \ the\n \n \n ci/debugger-suitespec\n\n\n + \ \n branch\n 4 times, most recently\n from\n 8953a58 + \ to\n 575d15e + \ \n + \ Compare\n \n\n\n\n July 20, 2023 13:13 \n + \ \n
\n
\n\n\n
\n\n
\n \n
\n \n
\n \"emmettbutler\"\n + \
\n \n
\n + \
\n + \ \n emmettbutler\n + \ \n\n \n\n reviewed\n\n\n \n \n + \ Jul + 20, 2023\n \n \n \n + \
\n\n \n
\n
\n\n + \
\n + \ \n \n
+ \ \n + \
\n
\n + \ \n scripts/needs_testrun.py\n\n + \ \n Outdated\n \n \n \nShow + resolved\n \n \nHide resolved\n + \
\n
\n
\n + \ \n \n \n \n\n \n
\n
\n\n\n\n\n
\n
+ \
\n
\n\n\n
\n\n
\n \n \n
\n\n
\n + \ \"@emmettbutler\"\n\n
\n\n\n
\n\n
\n + \
\n + \
\n + \
\n + \ \n + \ \n \n\n \n\n\n + \ \n \n \n \n Copy + link\n\n
\n
\n + \
\n
\n\n
\n \n\n + \ \n + \ Collaborator\n\n\n + \ \n\n
\n\n

\n + \
\n \n\n \n emmettbutler\n + \ \n\n \n\n \n\n commented\n\n\n Jul 20, 2023\n\n\n + \ \n
\n\n

\n
\n\n\n
\n\n + \ \n\n \n \n + \ \n \n + \ \n
\n + \

I love this idea!

\n
\n
\n\n\n
\n\n + \ \n\n
\n
\n + \
\n \n \n
\n
\n \n
\n
\n
\n
\n + \
\n
\n\n
\n \n + \

\n \n + \ \n \n \n\n

\n \n\n\n + \
\n
\n\n
\n\n\n
\n\n\n
\n + \ \n
\n \n
\n \"brettlangdon\"\n + \
\n \n
\n + \
\n + \ \n brettlangdon\n + \ \n\n \n\n reviewed\n\n\n \n \n + \ Jul + 20, 2023\n \n \n \n + \
\n\n \n
\n
\n + \
\n + \ \n \n
\n
\n
\n
\n \n \n \n\n \n\n\n \n \n \n \n Copy + link\n\n
\n
\n + \
\n
\n\n
\n \n\n + \ \n Member\n\n\n \n\n
\n\n

\n
\n \"@brettlangdon\"\n\n \n brettlangdon\n \n\n \n\n \n\n + \ left a comment\n\n\n\n\n \n
\n\n

\n
\n \n\n
\n
\n + \ \n
\n \n \n\n

Choose a reason for hiding this comment

\n\n + \

\n The reason will be displayed to describe this + comment to others. Learn more.\n + \

\n\n
\n \n \n
\n\n + \ \n
\n\n \n
\n

I know @gnufede was trying to get CI Visibility + running for this repo, if we go that route, we might be able to ITR ?

\n + \
\n
\n \n
\n\n\n
\n \n + \

\n \n + \ \n \n \n\n

\n \n\n\n\n + \
\n
\n + \
\n \n
\n
\n + \ \n
\n
\n
\n + \
\n\n
\n
\n
\n
\n \n \n
\n
\n
\n + \ \n tests/.suitespec.json\n\n + \ \n \n \nShow resolved\n + \ \n \nHide resolved\n + \
\n
\n
\n + \ \n \n \n \n\n \n
\n
\n\n\n\n\n
\n
+ \
\n
\n\n\n
\n\n
\n \n \n
\n\n
\n + \ \"@P403n1x87\"\n\n
\n\n\n
\n\n
\n + \
\n + \
\n + \
\n + \ \n + \ \n \n\n \n\n\n + \ \n \n \n \n Copy + link\n\n
\n
\n + \
\n
\n\n
\n \n\n + \ \n + \ Contributor\n\n\n + \ \n\n Author\n\n\n
\n\n

\n
\n \n\n \n + \ P403n1x87\n \n\n \n\n \n\n commented\n\n\n + \ Jul 20, 2023\n\n\n \n
\n\n + \

\n
\n\n\n
\n\n \n\n + \ \n \n \n \n + \ \n
\n
\n

I know @gnufede + was trying to get CI Visibility running for this repo, if we go that route, + we might be able to ITR ?

\n
\n

My understanding + is that ITR is a per-test rather than per-test-suite. So I see ITR improving + this even further rather than an alternative?

\n
\n
\n\n\n
\n\n + \ \n\n
\n
\n
\n \n \n
\n + \ emmettbutler reacted with thumbs up emoji\n + \
\n \n + \
\n
\n
\n
\n
\n + \
\n\n
\n \n + \

\n \n + \ \n \n \n\n

\n \n\n\n + \
\n
\n\n
\n\n\n
\n\n\n
\n + \ \n
\n \n
\n \n
\n + \
\"@P403n1x87\"\n P403n1x87\n\n\n force-pushed\n + \ the\n \n \n ci/debugger-suitespec\n\n\n + \ \n branch\n 3 times, most recently\n from\n 713167a + \ to\n e8c3ecc + \ \n + \ Compare\n \n\n\n\n July 20, 2023 17:15 \n + \ \n
\n
\n
\n \n
\n \n
\n + \
\"@emmettbutler\"\n emmettbutler\n\n\n self-requested a review\n\n\n + \ July 20, 2023 21:23 \n + \ \n
\n
\n\n\n
\n\n
\n \n
\n \n
\n \"emmettbutler\"\n + \
\n \n
\n + \
\n + \ \n emmettbutler\n + \ \n\n \n\n previously approved these changes\n\n\n + \ \n \n + \ Jul + 20, 2023\n \n \n \n + \
\n\n \n
\n
\n\n + \
\n + \ \n \n
\n
\n
+ \
\n
\n\n\n
\n\n
\n \n
\n + \ \n
\n \n
\n
\"@P403n1x87\"\n P403n1x87\n\n\n dismissed\n emmettbutler\u2019s stale review\n\n\n + \ via\n \n 4e53e79\n + \ \n\n July + 21, 2023 09:41 \n \n
\n
\n
\n + \ \n
\n \n
\n + \
\"@P403n1x87\"\n P403n1x87\n\n\n force-pushed\n + \ the\n \n \n ci/debugger-suitespec\n\n\n + \ \n branch\n from\n e8c3ecc + \ to\n 4e53e79 + \ \n + \ Compare\n \n\n\n\n July 21, 2023 09:41 \n + \ \n
\n
\n\n\n
\n\n
\n \n
\n \n
\n \"P403n1x87\"\n + \
\n \n
\n + \
\n + \ \n P403n1x87\n + \ \n\n \n\n commented\n\n\n \n \n + \ Jul + 21, 2023\n \n \n \n + \
\n\n \n
\n
\n\n + \
\n + \ \n \n
+ \ \n + \
\n
\n + \ \n .circleci/config.yml\n\n + \ \n Outdated\n \n \n \nShow + resolved\n \n \nHide resolved\n + \
\n
\n
\n + \ \n \n \n \n\n \n
\n
\n\n\n\n\n
\n
+ \
\n
\n\n\n
\n\n
\n \n
\n + \ \n
\n \n
\n + \
\"@P403n1x87\"\n P403n1x87\n\n\n force-pushed\n + \ the\n \n \n ci/debugger-suitespec\n\n\n + \ \n branch\n 3 times, most recently\n from\n d2671c5 + \ to\n 19b0da0 + \ \n + \ Compare\n \n\n\n\n July 21, 2023 10:35 \n + \ \n
\n
\n
\n \n
\n \n
\n + \
\"@P403n1x87\"\n P403n1x87\n\n\n marked this pull request as + ready for review\n\n July + 21, 2023 10:41 \n \n
\n
\n
\n + \ \n
\n \n
\n + \
\"@P403n1x87\"\n P403n1x87\n\n\n requested review from\n a team\n\n as code owners\n\n\n + \ July 21, 2023 10:41 \n + \ \n
\n
\n
\n \n
\n \n
\n + \
\"@P403n1x87\"\n P403n1x87\n\n\n requested review from\n majorgreys, + \n jbertran, + \n brettlangdon, + \n emmettbutler + and \n a team\n\n\n\n July 21, 2023 10:41 \n + \ \n
\n
\n
\n \n
\n \n
\n + \
\"@P403n1x87\"\n P403n1x87\n\n\n force-pushed\n + \ the\n \n \n ci/debugger-suitespec\n\n\n + \ \n branch\n from\n af236d7 + \ to\n a4c0000 + \ \n + \ Compare\n \n\n\n\n July 21, 2023 15:26 \n + \ \n
\n
\n\n\n
\n\n\n
\n + \
\n
\n
\n \n \n
\n
\n + \
\n\n
\n \n
\n + \ \n
\n \n
\n + \
\"@P403n1x87\"\n P403n1x87\n\n\n force-pushed\n + \ the\n \n \n ci/debugger-suitespec\n\n\n + \ \n branch\n 2 times, most recently\n from\n c50870c + \ to\n e812418 + \ \n + \ Compare\n \n\n\n\n July 24, 2023 12:52 \n + \ \n
\n
\n\n\n
\n\n
\n + \ \n
\n \n
\n \"P403n1x87\"\n + \
\n \n
\n + \
\n + \ \n P403n1x87\n + \ \n\n \n\n commented\n\n\n \n \n + \ Jul + 24, 2023\n \n \n \n + \
\n\n \n
\n
\n\n + \
\n + \ \n \n
+ \ \n + \
\n + \
\n \n .circleci/config.templ.yml\n\n + \ \n \n \nShow resolved\n + \ \n \nHide resolved\n + \
\n
\n
\n + \ \n \n \n \n\n \n
\n
\n\n\n\n\n
\n
+ \
\n
\n\n\n
\n\n
\n \n
\n \n
\n \"P403n1x87\"\n + \
\n \n
\n + \
\n + \ \n P403n1x87\n + \ \n\n \n\n commented\n\n\n \n \n + \ Jul + 24, 2023\n \n \n \n + \
\n\n \n
\n
\n\n + \
\n + \ \n \n
+ \ \n + \
\n + \
\n \n .circleci/config.templ.yml\n\n + \ \n \n \nShow resolved\n + \ \n \nHide resolved\n + \
\n
\n
\n + \ \n \n \n \n\n \n
\n
\n\n\n\n\n
\n
+ \
\n
\n\n\n\n\n
\n \n
\n \n
\n \"P403n1x87\"\n + \
\n \n
\n + \
\n + \ \n P403n1x87\n + \ \n\n \n\n commented\n\n\n \n \n + \ Jul + 24, 2023\n \n \n \n + \
\n\n \n
\n
\n\n + \
\n + \ \n \n
+ \ \n + \
\n + \
\n \n .circleci/config.templ.yml\n\n + \ \n \n \nShow resolved\n + \ \n \nHide resolved\n + \
\n
\n
\n + \ \n \n \n \n\n \n
\n
\n\n\n\n\n
\n
+ \
\n
\n\n\n\n\n
\n \n
\n \n
\n \"emmettbutler\"\n + \
\n \n
\n + \
\n + \ \n emmettbutler\n + \ \n\n \n\n previously approved these changes\n\n\n + \ \n \n + \ Jul + 24, 2023\n \n \n \n + \
\n\n \n
\n
\n\n + \
\n + \ \n \n
\n
\n
+ \
\n
\n\n\n\n\n
\n \n
\n + \ \n
\n \n
\n
\"@brettlangdon\"\n brettlangdon\n\n\n dismissed\n emmettbutler\u2019s stale review\n\n\n + \ via\n \n cdb1444\n + \ \n\n July + 24, 2023 16:44 \n \n
\n
\n\n\n
\n\n + \
\n \n
\n \n
\n \"brettlangdon\"\n + \
\n \n
\n + \
\n + \ \n brettlangdon\n + \ \n\n \n\n reviewed\n\n\n \n \n + \ Jul + 24, 2023\n \n \n \n + \
\n\n \n
\n
\n\n + \
\n + \ \n \n
+ \ \n + \
\n + \
\n \n .circleci/config.templ.yml\n\n + \ \n Outdated\n \n \n \nShow + resolved\n \n \nHide resolved\n + \
\n
\n
\n + \ \n \n \n \n\n \n
\n
\n\n\n\n\n
\n
+ \
\n
\n\n\n\n\n
\n \n
\n + \ \n
\n \n
\n + \
\"@P403n1x87\"\n P403n1x87\n\n\n requested review from\n brettlangdon + and \n emmettbutler\n\n\n\n + \ July 24, 2023 18:57 \n + \ \n
\n
\n\n\n
\n\n
\n + \ \n
\n \n
\n \"brettlangdon\"\n + \
\n \n
\n + \
\n + \ \n brettlangdon\n + \ \n\n \n\n previously approved these changes\n\n\n + \ \n \n + \ Jul + 24, 2023\n \n \n \n + \
\n\n \n
\n
\n\n + \
\n + \ \n \n
\n
\n
+ \
\n
\n\n\n\n\n
\n \n
\n \n
\n \"emmettbutler\"\n + \
\n \n
\n + \
\n + \ \n emmettbutler\n + \ \n\n \n\n previously approved these changes\n\n\n + \ \n \n + \ Jul + 24, 2023\n \n \n \n + \
\n\n \n
\n
\n\n + \
\n + \ \n \n
\n
\n
+ \
\n
\n\n\n\n\n
\n + \ \n
\n
\n \n
\n \n
\n + \
P403n1x87\n \n\n added 5 commits\n + \ July 24, 2023 22:13
\n
+ \
\n
\n + \ \n
\n \n
\n
\n
\n
\n \n
\n
\n \n \"@P403n1x87\"\n
\n
\n\n + \ \n\n
\n \n\n \n \n \n\n \n\n \n\n\n + \
\n\n
\n + \
\n + \ \n\n + \ \n \n \n\n \n\n
\n \n
\n\n
\n + \
\n
\n\n
\n \n + \ 0d844de\n \n
\n
\n + \
\n
\n
We introduce the concept of suitespec as a way of describing
+        how\nsources affect test runs. We use it to ensure that the debugger\ntests
+        run only if sources that the suite depends on are modified\nby the current
+        commit.
\n
\n
\n\n
\n
\n \n
\n \n
\n
\n
\n
\n \n
\n
\n \n \"@P403n1x87\"\n
\n
\n\n + \ \n\n
\n \n\n \n \n \n\n \n\n \n\n\n + \
\n\n
\n + \
\n + \ \n\n + \ \n \n \n\n \n\n
\n \n
\n\n
\n + \
\n
\n\n
\n \n + \ 1ffab15\n \n
\n
\n + \
\n
\n\n
\n
\n \n
\n \n
\n
\n
\n
\n \n
\n
\n \n \"@P403n1x87\"\n
\n
\n\n + \
\n \n web + scraping FTW\n \n\n
\n\n
\n \n\n \n \n \n\n \n\n \n\n\n + \
\n\n
\n + \
\n + \ \n\n + \ \n \n \n\n \n\n
\n \n
\n\n
\n + \
\n
\n\n
\n \n + \ a115763\n \n
\n
\n + \
\n
\n\n
\n
\n \n
\n \n
\n
\n
\n
\n \n
\n
\n \n \"@P403n1x87\"\n
\n
\n\n + \
\n \n add + doctests\n \n\n
\n\n
\n \n\n + \ \n \n \n\n \n\n \n\n\n + \
\n\n
\n + \
\n + \ \n\n + \ \n \n \n\n \n\n
\n \n
\n\n
\n + \
\n
\n\n
\n \n + \ 4d0fb2e\n \n
\n
\n + \
\n
\n\n
\n
\n \n
\n \n
\n
\n
\n
\n \n
\n
\n \n \"@P403n1x87\"\n
\n
\n\n + \
\n \n use + dynamic config\n \n\n
\n\n
\n \n\n \n \n \n\n \n\n \n\n\n + \
\n\n
\n + \
\n + \ \n\n + \ \n \n \n\n \n\n
\n \n
\n\n
\n + \
\n
\n\n
\n \n + \ 690a7b1\n \n
\n
\n + \
\n
\n\n
\n
\n
\n\n\n
\n\n
\n + \ \n
\n \n
\n \n
\n
\"@P403n1x87\"\n P403n1x87\n\n\n dismissed stale reviews from + emmettbutler + and brettlangdon\n\n\n + \ via\n \n 690a7b1\n + \ \n\n July + 24, 2023 21:17 \n \n
\n
\n
\n + \ \n
\n \n
\n + \
\"@P403n1x87\"\n P403n1x87\n\n\n force-pushed\n + \ the\n \n \n ci/debugger-suitespec\n\n\n + \ \n branch\n from\n 5f1daca + \ to\n 690a7b1 + \ \n + \ Compare\n \n\n\n\n July 24, 2023 21:17 \n + \ \n
\n
\n
\n \n
\n \n
\n + \
\"@P403n1x87\"\n P403n1x87\n\n\n requested review from\n emmettbutler + and \n brettlangdon\n\n\n\n + \ July 24, 2023 21:17 \n + \ \n
\n
\n\n\n
\n\n
\n + \ \n
\n \n
\n \"brettlangdon\"\n + \
\n \n
\n + \
\n + \ \n brettlangdon\n + \ \n\n \n\n approved these changes\n\n\n \n \n + \ Jul + 24, 2023\n \n \n \n + \
\n\n \n
\n
\n\n
\n \n \n
\n
\n
\n
\n\n\n\n\n + \
\n + \ \n
\n
\n \n
\n \n
\n + \
P403n1x87\n \n\n added 2 commits\n + \ July 25, 2023 09:06
\n
+ \
\n
\n + \ \n
\n \n
\n
\n
\n
\n \n
\n
\n \n \"@P403n1x87\"\n
\n
\n\n + \ \n\n + \
\n \n\n \n \n \n\n \n\n \n\n\n + \
\n\n
\n + \
\n + \ \n\n + \ \n \n \n\n \n\n
\n \n
\n\n
\n + \
\n
\n\n
\n \n + \ f421ece\n \n
\n
\n + \
\n
\n\n
\n
\n \n
\n \n
\n
\n
\n
\n \n
\n
\n \n \"@P403n1x87\"\n
\n
\n\n + \ \n\n + \
\n \n\n \n \n \n\n \n\n \n\n\n + \
\n\n
\n + \
\n + \ \n\n + \ \n \n \n\n \n\n
\n \n
\n\n
\n + \
\n
\n\n
\n \n + \ 3eacc26\n \n
\n
\n + \
\n
\n\n
\n
\n
\n\n\n
\n\n
\n + \ \n
\n
\n + \ \n + \ \n\n + \
\n
\n\n\n \"@P403n1x87\"\n P403n1x87\n\n\n\n merged commit f441242\n into\n\n \n \n DataDog:1.x\n + \ \n\n\n Jul 25, 2023\n\n
\n
\n\n
\n\n
\n
\n \n \n\n
\n\n
\n
\n \"@Yun-Kim\"\nYun-Kim\n\n\n\n mentioned this pull request\n \n Jul 26, 2023\n + \ \n
\n\n\n\n\n \n
\n \n \n \n\n \n \n\n + \ \n \n \n \n\n\n 16 + tasks\n
\n
\n\n\n\n
\n
\n\n + \ \n
\n \n + \ \n + \ \n\n \n
\n \n Yun-Kim \n\n added a commit\n that referenced\n + \ this pull request\n\n \n + \ Jul + 26, 2023\n \n
\n \n
\n + \
\n
\n \n
\n
\n \n \"@Yun-Kim\"\n + \
\n
\n\n\n \n\n + \
\n \n\n \n \n \n\n \n\n \n\n\n + \
\n\n
\n + \
\n\n \n + \
\n \n 43497d1\n \n
\n
\n + \
\n
\n
#6412
+        changed our circleci configuration setup to be dynamic, but this\ninadvertently
+        removed the `coverage` and `riot_run_latest` circleci\npipeline parameters
+        from the main `.circleci/config.yml` file, which\nbreaks our nightly 1.x coverage
+        pipeline runs. This PR re-adds those\nparameters back and re-enables coverage
+        reporting.\n\nNote that `datastreams`, `langchain`, `elasticsearch`,\n`integration-snapshot`
+        test suites are still failing on 1.x nightly\ncoverage runs and will need
+        to be fixed.\n\n## Checklist\n\n- [x] Change(s) are motivated and described
+        in the PR description.\n- [x] Testing strategy is described if automated tests
+        are not included\nin the PR.\n- [x] Risk is outlined (performance impact,
+        potential for breakage,\nmaintainability, etc).\n- [x] Change is maintainable
+        (easy to change, telemetry, documentation).\n- [x] [Library release note\nguidelines](https://ddtrace.readthedocs.io/en/stable/releasenotes.html)\nare
+        followed. If no release note is required, add label\n`changelog/no-changelog`.\n-
+        [x] Documentation is included (in-code, generated user docs, [public\ncorp
+        docs](https://github.com/DataDog/documentation/)).\n-
+        [x] Backport labels are set (if\n[applicable](https://ddtrace.readthedocs.io/en/latest/contributing.html#backporting))\n\n##
+        Reviewer Checklist\n\n- [x] Title is accurate.\n- [x] No unnecessary changes
+        are introduced.\n- [x] Description motivates each change.\n- [x] Avoids breaking\n[API](https://ddtrace.readthedocs.io/en/stable/versioning.html#interfaces)\nchanges
+        unless absolutely necessary.\n- [x] Testing strategy adequately addresses
+        listed risk(s).\n- [x] Change is maintainable (easy to change, telemetry,
+        documentation).\n- [x] Release note makes sense to a user of the library.\n-
+        [x] Reviewer has explicitly acknowledged and discussed the performance\nimplications
+        of this PR as reported in the benchmarks PR comment.\n- [x] Backport labels
+        are set in a manner that is consistent with the\n[release branch maintenance\npolicy](https://ddtrace.readthedocs.io/en/latest/contributing.html#backporting)
\n + \
\n
\n\n
\n
\n
\n\n \n
\n \n \n \n\n \n
\n + \ \n romainkomorndatadog + \n\n pushed a commit\n that referenced\n this pull request\n\n + \ \n Aug 8, 2023\n + \ \n
\n \n
\n + \
\n
\n \n
\n
\n \n \"@P403n1x87\"\n + \ \n \"@romainkomorndatadog\"\n + \
\n
\n\n\n
\n + \ \n ci: + run the debugger suite only if necessary (#6412)\n + \ \n\n \n + \ \n \n\n
\n\n + \
\n \n\n \n \n \n\n \n\n
\n\n
\n
\n\n \n
\n + \ \n 6838e4b\n \n
\n
\n + \
\n
\n
We introduce the concept of suitespec as a way of describing
+        how sources\naffect test runs. We use it to ensure that the debugger tests
+        run only\nif sources that the suite depends on are modified by the current
+        commit.\n\n## Suitespec Implementation Details\n\nThe suitespec solution is
+        based on a manual configuration of of test\nsuites. To simplify the declaration
+        of file patterns for test suites,\none can make use of _components_, which
+        essentially are a logic\ncollection of patterns. Test suite can then be declared
+        as a list of\ncomponents to reflect their dependencies on these logic parts,
+        and to\nDRY the declaration itself by avoiding repetitions.\n\n## Notes\n\n-
+        When the script fails for any reason, tests are run.\n- It is important that
+        path patterns are listed correctly, or some tests\nmight not run when they
+        are in fact supposed to.\n- Best effort to determine the correct list of changed
+        files via the\nGitHub REST API. When that fails, we fall back to the less
+        accurate `git\ndiff` against the target branch.\n\n## Checklist\n\n- [x] Change(s)
+        are motivated and described in the PR description.\n- [x] Testing strategy
+        is described if automated tests are not included\nin the PR.\n- [x] Risk is
+        outlined (performance impact, potential for breakage,\nmaintainability, etc).\n-
+        [x] Change is maintainable (easy to change, telemetry, documentation).\n-
+        [x] [Library release note\nguidelines](https://ddtrace.readthedocs.io/en/stable/releasenotes.html)\nare
+        followed. If no release note is required, add label\n`changelog/no-changelog`.\n-
+        [x] Documentation is included (in-code, generated user docs, [public\ncorp
+        docs](https://github.com/DataDog/documentation/)).\n-
+        [x] Backport labels are set (if\n[applicable](https://ddtrace.readthedocs.io/en/latest/contributing.html#backporting))\n\n##
+        Reviewer Checklist\n\n- [ ] Title is accurate.\n- [ ] No unnecessary changes
+        are introduced.\n- [ ] Description motivates each change.\n- [ ] Avoids breaking\n[API](https://ddtrace.readthedocs.io/en/stable/versioning.html#interfaces)\nchanges
+        unless absolutely necessary.\n- [ ] Testing strategy adequately addresses
+        listed risk(s).\n- [ ] Change is maintainable (easy to change, telemetry,
+        documentation).\n- [ ] Release note makes sense to a user of the library.\n-
+        [ ] Reviewer has explicitly acknowledged and discussed the performance\nimplications
+        of this PR as reported in the benchmarks PR comment.\n- [ ] Backport labels
+        are set in a manner that is consistent with the\n[release branch maintenance\npolicy](https://ddtrace.readthedocs.io/en/latest/contributing.html#backporting)
\n + \
\n
\n\n
\n
\n
\n\n \n
\n \n \n \n\n \n
\n + \ \n romainkomorndatadog + \n\n pushed a commit\n that referenced\n this pull request\n\n + \ \n Aug 8, 2023\n + \ \n
\n \n
\n + \
\n
\n \n
\n
\n \n \"@Yun-Kim\"\n + \ \n \"@romainkomorndatadog\"\n + \
\n
\n\n\n \n\n + \
\n \n\n \n \n \n\n \n\n
\n\n
\n
\n\n \n
\n + \ \n b38e5ce\n \n
\n
\n + \
\n
\n
#6412
+        changed our circleci configuration setup to be dynamic, but this\ninadvertently
+        removed the `coverage` and `riot_run_latest` circleci\npipeline parameters
+        from the main `.circleci/config.yml` file, which\nbreaks our nightly 1.x coverage
+        pipeline runs. This PR re-adds those\nparameters back and re-enables coverage
+        reporting.\n\nNote that `datastreams`, `langchain`, `elasticsearch`,\n`integration-snapshot`
+        test suites are still failing on 1.x nightly\ncoverage runs and will need
+        to be fixed.\n\n## Checklist\n\n- [x] Change(s) are motivated and described
+        in the PR description.\n- [x] Testing strategy is described if automated tests
+        are not included\nin the PR.\n- [x] Risk is outlined (performance impact,
+        potential for breakage,\nmaintainability, etc).\n- [x] Change is maintainable
+        (easy to change, telemetry, documentation).\n- [x] [Library release note\nguidelines](https://ddtrace.readthedocs.io/en/stable/releasenotes.html)\nare
+        followed. If no release note is required, add label\n`changelog/no-changelog`.\n-
+        [x] Documentation is included (in-code, generated user docs, [public\ncorp
+        docs](https://github.com/DataDog/documentation/)).\n-
+        [x] Backport labels are set (if\n[applicable](https://ddtrace.readthedocs.io/en/latest/contributing.html#backporting))\n\n##
+        Reviewer Checklist\n\n- [x] Title is accurate.\n- [x] No unnecessary changes
+        are introduced.\n- [x] Description motivates each change.\n- [x] Avoids breaking\n[API](https://ddtrace.readthedocs.io/en/stable/versioning.html#interfaces)\nchanges
+        unless absolutely necessary.\n- [x] Testing strategy adequately addresses
+        listed risk(s).\n- [x] Change is maintainable (easy to change, telemetry,
+        documentation).\n- [x] Release note makes sense to a user of the library.\n-
+        [x] Reviewer has explicitly acknowledged and discussed the performance\nimplications
+        of this PR as reported in the benchmarks PR comment.\n- [x] Backport labels
+        are set in a manner that is consistent with the\n[release branch maintenance\npolicy](https://ddtrace.readthedocs.io/en/latest/contributing.html#backporting)
\n + \
\n
\n\n
\n
\n
\n\n\n\n
\n\n\n\n \n
\n
\n \n
+ \
\n\n\n\n \n\n
\n + \
\n
\n \n Sign up for free\n to join + this conversation on GitHub.\n Already have an account?\n Sign + in to comment\n\n\n \n
\n\n
\n
\n \n\n\n + \
\n
\n\n\n \n
\n \n
\n \n
\n Reviewers\n
\n\n \n\n\n + \

\n \n\n \n \"@brettlangdon\"\n \n brettlangdon\n\n\n\n + \ \n + \ \n \n + \ \n\n \n \n brettlangdon approved these changes\n\n + \

\n

\n \n\n \n \"@majorgreys\"\n \n majorgreys\n\n\n + \ Awaiting requested review from majorgreys\n\n + \ majorgreys is a code owner automatically + assigned from DataDog/apm-core-python\n\n \n

\n + \

\n \n\n \n \"@jbertran\"\n \n jbertran\n\n\n + \ Awaiting requested review from jbertran\n\n + \ jbertran was automatically assigned from + DataDog/apm-framework-integrations-reviewers-py\n\n \n

\n + \

\n \n\n \n \"@emmettbutler\"\n \n emmettbutler\n\n\n + \ Awaiting requested review from emmettbutler\n\n\n + \ \n

\n\n \n
\n\n
\n\n\n
\n
\n\n \n
\n Assignees\n + \
\n\n\n \n\n + \ No one assigned\n\n\n\n
\n\n\n \n\n \n\n\n
\n Labels\n
\n\n\n
\n \n\n changelog/no-changelog\n\n + \ A changelog entry is not required for + this PR.\n\n
\n\n
\n\n\n \n\n
\n
\n
\n Projects\n + \
\n\n
\n
\n\n None yet\n\n\n\n
\n\n\n + \ \n
\n
\n \n
\n Milestone\n + \
\n\n No milestone\n\n
\n\n\n \n \n \n
\n
\n \n
\n \n
\n Development\n + \
\n\n\n \n\n

Successfully merging this pull request may + close these issues.

\n\n\n \n\n
+ \
\n
\n
\n\n \n \n\n + \ \n\n \n
\n + \
\n
\n 4 participants\n
\n \n
\n
\n\n\n\n + \ \n\n \n\n\n\n\n \n\n\n\n\n\n \n \n \n + \ \n\n\n + \ \n\n\n \n\n\n\n\n \n \n\n + \ \n\n
\n

Footer

\n\n \n\n\n
\n
\n \n \n \n\n\n + \ \n © 2024 GitHub, Inc.\n \n
\n\n + \ \n
\n
\n\n\n\n\n \n\n\n \n\n + \ \n\n
\n + \
\n
\n
\n\n \n\n\n\n\n\n \n\n
\n + \
\n \n\n\n" + headers: + Accept-Ranges: + - bytes + Cache-Control: + - no-cache + Content-Security-Policy: + - 'default-src ''none''; base-uri ''self''; child-src github.com/assets-cdn/worker/ + github.com/webpack/ github.com/assets/ gist.github.com/assets-cdn/worker/; + connect-src ''self'' uploads.github.com www.githubstatus.com collector.github.com + raw.githubusercontent.com api.github.com github-cloud.s3.amazonaws.com github-production-repository-file-5c1aeb.s3.amazonaws.com + github-production-upload-manifest-file-7fdce7.s3.amazonaws.com github-production-user-asset-6210df.s3.amazonaws.com + *.rel.tunnels.api.visualstudio.com wss://*.rel.tunnels.api.visualstudio.com + objects-origin.githubusercontent.com copilot-proxy.githubusercontent.com proxy.individual.githubcopilot.com + proxy.business.githubcopilot.com proxy.enterprise.githubcopilot.com *.actions.githubusercontent.com + wss://*.actions.githubusercontent.com productionresultssa0.blob.core.windows.net/ + productionresultssa1.blob.core.windows.net/ productionresultssa2.blob.core.windows.net/ + productionresultssa3.blob.core.windows.net/ productionresultssa4.blob.core.windows.net/ + productionresultssa5.blob.core.windows.net/ productionresultssa6.blob.core.windows.net/ + productionresultssa7.blob.core.windows.net/ productionresultssa8.blob.core.windows.net/ + productionresultssa9.blob.core.windows.net/ productionresultssa10.blob.core.windows.net/ + productionresultssa11.blob.core.windows.net/ productionresultssa12.blob.core.windows.net/ + productionresultssa13.blob.core.windows.net/ productionresultssa14.blob.core.windows.net/ + productionresultssa15.blob.core.windows.net/ productionresultssa16.blob.core.windows.net/ + productionresultssa17.blob.core.windows.net/ productionresultssa18.blob.core.windows.net/ + productionresultssa19.blob.core.windows.net/ github-production-repository-image-32fea6.s3.amazonaws.com + github-production-release-asset-2e65be.s3.amazonaws.com insights.github.com + 
wss://alive.github.com api.githubcopilot.com api.individual.githubcopilot.com + api.business.githubcopilot.com api.enterprise.githubcopilot.com; font-src + github.githubassets.com; form-action ''self'' github.com gist.github.com copilot-workspace.githubnext.com + objects-origin.githubusercontent.com; frame-ancestors ''none''; frame-src + viewscreen.githubusercontent.com notebooks.githubusercontent.com; img-src + ''self'' data: blob: github.githubassets.com media.githubusercontent.com camo.githubusercontent.com + identicons.github.com avatars.githubusercontent.com private-avatars.githubusercontent.com + github-cloud.s3.amazonaws.com objects.githubusercontent.com secured-user-images.githubusercontent.com/ + user-images.githubusercontent.com/ private-user-images.githubusercontent.com + opengraph.githubassets.com github-production-user-asset-6210df.s3.amazonaws.com + customer-stories-feed.github.com spotlights-feed.github.com objects-origin.githubusercontent.com + *.githubusercontent.com; manifest-src ''self''; media-src github.com user-images.githubusercontent.com/ + secured-user-images.githubusercontent.com/ private-user-images.githubusercontent.com + github-production-user-asset-6210df.s3.amazonaws.com gist.github.com; script-src + github.githubassets.com; style-src ''unsafe-inline'' github.githubassets.com; + upgrade-insecure-requests; worker-src github.com/assets-cdn/worker/ github.com/webpack/ + github.com/assets/ gist.github.com/assets-cdn/worker/' + Content-Type: + - text/html; charset=utf-8 + Date: + - Thu, 12 Dec 2024 14:42:36 GMT + Referrer-Policy: + - no-referrer-when-downgrade + Server: + - GitHub.com + Set-Cookie: + - 
_gh_sess=l3QKY0YBtVa6g6vTxtaD7V81b1rqVKbVC2TuUprbrsBeLyGMxD93o4PKJuuuX8DsRurIz%2BgmK%2Bu2SsrLsbekGNnDfnlY8nmv6JixFA0imSoyuXwZ1hoQQntsqmb%2BY5H2ZmdVcxjGdx4KfXsgsWYyampVlxVtj8kcqBXpQ1EmwL8bxCXFb1Ua2ljpQIrEF0vAkXxAKjJvD9Nkk%2BoV9Oq9FDdyOTS5F09seblwdhXqyPUiRtK%2F47XQlwOGT%2Bbx3gQZd0o0tqUnOHebKKHm1e8WeA%3D%3D--cOTu%2FzH%2Bqi016usb--raQjHHicfYwN6TAxmM%2B1hg%3D%3D; + Path=/; HttpOnly; Secure; SameSite=Lax + - _octo=GH1.1.165126635.1734014562; Path=/; Domain=github.com; Expires=Fri, + 12 Dec 2025 14:42:42 GMT; Secure; SameSite=Lax + - logged_in=no; Path=/; Domain=github.com; Expires=Fri, 12 Dec 2025 14:42:42 + GMT; HttpOnly; Secure; SameSite=Lax + Strict-Transport-Security: + - max-age=31536000; includeSubdomains; preload + Transfer-Encoding: + - chunked + Vary: + - X-PJAX, X-PJAX-Container, Turbo-Visit, Turbo-Frame, Accept-Encoding, Accept, + X-Requested-With + X-Content-Type-Options: + - nosniff + X-Frame-Options: + - deny + X-GitHub-Request-Id: + - ED48:2AD30D:249761D:33182B5:675AF662 + X-XSS-Protection: + - '0' + connection: + - close + server-timing: + - pull_request_layout-fragment;desc="pull_request_layout fragment";dur=259.185408,conversation_content-fragment;desc="conversation_content + fragment";dur=1167.36918,conversation_sidebar-fragment;desc="conversation_sidebar + fragment";dur=278.203377,nginx;desc="NGINX";dur=1.232025,glb;desc="GLB";dur=3.090931 + x-voltron-version: + - 69a2227 + status: + code: 200 + message: OK +- request: + body: null + headers: + Connection: + - close + Host: + - github.com + method: GET + uri: https://github.com/DataDog/dd-trace-py/pull/11534 + response: + body: + string: "\n\n\n\n\n\n\n\n\n\n\n\n \n \n + \ \n \n \n \n + \ \n + \ \n\n + \ \n\n \n\n \n \n \n \n \n\n\n \n\n\n \n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n + \ \n \n\n\n\n\n\n\n\n\n\n\n\n\n fix(asm): add global states to ensure patching once [backport + 2.15] by christophe-papazian \xB7 Pull Request #11534 \xB7 
DataDog/dd-trace-py + \xB7 GitHub\n\n\n\n \n \n \n\n \n \n\n\n + \ \n\n\n \n\n\n \n \n\n \n \n\n + \ \n\n\n\n \n\n \n\n\n\n\n \n\n \n\n \n\n + \ \n\n \n\n \n\n \n \n \n\n \n \n \n\n\n\n\n \n\n\n\n + \ \n\n\n \n \n \n \n\n \n\n \n + \ \n\n + \ \n\n\n\n \n\n \n\n\n \n\n \n\n \n \n + \ \n\n\n\n\n\n \n\n + \ \n\n \n
\n \n\n\n
\n Skip to content\n\n + \ \n \n + \ \n \n \n\n\n\n\n\n\n\n\n\n \n \n + \
\n\n\n\n\n\n + \ \n\n \n\n \n\n\n
\n

Navigation Menu

\n\n \n\n + \
\n
\n
\n + \ \n
\n\n \n + \ \n + \ \n\n + \ \n\n
\n \n Sign in\n \n
\n
\n\n\n + \
\n
\n + \ \n\n
\n \n\n\n\n \n \n
\n \n \n\n + \
\n Search + or jump to...\n
\n + \ \n\n + \
\n \n\n \n\n \n
\n \n + \

Search + code, repositories, users, issues, pull requests...

\n
\n \n
+ \
\n
\n \n
\n \n \n \n \n \n\n \n
\n
\n
\n
\n + \ \n
\n + \
\n Clear\n + \ \n\n + \
\n \n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n + \
\n \n + \
\n + \
\n
\n\n \n + \
\n
\n\n
\n
\n
\n \n
\n + \ \n\n \n
\n + \
\n
\n + \

\n Provide + feedback\n

\n \n
\n
\n + \ \n
\n
\n + \ \n
\n \n + \
\n

We read every piece of feedback, and take your input very + seriously.

\n \n \n + \ \n
\n
\n + \ \n
\n\n \n \n\n + \ \n
\n
\n + \
\n

\n Saved searches\n

\n + \

Use + saved searches to filter your results more quickly

\n
\n
\n \n + \
\n
\n \n
\n \n + \
\n\n \n\n
\n + \
\n
\n\n
\n + \
\n \n
\n + \
\n
\n\n\n
\n \n Sign in\n \n + \
\n\n \n Sign + up\n \n \n
\n + \
\n
\n \n\n\n \n \n\n + \
\n\n\n\n\n\n\n\n\n + \
\n\n\n + \ \n\n\n + \ \n
\n\n\n + \ \n\n\n\n\n\n\n \n
\n
\n \n \n\n\n\n + \ \n \n\n \n\n\n\n\n\n\n \n
\n\n
\n\n + \
\n \n
\n + \ \n \n\n + \ \n \n + \ \n DataDog\n + \ \n /\n + \ \n dd-trace-py\n \n\n Public\n
\n\n\n + \
\n\n
\n \n\n + \
\n
\n\n
\n
\n\n\n \n\n + \
\n\n \n\n\n\n\n
\n \n\n\n\n \n \n
\n \n\n
\n \n \n \n\n
\n
\n
\n\n \n
\n \n \n New issue\n \n \n + \
\n
\n \n \n\n
\n\n
\n

\n Have a question + about this project? Sign up for a free GitHub account to open an + issue and contact its maintainers and the community.\n

\n\n \n\n

By + clicking “Sign up for GitHub”, you agree to our terms of service + and\n privacy statement. We\u2019ll occasionally send you + account related emails.

\n\n

\n + \ Already on GitHub?\n Sign + in\n to your account\n

\n
\n\n
\n
\n
\n + \ \n + \
\n\n

\n fix(asm): + add global states to ensure patching once [backport 2.15]\n #11534\n

\n
\n
\n\n + \
\n + \
\n + \ \n Merged\n\n + \
\n\n\n\n\n
\n gnufede\n merged 3 commits into\n\n\n 2.15\n\nfrom\n\nbackport-11522-to-2.15\n \n \n \n\n \n \n\n + \
\n
\n\n\n + \ Nov 26, + 2024\n\n\n
\n
\n\n\n \n\n\n\n
\n
\n
\n
\n + \
\n \n Merged\n\n + \
\n\n\n\n\n
\n + \

\n \n fix(asm): add global states to ensure patching once [backport + 2.15]\n \n #11534\n

\n\n + \
\n gnufede\n merged 3 commits into\n\n\n 2.15\n\nfrom\n\nbackport-11522-to-2.15\n \n \n \n\n \n \n\n + \
\n
\n\n\n + \ Nov 26, + 2024\n\n\n
\n
\n
\n + \
\n
\n
\n
\n
\n\n\n\n + \ \n
\n
\n \n \n +74\n + \ \n \n \u221210\n + \ \n \n \n + \ \n \n
\n\n \n
\n\n\n\n
\n + \
\n

Conversation

\n + \ \n \n\n\n \n\n
\n\n
\n \"christophe-papazian\"\n + \ \n \n
\n + \
\n
\n
\n
\n \n \n \n\n \n\n\n \n \n \n \n Copy + link\n\n
\n
\n + \
\n
\n\n
\n \n\n + \ \n + \ Contributor\n\n\n + \ \n\n
\n\n

\n + \
\n \"@christophe-papazian\"\n\n \n + \ christophe-papazian\n \n\n \n\n + \ \n\n commented\n\n\n Nov 25, 2024\n\n\n \n + \ \n\n
\n + \ \n
\n \n edited\n \n + \ \n \n \n\n
\n
\n \n \n \n + \ \n \n + \ \n Loading\n\n \n \n + \
\n
\n\n
\n\n

\n
\n\n
\n + \
\n \n \n + \
\n

Backport 81824b8 + from #11522 to 2.15.

\n

Ensure common patches for SCA and Exploit Prevention are loaded..

\n

only once
\nonly if exploit prevention is active or sca is + active
\nChanges:

\n

factorize load_common_modules logic + in ddtrace.appsec
\nboolean state for patch_common_module and enable_iast_propagation + to ensure they are only called once.
\nensure it's loaded after one click + activation
\nensure it's properly loaded in unit tests if required
\nadd + some failsafe for iast in wrap_open for importerror
\nupdate an iast test + to reflect that common_modules is loaded in the test by default.
\nAPPSEC-55997

\n

Checklist

\n
    \n
  • PR author has checked that all the criteria below are met
  • \n
  • The + PR description includes an overview of the change
  • \n
  • The PR description + articulates the motivation for the change
  • \n
  • The change includes tests + OR the PR description describes a testing strategy
  • \n
  • The PR description + notes risks associated with the change, if any
  • \n
  • Newly-added code + is easy to change
  • \n
  • The change follows the library release note guidelines
  • \n
  • The change + includes or references documentation updates if necessary
  • \n
  • Backport + labels are set (if applicable)
  • \n
\n

Reviewer Checklist

\n
    \n
  • Reviewer + has checked that all the criteria below are met
  • \n
  • Title is accurate
  • \n
  • All + changes are related to the pull request's stated goal
  • \n
  • Avoids breaking + API changes
  • \n
  • Testing strategy adequately addresses + listed risks
  • \n
  • Newly-added code is easy to change
  • \n
  • Release + note makes sense to a user of the library
  • \n
  • If necessary, author has + acknowledged and discussed the performance implications of this PR as reported + in the benchmarks PR comment
  • \n
  • Backport labels are set in a manner + that is consistent with the release branch maintenance policy
  • \n
\n
\n + \
\n \n
\n\n
\n\n
\n \n + \

\n \n + \ \n \n \n\n

\n \n\n\n\n + \
\n
\n
\n \n
\n
\n + \ \n
\n
\n
\n + \
\n\n
\n
\n
\n\n\n \n\n \n
\n\n\n
\n + \ \n
\n
\n
\n \n
\n \n
\n
\n
\n
\n \n
\n
\n \n \"@christophe-papazian\"\n + \
\n
\n\n
\n \n + \ fix(asm): + add global states to ensure patching once (#11522)\n + \ \n\n \n + \ + \ \n
\n\n
\n \n\n + \ \n \n \n\n \n\n \n\n\n + \
\n\n
\n + \
\n + \ \n\n + \ \n \n \n\n \n\n
\n \n
\n\n
\n + \
\n
\n\n
\n \n + \ cd59645\n \n
\n
\n + \
\n
\n
Ensure common patches for SCA and Exploit Prevention are loaded..\n-
+        only once\n- only if exploit prevention is active or sca is active\n\nChanges:\n-
+        factorize load_common_modules logic in ddtrace.appsec\n- boolean state for
+        patch_common_module and enable_iast_propagation to\nensure they are only called
+        once.\n- ensure it's loaded after one click activation\n- ensure it's properly
+        loaded in unit tests if required\n- add some failsafe for iast in wrap_open
+        for importerror\n- update an iast test to reflect that common_modules is loaded
+        in the\ntest by default.\n\nAPPSEC-55997\n\n- [x] PR author has checked that
+        all the criteria below are met\n- The PR description includes an overview
+        of the change\n- The PR description articulates the motivation for the change\n-
+        The change includes tests OR the PR description describes a testing\nstrategy\n-
+        The PR description notes risks associated with the change, if any\n- Newly-added
+        code is easy to change\n- The change follows the [library release note\nguidelines](https://ddtrace.readthedocs.io/en/stable/releasenotes.html)\n-
+        The change includes or references documentation updates if necessary\n- Backport
+        labels are set (if\n[applicable](https://ddtrace.readthedocs.io/en/latest/contributing.html#backporting))\n\n-
+        [x] Reviewer has checked that all the criteria below are met\n- Title is accurate\n-
+        All changes are related to the pull request's stated goal\n- Avoids breaking\n[API](https://ddtrace.readthedocs.io/en/stable/versioning.html#interfaces)\nchanges\n-
+        Testing strategy adequately addresses listed risks\n- Newly-added code is
+        easy to change\n- Release note makes sense to a user of the library\n- If
+        necessary, author has acknowledged and discussed the performance\nimplications
+        of this PR as reported in the benchmarks PR comment\n- Backport labels are
+        set in a manner that is consistent with the\n[release branch maintenance\npolicy](https://ddtrace.readthedocs.io/en/latest/contributing.html#backporting)\n\n(cherry
+        picked from commit 81824b8)
\n + \
\n
\n\n
\n
\n
\n\n\n
\n\n
\n + \ \n
\n \n
\n \n
\n + \
\"@christophe-papazian\"\n christophe-papazian\n\n\n marked + this pull request as ready for review\n\n November + 25, 2024 16:51 \n \n
\n
\n
\n + \ \n
\n \n
\n + \
\"@christophe-papazian\"\n christophe-papazian\n\n\n requested + review from\n a team\n\n + \ as code owners\n\n\n + \ November 25, 2024 16:51 \n + \ \n
\n
\n
\n \n
\n \n
\n + \
\"@christophe-papazian\"\n christophe-papazian\n\n\n requested + review from\n gnufede + and \n emmettbutler\n\n\n\n + \ November 25, 2024 16:51 \n + \ \n
\n
\n\n\n
\n\n
\n \n \n
\n\n
\n + \ \"@github-actions\"\n\n \n + \ \"GitHub\n \n
\n\n\n
\n\n
\n + \
\n + \
\n + \
\n + \ \n + \ \n \n\n \n\n\n + \ \n \n \n \n Copy + link\n\n
\n
\n + \
\n
\n\n
\n \n\n + \ \n + \ Contributor\n\n\n + \ \n\n
\n\n

\n + \
\n \n\n \n github-actions\n + \ bot\n\n \n\n + \ \n\n commented\n\n\n Nov 25, 2024\n\n\n + \ \n
\n\n

\n
\n\n\n
\n\n + \ \n\n \n \n + \ \n \n \n
\n + \

CODEOWNERS have + been resolved as:

\n
releasenotes/notes/exploit_prevention_patch_fix-1bdd7540e1d085d8.yaml
+        \  @DataDog/apm-python\nddtrace/_monkey.py                                                      @DataDog/apm-core-python\nddtrace/appsec/__init__.py
+        \                                             @DataDog/asm-python\nddtrace/appsec/_common_module_patches.py
+        \                               @DataDog/asm-python\nddtrace/appsec/_iast/__init__.py
+        \                                       @DataDog/asm-python\nddtrace/appsec/_remoteconfiguration.py
+        \                                 @DataDog/asm-python\ntests/appsec/integrations/test_flask_telemetry.py
+        \                      @DataDog/asm-python\ntests/utils.py                                                          @DataDog/python-guild\n
\n\n + \
\n
\n\n\n
\n\n \n\n
\n + \
\n
\n \n \n
\n
\n \n
\n
\n
\n
\n + \
\n
\n\n
\n \n + \

\n \n + \ \n \n \n\n

\n \n\n\n + \
\n
\n\n
\n\n\n
\n\n\n
\n + \ \n \n
\n\n
\n + \ \"@datadog-dd-trace-py-rkomorn\"\n\n
\n\n\n + \
\n\n
\n + \
\n + \
\n + \
\n + \ \n + \ \n \n\n \n\n\n + \ \n \n \n \n Copy + link\n\n
\n
\n + \
\n
\n\n
\n \n\n\n\n + \ \n\n
\n\n

\n + \
\n \n\n \n datadog-dd-trace-py-rkomorn\n + \ bot\n\n \n\n + \ \n\n commented\n\n\n Nov 25, 2024\n\n\n + \ \n \n\n
\n \n
\n + \ \n edited\n \n \n \n + \ \n\n
\n + \
\n \n \n \n + \ \n \n + \ \n Loading\n\n \n \n + \
\n
\n\n
\n\n

\n
\n\n\n
\n\n + \ \n\n \n \n + \ \n \n \n
\n + \

Datadog Report

\n

Branch report: + backport-11522-to-2.15
\nCommit + report: c476a58
\nTest + service: dd-trace-py

\n

\u2705 + 0 Failed, 592 Passed, 694 Skipped, 19m 30.54s Total duration (15m 23.31s time + saved)

\n
\n
\n\n\n + \
\n\n \n\n
\n + \
\n
\n \n \n
\n
\n \n
\n
\n
\n
\n + \
\n
\n\n
\n \n + \

\n \n + \ \n \n \n\n

\n \n\n\n + \
\n
\n\n
\n\n\n
\n\n\n
\n + \ \n
\n \n
\n \"gnufede\"\n + \
\n \n
\n + \
\n + \ \n gnufede\n \n\n + \ \n\n approved these changes\n\n\n \n \n + \ Nov + 25, 2024\n \n \n \n + \
\n\n \n
\n
\n\n
\n \n \n
\n
\n
\n
\n\n\n
\n\n + \
\n \n
\n + \ \n
\n + \ \n
\n
\n \"@gnufede\"\n gnufede\n\nenabled + auto-merge (squash)\n\n November + 25, 2024 17:32 \n \n
\n
\n\n\n
\n\n + \
\n \n \n
\n\n
\n + \ \"@pr-commenter\"\n\n
\n\n\n
\n\n
\n + \
\n + \
\n + \
\n + \ \n + \ \n \n\n \n\n\n + \ \n \n \n \n Copy + link\n\n
\n
\n + \
\n
\n\n
\n \n\n\n\n + \ \n\n
\n\n

\n + \
\n \n\n \n pr-commenter\n + \ bot\n\n \n\n + \ \n\n commented\n\n\n Nov 25, 2024\n\n\n + \ \n \n\n
\n \n
\n + \ \n edited\n \n \n \n + \ \n\n
\n + \
\n \n \n \n + \ \n \n + \ \n Loading\n\n \n \n + \
\n
\n\n
\n\n

\n
\n\n\n
\n\n + \ \n\n \n \n + \ \n \n \n
\n + \

Benchmarks

\n

Benchmark execution + time: 2024-11-26 21:13:50

\n

Comparing candidate commit + c476a58 + in PR branch backport-11522-to-2.15 with + baseline commit b462888 + in branch 2.15.

\n

Found + 0 performance improvements and 0 performance regressions! Performance is the + same for 371 metrics, 53 unstable metrics.

\n
\n
\n\n\n + \
\n\n \n\n
\n + \
\n
\n \n \n
\n
\n \n
\n
\n
\n
\n + \
\n
\n\n
\n \n + \

\n \n + \ \n \n \n\n

\n \n\n\n + \
\n
\n\n
\n\n\n
\n\n\n
\n + \ \n
\n \n
\n \"erikayasuda\"\n + \
\n \n
\n + \
\n + \ \n erikayasuda\n + \ \n\n \n\n approved these changes\n\n\n \n \n + \ Nov + 26, 2024\n \n \n \n + \
\n\n \n
\n
\n\n
\n \n \n
\n
\n
\n
\n\n\n
\n\n + \
\n + \ \n
\n
\n \n
\n \n
\n + \
christophe-papazian\n \nand others\n + \ added 2 commits\n November + 26, 2024 18:39
\n
\n
\n \n
\n \n
\n
\n
\n
\n \n
\n
\n \n \"@christophe-papazian\"\n + \
\n
\n\n \n\n + \
\n \n\n \n \n \n\n \n\n \n\n\n + \
\n\n
\n + \
\n + \ \n\n + \ \n \n \n\n \n\n
\n \n
\n\n
\n + \
\n
\n\n
\n \n + \ 3ac9ef8\n \n
\n
\n + \
\n
\n\n
\n
\n \n
\n \n
\n
\n
\n
\n \n
\n
\n \n \"@erikayasuda\"\n
\n
\n\n + \ \n\n + \
\n \n\n \n \n \n\n \n\n \n\n\n + \
\n\n
\n + \
\n + \ \n\n + \ \n \n \n\n \n\n
\n \n
\n\n
\n + \
\n
\n\n
\n \n + \ c476a58\n \n
\n
\n + \
\n
\n\n
\n
\n
\n\n\n
\n\n
\n + \ \n
\n
\n + \ \n + \ \n\n + \
\n
\n\n \n + \ \"@gnufede\"\n gnufede\n\n\n\n + \ merged commit 2d6800f\n into\n\n \n \n 2.15\n \n\n\n Nov 26, 2024\n\n
\n 584 checks passed\n
\n\n
\n + \ \n \n + \ \n \n + \ \n\n + \ \n
\n
\n
\n\n
\n\n + \
\n \n
\n \n
\n
\"@gnufede\"\n gnufede\n\n\n + \ \n deleted the\n \n + \ \n backport-11522-to-2.15\n \n branch\n\n + \ November 26, 2024 21:16 \n + \ \n
\n
\n\n\n
\n\n\n\n\n\n \n
\n
\n \n
+ \
\n\n\n\n
\n\n
\n + \
\n
\n \n Sign up for free\n to join + this conversation on GitHub.\n Already have an account?\n Sign + in to comment\n\n\n \n
\n\n
\n
\n
\n\n
\n + \
\n
\n\n\n \n
\n \n
\n \n
\n Reviewers\n
\n\n \n\n\n + \

\n \n\n \n \"@erikayasuda\"\n \n erikayasuda\n\n\n\n + \ \n + \ \n \n + \ \n\n \n \n erikayasuda approved these changes\n\n + \

\n

\n \n\n \n \"@gnufede\"\n \n gnufede\n\n\n\n \n + \ \n \n + \ \n\n \n \n gnufede approved these changes\n\n + \

\n

\n \n\n \n \"@emmettbutler\"\n \n emmettbutler\n\n\n + \ Awaiting requested review from emmettbutler\n\n + \ emmettbutler is a code owner automatically + assigned from DataDog/apm-python\n\n \n

\n\n \n
\n\n
\n\n\n + \
\n
\n\n \n
\n Assignees\n + \
\n\n\n \n\n + \ No one assigned\n\n\n\n
\n\n\n \n\n \n\n\n
\n Labels\n
\n\n\n
\n None yet\n
\n\n
\n\n\n \n\n
\n
\n
\n Projects\n + \
\n\n
\n
\n\n None yet\n\n\n\n
\n\n\n + \ \n
\n
\n \n
\n Milestone\n + \
\n\n No milestone\n\n
\n\n\n \n \n \n
\n
\n \n
\n \n
\n Development\n + \
\n\n\n \n\n

Successfully merging this pull request may + close these issues.

\n\n\n \n\n
+ \
\n
\n
\n\n \n \n\n + \ \n\n \n
\n + \
\n
\n 3 participants\n
\n \n
\n
\n\n\n\n + \ \n\n \n\n\n\n\n \n\n
\n\n\n\n \n \n \n + \ \n\n\n + \ \n\n\n \n\n\n\n\n \n \n\n + \ \n\n
\n

Footer

\n\n \n\n\n
\n
\n \n \n \n\n\n + \ \n © 2024 GitHub, Inc.\n \n
\n\n + \ \n
\n
\n\n\n\n\n \n\n\n \n\n + \ \n\n
\n + \
\n
\n
\n\n \n\n\n\n\n\n \n\n
\n + \
\n \n\n\n" + headers: + Accept-Ranges: + - bytes + Cache-Control: + - no-cache + Content-Security-Policy: + - 'default-src ''none''; base-uri ''self''; child-src github.com/assets-cdn/worker/ + github.com/webpack/ github.com/assets/ gist.github.com/assets-cdn/worker/; + connect-src ''self'' uploads.github.com www.githubstatus.com collector.github.com + raw.githubusercontent.com api.github.com github-cloud.s3.amazonaws.com github-production-repository-file-5c1aeb.s3.amazonaws.com + github-production-upload-manifest-file-7fdce7.s3.amazonaws.com github-production-user-asset-6210df.s3.amazonaws.com + *.rel.tunnels.api.visualstudio.com wss://*.rel.tunnels.api.visualstudio.com + objects-origin.githubusercontent.com copilot-proxy.githubusercontent.com proxy.individual.githubcopilot.com + proxy.business.githubcopilot.com proxy.enterprise.githubcopilot.com *.actions.githubusercontent.com + wss://*.actions.githubusercontent.com productionresultssa0.blob.core.windows.net/ + productionresultssa1.blob.core.windows.net/ productionresultssa2.blob.core.windows.net/ + productionresultssa3.blob.core.windows.net/ productionresultssa4.blob.core.windows.net/ + productionresultssa5.blob.core.windows.net/ productionresultssa6.blob.core.windows.net/ + productionresultssa7.blob.core.windows.net/ productionresultssa8.blob.core.windows.net/ + productionresultssa9.blob.core.windows.net/ productionresultssa10.blob.core.windows.net/ + productionresultssa11.blob.core.windows.net/ productionresultssa12.blob.core.windows.net/ + productionresultssa13.blob.core.windows.net/ productionresultssa14.blob.core.windows.net/ + productionresultssa15.blob.core.windows.net/ productionresultssa16.blob.core.windows.net/ + productionresultssa17.blob.core.windows.net/ productionresultssa18.blob.core.windows.net/ + productionresultssa19.blob.core.windows.net/ github-production-repository-image-32fea6.s3.amazonaws.com + github-production-release-asset-2e65be.s3.amazonaws.com insights.github.com + 
wss://alive.github.com api.githubcopilot.com api.individual.githubcopilot.com + api.business.githubcopilot.com api.enterprise.githubcopilot.com; font-src + github.githubassets.com; form-action ''self'' github.com gist.github.com copilot-workspace.githubnext.com + objects-origin.githubusercontent.com; frame-ancestors ''none''; frame-src + viewscreen.githubusercontent.com notebooks.githubusercontent.com; img-src + ''self'' data: blob: github.githubassets.com media.githubusercontent.com camo.githubusercontent.com + identicons.github.com avatars.githubusercontent.com private-avatars.githubusercontent.com + github-cloud.s3.amazonaws.com objects.githubusercontent.com secured-user-images.githubusercontent.com/ + user-images.githubusercontent.com/ private-user-images.githubusercontent.com + opengraph.githubassets.com github-production-user-asset-6210df.s3.amazonaws.com + customer-stories-feed.github.com spotlights-feed.github.com objects-origin.githubusercontent.com + *.githubusercontent.com; manifest-src ''self''; media-src github.com user-images.githubusercontent.com/ + secured-user-images.githubusercontent.com/ private-user-images.githubusercontent.com + github-production-user-asset-6210df.s3.amazonaws.com gist.github.com; script-src + github.githubassets.com; style-src ''unsafe-inline'' github.githubassets.com; + upgrade-insecure-requests; worker-src github.com/assets-cdn/worker/ github.com/webpack/ + github.com/assets/ gist.github.com/assets-cdn/worker/' + Content-Type: + - text/html; charset=utf-8 + Date: + - Thu, 12 Dec 2024 14:42:37 GMT + Referrer-Policy: + - no-referrer-when-downgrade + Server: + - GitHub.com + Set-Cookie: + - 
_gh_sess=lcakJUZEMJACUB16KjyZXhnaDJ7Lf%2FYHyuliVSpr%2BebjFOJkqrrGVy310bXG4sCIwd5suyAhSq1ar47KgrE92K2xy%2FyLkV0kyOGj2HTHZLBE0AoalTEwk%2FtwXY2eTPd4xUPomg0vtlqQrrYnQNHrj9IxaNsg225S2Xxjw2F05HFwCLbhj4Tdo2o8BBOYJeV2WH8GGT4bJ6XT0VeQRP3trJrZhY9WOPmlbyZ0k%2Biokd%2By3Tr6Fld4rQ3BKKJ6Nq%2FEfMuSc4M5FDuoXJzxMyyAGg%3D%3D--VglcwRFwLrbj7fn0--vsOdxQkYekEyYwll%2BqS%2B1A%3D%3D; + Path=/; HttpOnly; Secure; SameSite=Lax + - _octo=GH1.1.1476856324.1734014562; Path=/; Domain=github.com; Expires=Fri, + 12 Dec 2025 14:42:42 GMT; Secure; SameSite=Lax + - logged_in=no; Path=/; Domain=github.com; Expires=Fri, 12 Dec 2025 14:42:42 + GMT; HttpOnly; Secure; SameSite=Lax + Strict-Transport-Security: + - max-age=31536000; includeSubdomains; preload + Transfer-Encoding: + - chunked + Vary: + - X-PJAX, X-PJAX-Container, Turbo-Visit, Turbo-Frame, Accept-Encoding, Accept, + X-Requested-With + X-Content-Type-Options: + - nosniff + X-Frame-Options: + - deny + X-GitHub-Request-Id: + - ED4A:356904:24669B2:32ED588:675AF662 + X-XSS-Protection: + - '0' + connection: + - close + server-timing: + - pull_request_layout-fragment;desc="pull_request_layout fragment";dur=450.768495,conversation_content-fragment;desc="conversation_content + fragment";dur=576.513283,conversation_sidebar-fragment;desc="conversation_sidebar + fragment";dur=305.288275,nginx;desc="NGINX";dur=1.093278,glb;desc="GLB";dur=4.679312 + x-voltron-version: + - 69a2227 + status: + code: 200 + message: OK +- request: + body: null + headers: + Connection: + - close + Host: + - github.com + method: GET + uri: https://github.com/DataDog/dd-trace-py/pull/11690 + response: + body: + string: "\n\n\n\n\n\n\n\n\n\n\n\n \n \n + \ \n \n \n \n + \ \n + \ \n\n + \ \n\n \n\n \n \n \n \n \n\n\n \n\n\n \n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n + \ \n \n\n\n\n\n\n\n\n\n\n\n\n\n ci: store fake DD_API_KEY as a secret by brettlangdon \xB7 + Pull Request #11690 \xB7 DataDog/dd-trace-py \xB7 GitHub\n\n\n\n \n \n \n\n 
\n \n\n\n + \ \n\n\n \n\n\n \n \n\n \n \n\n + \ \n\n\n\n \n\n \n\n\n\n\n \n\n \n\n \n\n + \ \n\n \n\n \n\n \n \n \n\n \n \n \n\n\n\n\n \n\n\n\n + \ \n\n\n \n \n \n \n\n \n\n \n + \ \n\n + \ \n\n\n\n \n\n \n\n\n \n\n \n\n \n \n + \ \n\n\n\n\n\n \n\n + \ \n\n \n
\n \n\n\n
\n Skip to content\n\n + \ \n \n + \ \n \n \n\n\n\n\n \n \n + \
\n\n\n\n\n\n + \ \n\n \n\n \n\n\n
\n

Navigation Menu

\n\n \n\n + \
\n
\n
\n + \ \n
\n\n \n + \ \n + \ \n\n + \ \n\n
\n \n Sign in\n \n
\n
\n\n\n + \
\n
\n + \ \n\n
\n \n\n\n\n \n \n
\n \n \n\n + \
\n Search + or jump to...\n
\n + \ \n\n + \
\n \n\n \n\n \n
\n \n + \

Search + code, repositories, users, issues, pull requests...

\n
\n \n
+ \
\n
\n \n
\n \n \n \n \n \n\n \n
\n
\n
\n
\n + \ \n
\n + \
\n Clear\n + \ \n\n + \
\n \n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n + \
\n \n + \
\n + \
\n
\n\n \n + \
\n
\n\n
\n
\n
\n \n
\n + \ \n\n \n
\n + \
\n
\n + \

\n Provide + feedback\n

\n \n
\n
\n + \ \n
\n
\n + \ \n
\n \n + \
\n

We read every piece of feedback, and take your input very + seriously.

\n \n \n + \ \n
\n
\n + \ \n
\n\n \n \n\n + \ \n
\n
\n + \
\n

\n Saved searches\n

\n + \

Use + saved searches to filter your results more quickly

\n
\n
\n \n + \
\n
\n \n
\n \n + \
\n\n \n\n
\n + \
\n
\n\n
\n + \
\n \n
\n + \
\n
\n\n\n
\n \n Sign in\n \n + \
\n\n \n Sign + up\n \n \n
\n + \
\n
\n \n\n\n \n \n\n + \
\n\n\n\n\n\n\n\n\n + \
\n\n\n + \ \n\n\n + \ \n
\n\n\n + \ \n\n\n\n\n\n\n \n
\n
\n \n \n\n\n\n + \ \n \n\n \n\n\n\n\n\n\n \n
\n\n
\n\n + \
\n \n
\n + \ \n \n\n + \ \n \n + \ \n DataDog\n + \ \n /\n + \ \n dd-trace-py\n \n\n Public\n
\n\n\n + \
\n\n
\n \n\n + \
\n
\n\n
\n
\n\n\n \n\n + \
\n\n \n\n\n\n\n
\n \n\n\n\n \n \n
\n \n\n
\n \n \n \n\n
\n
\n
\n\n \n
\n \n \n New issue\n \n \n + \
\n
\n \n \n\n
\n\n
\n

\n Have a question + about this project? Sign up for a free GitHub account to open an + issue and contact its maintainers and the community.\n

\n\n \n\n

By + clicking “Sign up for GitHub”, you agree to our terms of service + and\n privacy statement. We\u2019ll occasionally send you + account related emails.

\n\n

\n + \ Already on GitHub?\n Sign + in\n to your account\n

\n
\n\n
\n
\n
\n + \ \n + \
\n\n

\n ci: + store fake DD_API_KEY as a secret\n #11690\n + \

\n
\n
\n\n
\n
\n \n + Open\n\n
\n\n\n\n\n
\n brettlangdon\n\n wants to merge\n 1\n + \ commit into\n\n\n main\n\n + \
\n
\n + \ \n base:\n + \ main\n \n + \ \n \n + \ \n
\n
\n + \
\n Choose + a base branch\n \n
\n\n + \ \n
\n + \ \n
\n\n \n \n\n
\n \n\n \n\n \n\n\n
\n
\n \n + \ \n \n \n Loading\n\n + \
\n
\n\n \n\n\n \n\n + \
\n\n \n
\n + \
\n
\n
\n\n \n + \
\n
\n\n
\n \n
\n\nfrom\n\nbrettlangdon-patch-3\n \n \n \n\n \n \n\n + \
\n
\n\n\n\n + \ \n \n\n\n\n\n\n\n\n\n \n \n + \
\n\n\n + \
\n\n
\n
\n\n\n \n\n\n\n
\n
\n + \
\n + \
\n + \
\n \n Open\n\n
\n\n\n\n\n + \
\n

\n \n + \ ci: store fake DD_API_KEY as a secret\n \n #11690\n

\n\n
\n brettlangdon\n\n + \ wants to merge\n 1\n + \ commit into\n\n\n main\n\nfrom\n\nbrettlangdon-patch-3\n \n \n \n\n \n \n\n + \
\n
\n\n\n\n\n + \
\n
\n
\n
\n + \
\n
\n
\n
\n\n\n\n \n
\n
\n \n \n +2\n \n \n \u22122\n \n \n + \ \n \n \n + \
\n\n \n
\n\n\n\n
\n + \
\n

Conversation

\n + \ \n \n\n\n \n\n
\n\n
\n \"brettlangdon\"\n + \ \n \n
\n + \
\n
\n
\n
\n \n \n \n\n \n\n\n \n \n \n \n Copy + link\n\n
\n
\n + \
\n
\n\n
\n \n\n + \ \n Member\n\n\n \n\n
\n\n

\n
\n \"@brettlangdon\"\n\n \n brettlangdon\n \n\n \n\n \n\n + \ commented\n\n\n Dec 12, 2024\n\n\n \n + \ \n\n
\n + \ \n
\n \n edited\n \n + \ \n \n \n\n
\n
\n \n \n \n + \ \n \n + \ \n Loading\n\n \n \n + \
\n
\n\n
\n\n

\n
\n\n
\n + \
\n \n \n + \
\n

Checklist

\n
    \n
  • PR author + has checked that all the criteria below are met
  • \n
  • The PR description + includes an overview of the change
  • \n
  • The PR description articulates + the motivation for the change
  • \n
  • The change includes tests OR the PR + description describes a testing strategy
  • \n
  • The PR description notes + risks associated with the change, if any
  • \n
  • Newly-added code is easy + to change
  • \n
  • The change follows the library release note guidelines
  • \n
  • The change + includes or references documentation updates if necessary
  • \n
  • Backport + labels are set (if applicable)
  • \n
\n

Reviewer Checklist

\n
    \n
  • Reviewer + has checked that all the criteria below are met
  • \n
  • Title is accurate
  • \n
  • All + changes are related to the pull request's stated goal
  • \n
  • Avoids breaking + API changes
  • \n
  • Testing strategy adequately addresses + listed risks
  • \n
  • Newly-added code is easy to change
  • \n
  • Release + note makes sense to a user of the library
  • \n
  • If necessary, author has + acknowledged and discussed the performance implications of this PR as reported + in the benchmarks PR comment
  • \n
  • Backport labels are set in a manner + that is consistent with the release branch maintenance policy
  • \n
\n
\n + \
\n \n
\n\n
\n\n
\n \n + \

\n \n + \ \n \n \n\n

\n \n\n\n\n + \
\n
\n
\n \n
\n
\n + \ \n
\n
\n
\n + \
\n\n
\n
\n
\n\n\n \n\n \n
\n\n\n
\n + \ \n
\n
\n
\n \n
\n \n
\n
\n
\n
\n \n
\n
\n \n \"@brettlangdon\"\n
\n
\n\n + \ \n\n + \
\n \n\n \n \n \n\n \n\n \n\n\n + \
\n\n
\n + \
\n + \ \n\n + \ \n \n \n\n \n\n
\n \n
\n\n
\n + \
\n
\n\n
\n \n + \ a6675d3\n \n
\n
\n + \
\n
\n\n
\n
\n
\n\n\n
\n\n
\n + \ \n
\n
\n \n + \ \n\n
\n + \
\n\n \n\n \"@brettlangdon\"\nbrettlangdon\n\n\n\n\n added\n the \n\n changelog/no-changelog\n\n A changelog + entry is not required for this PR.\n label\n\n\n Dec 12, 2024\n\n
\n
\n\n\n + \
\n \n
\n \n
\n + \
\"@brettlangdon\"\n brettlangdon\n\n\n requested review from\n + \ a team\n\n as code owners\n\n\n + \ December 12, 2024 13:39 \n + \ \n
\n
\n
\n \n
\n \n
\n + \
\"@brettlangdon\"\n brettlangdon\n\n\n requested review from\n + \ avara1986 + and \n erikayasuda\n\n\n\n + \ December 12, 2024 13:39 \n + \ \n
\n
\n\n\n
\n\n
\n \n \n
\n\n
\n + \ \"@github-actions\"\n\n \n + \ \"GitHub\n \n
\n\n\n
\n\n
\n + \
\n + \
\n + \
\n + \ \n + \ \n \n\n \n\n\n + \ \n \n \n \n Copy + link\n\n
\n
\n + \
\n
\n\n
\n \n\n + \ \n + \ Contributor\n\n\n + \ \n\n
\n\n

\n + \
\n \n\n \n github-actions\n + \ bot\n\n \n\n + \ \n\n commented\n\n\n Dec 12, 2024\n\n\n + \ \n
\n\n

\n
\n\n\n
\n\n + \ \n\n \n \n + \ \n \n \n
\n + \

CODEOWNERS have + been resolved as:

\n
.github/workflows/system-tests.yml
+        \                                     @DataDog/python-guild @DataDog/apm-core-python\n
\n\n + \
\n
\n\n\n
\n\n \n\n
\n + \
\n
\n \n \n
\n
\n \n
\n
\n
\n
\n + \
\n
\n\n
\n \n + \

\n \n + \ \n \n \n\n

\n \n\n\n + \
\n
\n\n
\n\n\n
\n\n\n
\n + \ \n
\n \n
\n \"romainkomorndatadog\"\n + \
\n \n
\n + \
\n + \ \n romainkomorndatadog\n + \ \n\n \n\n approved these changes\n\n\n \n \n + \ Dec + 12, 2024\n \n \n \n + \
\n\n \n
\n
\n\n
\n \n \n
\n
\n
\n
\n\n\n
\n\n + \
\n \n \n
\n\n
\n + \ \"@datadog-dd-trace-py-rkomorn\"\n\n
\n\n\n + \
\n\n
\n + \
\n + \
\n + \
\n + \ \n + \ \n \n\n \n\n\n + \ \n \n \n \n Copy + link\n\n
\n
\n + \
\n
\n\n
\n \n\n\n\n + \ \n\n
\n\n

\n + \
\n \n\n \n datadog-dd-trace-py-rkomorn\n + \ bot\n\n \n\n + \ \n\n commented\n\n\n Dec 12, 2024\n\n\n + \ \n
\n\n

\n
\n\n\n
\n\n + \ \n\n \n \n + \ \n \n \n
\n + \

Datadog Report

\n

Branch report: + brettlangdon-patch-3
\nCommit + report: a6675d3
\nTest + service: dd-trace-py

\n

\u2705 + 0 Failed, 55 Passed, 1413 Skipped, 1m 29.81s Total duration (35m 20.17s time + saved)

\n
\n
\n\n\n + \
\n\n \n\n
\n + \
\n
\n \n \n
\n
\n \n
\n
\n
\n
\n + \
\n
\n\n
\n \n + \

\n \n + \ \n \n \n\n

\n \n\n\n + \
\n
\n\n
\n\n\n
\n\n\n
\n + \ \n \n
\n\n
\n + \ \"@brettlangdon\"\n\n
\n\n\n
\n\n
\n + \
\n + \
\n + \
\n + \ \n + \ \n \n\n \n\n\n + \ \n \n \n \n Copy + link\n\n
\n
\n + \
\n
\n\n
\n \n\n + \ \n Member\n\n\n \n\n Author\n\n\n + \
\n\n

\n
\n + \ \n\n \n brettlangdon\n + \ \n\n \n\n \n\n commented\n\n\n Dec 12, 2024\n\n\n + \ \n
\n\n

\n
\n\n\n
\n\n + \ \n\n \n \n + \ \n \n \n
\n + \

/merge

\n
\n
\n\n\n + \
\n\n \n\n
\n + \
\n
\n \n \n
\n
\n \n
\n
\n
\n
\n + \
\n
\n\n
\n \n + \

\n \n + \ \n \n \n\n

\n \n\n\n + \
\n
\n\n
\n\n\n
\n\n\n
\n + \ \n \n
\n\n
\n + \ \"@dd-devflow\"\n\n
\n\n\n
\n\n
\n + \
\n + \
\n + \
\n + \ \n + \ \n \n\n \n\n\n + \ \n \n \n \n Copy + link\n\n
\n
\n + \
\n
\n\n
\n \n\n\n\n + \ \n\n
\n\n

\n + \
\n \n\n \n dd-devflow\n + \ bot\n\n \n\n + \ \n\n commented\n\n\n Dec 12, 2024\n\n\n + \ \n \n\n
\n \n
\n + \ \n edited\n \n \n \n + \ \n\n
\n + \
\n \n \n \n + \ \n \n + \ \n Loading\n\n \n \n + \
\n
\n\n
\n\n

\n
\n\n\n
\n\n + \ \n\n \n \n + \ \n \n \n
\n + \ \n

Devflow running: /merge

\n

View all feedbacks in Devflow UI.

\n
\n

2024-12-12 13:54:30 UTC \u2139\uFE0F MergeQueue: + waiting for PR to be ready

\n

This merge request is not + mergeable yet, because of pending checks/missing approvals. It will be added + to the queue as soon as checks pass and/or get approvals.
\nNote: + if you pushed new commits since the last approval, you may need additional + approval.
\nYou can remove it from the waiting list with /remove + command.

\n

Use /merge -c + to cancel this operation!

\n
\n

2024-12-12 + 14:26:14 UTC \u2139\uFE0F MergeQueue: merge request + added to the queue

\n

The median merge time in main + is 34m.

\n

Use /merge -c + to cancel this operation!

\n
\n

\u23F3 + command still in progress ...

\n
\n
\n\n\n + \
\n\n \n\n
\n + \
\n
\n \n \n
\n
\n \n
\n
\n
\n
\n + \
\n
\n\n
\n \n + \

\n \n + \ \n \n \n\n

\n \n\n\n + \
\n
\n\n
\n\n\n
\n\n\n
\n + \ \n
\n
\n \n + \ \n\n
\n + \
\n\n \n\n \"@dd-devflow\"\ndd-devflow\nbot\n\n\n\n added\n the \n\n mergequeue-status: waiting\n\n label\n\n\n Dec 12, 2024\n\n
\n
\n\n\n\n\n
\n\n + \
\n \n \n
\n\n
\n + \ \"@pr-commenter\"\n\n
\n\n\n
\n\n
\n + \
\n + \
\n + \
\n + \ \n + \ \n \n\n \n\n\n + \ \n \n \n \n Copy + link\n\n
\n
\n + \
\n
\n\n
\n \n\n\n\n + \ \n\n
\n\n

\n + \
\n \n\n \n pr-commenter\n + \ bot\n\n \n\n + \ \n\n commented\n\n\n Dec 12, 2024\n\n\n + \ \n
\n\n

\n
\n\n\n
\n\n + \ \n\n \n \n + \ \n \n \n
\n + \

Benchmarks

\n

Benchmark execution + time: 2024-12-12 14:24:20

\n

Comparing candidate commit + a6675d3 + in PR branch brettlangdon-patch-3 with + baseline commit 385d8e0 + in branch main.

\n

Found + 0 performance improvements and 0 performance regressions! Performance is the + same for 394 metrics, 2 unstable metrics.

\n
\n
\n\n\n + \
\n\n \n\n
\n + \
\n
\n \n \n
\n
\n \n
\n
\n
\n
\n + \
\n
\n\n
\n \n + \

\n \n + \ \n \n \n\n

\n \n\n\n + \
\n
\n\n
\n\n\n
\n\n\n
\n + \ \n
\n
\n \n + \ \n\n
\n + \ \n
\n\n\n\n\n
\n\n\n\n\n\n + \ \n
\n
\n \n
+ \
\n\n\n\n
\n\n
\n + \
\n
\n \n Sign up for free\n to join + this conversation on GitHub.\n Already have an account?\n Sign + in to comment\n\n\n \n
\n\n
\n
\n
\n\n
\n + \
\n
\n\n\n \n
\n \n
\n \n
\n Reviewers\n
\n\n \n\n\n + \

\n \n\n \n \"@romainkomorndatadog\"\n \n romainkomorndatadog\n\n\n\n + \ \n + \ \n \n + \ \n\n \n \n romainkomorndatadog approved these changes\n\n + \

\n

\n \n\n \n \"@avara1986\"\n \n avara1986\n\n\n + \ Awaiting requested review from avara1986\n\n + \ avara1986 is a code owner automatically + assigned from DataDog/python-guild\n\n \n

\n

\n \n\n \n \"@erikayasuda\"\n \n erikayasuda\n\n\n + \ Awaiting requested review from erikayasuda\n\n + \ erikayasuda is a code owner automatically + assigned from DataDog/apm-core-python\n\n \n

\n\n + \ \n
\n\n
\n\n\n
\n
\n\n \n
\n Assignees\n + \
\n\n\n \n\n + \ No one assigned\n\n\n\n
\n\n\n \n\n \n\n\n
\n Labels\n
\n\n\n
\n \n\n changelog/no-changelog\n\n + \ A changelog entry is not required for + this PR.\n \n\n mergequeue-status: + in_progress\n\n\n
\n\n
\n\n\n \n\n
\n
\n
\n Projects\n + \
\n\n
\n
\n\n None yet\n\n\n\n
\n\n\n + \ \n
\n
\n \n
\n Milestone\n + \
\n\n No milestone\n\n
\n\n\n \n \n \n
\n
\n \n
\n \n
\n Development\n + \
\n\n\n \n\n

Successfully merging this pull request may + close these issues.

\n\n\n \n\n
+ \
\n
\n
\n\n \n \n\n + \ \n\n \n
\n + \
\n
\n 2 participants\n
\n \n
\n
\n\n\n\n + \ \n\n \n\n\n\n\n \n\n
\n\n\n
\n \n \n \n + \ \n\n\n + \ \n\n\n \n\n\n\n\n \n \n\n + \ \n\n
\n

Footer

\n\n \n\n\n
\n
\n \n \n \n\n\n + \ \n © 2024 GitHub, Inc.\n \n
\n\n + \ \n
\n
\n\n\n\n\n \n\n\n \n\n + \ \n\n
\n + \
\n
\n
\n\n \n\n\n\n\n\n \n\n
\n + \
\n \n\n\n" + headers: + Accept-Ranges: + - bytes + Cache-Control: + - no-cache + Content-Security-Policy: + - 'default-src ''none''; base-uri ''self''; child-src github.com/assets-cdn/worker/ + github.com/webpack/ github.com/assets/ gist.github.com/assets-cdn/worker/; + connect-src ''self'' uploads.github.com www.githubstatus.com collector.github.com + raw.githubusercontent.com api.github.com github-cloud.s3.amazonaws.com github-production-repository-file-5c1aeb.s3.amazonaws.com + github-production-upload-manifest-file-7fdce7.s3.amazonaws.com github-production-user-asset-6210df.s3.amazonaws.com + *.rel.tunnels.api.visualstudio.com wss://*.rel.tunnels.api.visualstudio.com + objects-origin.githubusercontent.com copilot-proxy.githubusercontent.com proxy.individual.githubcopilot.com + proxy.business.githubcopilot.com proxy.enterprise.githubcopilot.com *.actions.githubusercontent.com + wss://*.actions.githubusercontent.com productionresultssa0.blob.core.windows.net/ + productionresultssa1.blob.core.windows.net/ productionresultssa2.blob.core.windows.net/ + productionresultssa3.blob.core.windows.net/ productionresultssa4.blob.core.windows.net/ + productionresultssa5.blob.core.windows.net/ productionresultssa6.blob.core.windows.net/ + productionresultssa7.blob.core.windows.net/ productionresultssa8.blob.core.windows.net/ + productionresultssa9.blob.core.windows.net/ productionresultssa10.blob.core.windows.net/ + productionresultssa11.blob.core.windows.net/ productionresultssa12.blob.core.windows.net/ + productionresultssa13.blob.core.windows.net/ productionresultssa14.blob.core.windows.net/ + productionresultssa15.blob.core.windows.net/ productionresultssa16.blob.core.windows.net/ + productionresultssa17.blob.core.windows.net/ productionresultssa18.blob.core.windows.net/ + productionresultssa19.blob.core.windows.net/ github-production-repository-image-32fea6.s3.amazonaws.com + github-production-release-asset-2e65be.s3.amazonaws.com insights.github.com + 
wss://alive.github.com api.githubcopilot.com api.individual.githubcopilot.com + api.business.githubcopilot.com api.enterprise.githubcopilot.com; font-src + github.githubassets.com; form-action ''self'' github.com gist.github.com copilot-workspace.githubnext.com + objects-origin.githubusercontent.com; frame-ancestors ''none''; frame-src + viewscreen.githubusercontent.com notebooks.githubusercontent.com; img-src + ''self'' data: blob: github.githubassets.com media.githubusercontent.com camo.githubusercontent.com + identicons.github.com avatars.githubusercontent.com private-avatars.githubusercontent.com + github-cloud.s3.amazonaws.com objects.githubusercontent.com secured-user-images.githubusercontent.com/ + user-images.githubusercontent.com/ private-user-images.githubusercontent.com + opengraph.githubassets.com github-production-user-asset-6210df.s3.amazonaws.com + customer-stories-feed.github.com spotlights-feed.github.com objects-origin.githubusercontent.com + *.githubusercontent.com; manifest-src ''self''; media-src github.com user-images.githubusercontent.com/ + secured-user-images.githubusercontent.com/ private-user-images.githubusercontent.com + github-production-user-asset-6210df.s3.amazonaws.com gist.github.com; script-src + github.githubassets.com; style-src ''unsafe-inline'' github.githubassets.com; + upgrade-insecure-requests; worker-src github.com/assets-cdn/worker/ github.com/webpack/ + github.com/assets/ gist.github.com/assets-cdn/worker/' + Content-Type: + - text/html; charset=utf-8 + Date: + - Thu, 12 Dec 2024 14:42:38 GMT + Referrer-Policy: + - no-referrer-when-downgrade + Server: + - GitHub.com + Set-Cookie: + - 
_gh_sess=SRp9AZpG%2B9PjbOI2DwrEGHPVoSPO1RQxFghqLR7KL1Fy058969XVQivgCdTFTsevR18tNXoZ%2FKyRkxCnOi2HhrErMbXOcrBwL5FA5%2FuR4HL8V1NhpBn75oTzynU53VGcHD6m7%2BIlieWYdCDurncYFhjKC%2FyJMwrbWCv8a%2BqwOdUGnXDfrkHq9if6PsYS6W3SV3HjEy72OBGtOU%2FpHCZOngO5mPkK52xmJZd5cZuqoLImJBzkm8LUbVPQjcLWKerz3McWy5a71T9kSEnN3Z0www%3D%3D--5w9odkPS8zIP4Vqv--zkwZym8AQTQ3LMLwY5hRjw%3D%3D; + Path=/; HttpOnly; Secure; SameSite=Lax + - _octo=GH1.1.172292125.1734014562; Path=/; Domain=github.com; Expires=Fri, + 12 Dec 2025 14:42:42 GMT; Secure; SameSite=Lax + - logged_in=no; Path=/; Domain=github.com; Expires=Fri, 12 Dec 2025 14:42:42 + GMT; HttpOnly; Secure; SameSite=Lax + Strict-Transport-Security: + - max-age=31536000; includeSubdomains; preload + Transfer-Encoding: + - chunked + Vary: + - X-PJAX, X-PJAX-Container, Turbo-Visit, Turbo-Frame, Accept-Encoding, Accept, + X-Requested-With + X-Content-Type-Options: + - nosniff + X-Frame-Options: + - deny + X-GitHub-Request-Id: + - ED4B:321365:23B1314:320365D:675AF662 + X-XSS-Protection: + - '0' + connection: + - close + server-timing: + - pull_request_layout-fragment;desc="pull_request_layout fragment";dur=412.175919,conversation_content-fragment;desc="conversation_content + fragment";dur=448.910543,conversation_sidebar-fragment;desc="conversation_sidebar + fragment";dur=302.334653,nginx;desc="NGINX";dur=1.331055,glb;desc="GLB";dur=3.067062 + x-voltron-version: + - 69a2227 + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - application/vnd.github+json + Connection: + - close + Host: + - api.github.com + method: GET + uri: https://api.github.com/repos/datadog/dd-trace-py/pulls/6388/files?page=1 + response: + body: + string: 
'[{"sha":"1325b0864ebc6d4c40970f698018ac2524fe4e33","filename":"ddtrace/debugging/_expressions.py","status":"modified","additions":2,"deletions":2,"changes":4,"blob_url":"https://github.com/DataDog/dd-trace-py/blob/2eb060881fdd94f4f717ae19549b598317b74d30/ddtrace%2Fdebugging%2F_expressions.py","raw_url":"https://github.com/DataDog/dd-trace-py/raw/2eb060881fdd94f4f717ae19549b598317b74d30/ddtrace%2Fdebugging%2F_expressions.py","contents_url":"https://api.github.com/repos/DataDog/dd-trace-py/contents/ddtrace%2Fdebugging%2F_expressions.py?ref=2eb060881fdd94f4f717ae19549b598317b74d30","patch":"@@ + -292,8 +292,8 @@ def _compile_operation(ast):\n \n def _compile_literal(ast):\n # + type: (DDASTType) -> Optional[List[Instr]]\n- # literal => | + true | false | \"string\"\n- if not isinstance(ast, (str, int, float, bool)):\n+ # + literal => | true | false | \"string\" | null\n+ if not (isinstance(ast, + (str, int, float, bool)) or ast is None):\n return None\n \n return + [Instr(\"LOAD_CONST\", ast)]"},{"sha":"b4517ad79f67a2b362360ae8e7e0b0b3fa2e4ea8","filename":"releasenotes/notes/fix-debugger-expressions-none-literal-30f3328d2e386f40.yaml","status":"added","additions":4,"deletions":0,"changes":4,"blob_url":"https://github.com/DataDog/dd-trace-py/blob/2eb060881fdd94f4f717ae19549b598317b74d30/releasenotes%2Fnotes%2Ffix-debugger-expressions-none-literal-30f3328d2e386f40.yaml","raw_url":"https://github.com/DataDog/dd-trace-py/raw/2eb060881fdd94f4f717ae19549b598317b74d30/releasenotes%2Fnotes%2Ffix-debugger-expressions-none-literal-30f3328d2e386f40.yaml","contents_url":"https://api.github.com/repos/DataDog/dd-trace-py/contents/releasenotes%2Fnotes%2Ffix-debugger-expressions-none-literal-30f3328d2e386f40.yaml?ref=2eb060881fdd94f4f717ae19549b598317b74d30","patch":"@@ + -0,0 +1,4 @@\n+---\n+fixes:\n+ - |\n+ dynamic instrumentation: handle + null literal in conditions and 
expressions."},{"sha":"3c4d96fe66b871238c02651af82d43a1ad8085c3","filename":"tests/debugging/test_expressions.py","status":"modified","additions":1,"deletions":0,"changes":1,"blob_url":"https://github.com/DataDog/dd-trace-py/blob/2eb060881fdd94f4f717ae19549b598317b74d30/tests%2Fdebugging%2Ftest_expressions.py","raw_url":"https://github.com/DataDog/dd-trace-py/raw/2eb060881fdd94f4f717ae19549b598317b74d30/tests%2Fdebugging%2Ftest_expressions.py","contents_url":"https://api.github.com/repos/DataDog/dd-trace-py/contents/tests%2Fdebugging%2Ftest_expressions.py?ref=2eb060881fdd94f4f717ae19549b598317b74d30","patch":"@@ + -72,6 +72,7 @@ def __getitem__(self, name):\n # Test argument predicates + and operations\n ({\"contains\": [{\"ref\": \"payload\"}, \"hello\"]}, + {\"payload\": \"hello world\"}, True),\n ({\"eq\": [{\"ref\": \"hits\"}, + True]}, {\"hits\": True}, True),\n+ ({\"eq\": [{\"ref\": \"hits\"}, + None]}, {\"hits\": None}, True),\n ({\"substring\": [{\"ref\": \"payload\"}, + 4, 7]}, {\"payload\": \"hello world\"}, \"hello world\"[4:7]),\n ({\"any\": + [{\"ref\": \"collection\"}, {\"isEmpty\": {\"ref\": \"@it\"}}]}, {\"collection\": + [\"foo\", \"bar\", \"\"]}, True),\n ({\"startsWith\": [{\"ref\": \"local_string\"}, + \"hello\"]}, {\"local_string\": \"hello world!\"}, True),"}]' + headers: + Access-Control-Allow-Origin: + - '*' + Access-Control-Expose-Headers: + - ETag, Link, Location, Retry-After, X-GitHub-OTP, X-RateLimit-Limit, X-RateLimit-Remaining, + X-RateLimit-Used, X-RateLimit-Resource, X-RateLimit-Reset, X-OAuth-Scopes, + X-Accepted-OAuth-Scopes, X-Poll-Interval, X-GitHub-Media-Type, X-GitHub-SSO, + X-GitHub-Request-Id, Deprecation, Sunset + Cache-Control: + - private, max-age=60, s-maxage=60 + Content-Length: + - '3264' + Content-Security-Policy: + - default-src 'none' + Content-Type: + - application/json; charset=utf-8 + Date: + - Thu, 12 Dec 2024 14:42:43 GMT + ETag: + - '"85a10accfc7f3330efa4961171936a0e3ea39a94e59a1811461b17a9a610bdb4"' + 
Last-Modified: + - Sun, 08 Dec 2024 16:19:43 GMT + Referrer-Policy: + - origin-when-cross-origin, strict-origin-when-cross-origin + Server: + - github.com + Strict-Transport-Security: + - max-age=31536000; includeSubdomains; preload + Vary: + - Accept, Authorization, Cookie, X-GitHub-OTP,Accept-Encoding, Accept, X-Requested-With + X-Accepted-OAuth-Scopes: + - '' + X-Content-Type-Options: + - nosniff + X-Frame-Options: + - deny + X-GitHub-Media-Type: + - github.v3; format=json + X-GitHub-Request-Id: + - ED4D:111D81:1B766DC:3695A0A:675AF662 + X-OAuth-Scopes: + - delete:packages, gist, read:org, read:packages, repo, workflow + X-RateLimit-Limit: + - '5000' + X-RateLimit-Remaining: + - '4894' + X-RateLimit-Reset: + - '1734015073' + X-RateLimit-Resource: + - core + X-RateLimit-Used: + - '106' + X-XSS-Protection: + - '0' + connection: + - close + x-github-api-version-selected: + - '2022-11-28' + x-oauth-client-id: + - 178c6fc778ccc68e1d6a + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - application/vnd.github+json + Connection: + - close + Host: + - api.github.com + method: GET + uri: https://api.github.com/repos/datadog/dd-trace-py/pulls/6388/files?page=2 + response: + body: + string: '[]' + headers: + Access-Control-Allow-Origin: + - '*' + Access-Control-Expose-Headers: + - ETag, Link, Location, Retry-After, X-GitHub-OTP, X-RateLimit-Limit, X-RateLimit-Remaining, + X-RateLimit-Used, X-RateLimit-Resource, X-RateLimit-Reset, X-OAuth-Scopes, + X-Accepted-OAuth-Scopes, X-Poll-Interval, X-GitHub-Media-Type, X-GitHub-SSO, + X-GitHub-Request-Id, Deprecation, Sunset + Cache-Control: + - private, max-age=60, s-maxage=60 + Content-Length: + - '2' + Content-Security-Policy: + - default-src 'none' + Content-Type: + - application/json; charset=utf-8 + Date: + - Thu, 12 Dec 2024 14:42:43 GMT + ETag: + - '"4acd3c336ca9625e24fba0a2ea9cad06cf4693ace7e76d92c8a9a05f03c7b0cd"' + Last-Modified: + - Sun, 08 Dec 2024 16:19:43 GMT + Link: + - ; rel="prev", 
+ ; rel="last", + ; rel="first" + Referrer-Policy: + - origin-when-cross-origin, strict-origin-when-cross-origin + Server: + - github.com + Strict-Transport-Security: + - max-age=31536000; includeSubdomains; preload + Vary: + - Accept, Authorization, Cookie, X-GitHub-OTP,Accept-Encoding, Accept, X-Requested-With + X-Accepted-OAuth-Scopes: + - '' + X-Content-Type-Options: + - nosniff + X-Frame-Options: + - deny + X-GitHub-Media-Type: + - github.v3; format=json + X-GitHub-Request-Id: + - ED4E:1C31C4:1AEFA31:3584AFC:675AF663 + X-OAuth-Scopes: + - delete:packages, gist, read:org, read:packages, repo, workflow + X-RateLimit-Limit: + - '5000' + X-RateLimit-Remaining: + - '4893' + X-RateLimit-Reset: + - '1734015073' + X-RateLimit-Resource: + - core + X-RateLimit-Used: + - '107' + X-XSS-Protection: + - '0' + connection: + - close + x-github-api-version-selected: + - '2022-11-28' + x-oauth-client-id: + - 178c6fc778ccc68e1d6a + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - application/vnd.github+json + Connection: + - close + Host: + - api.github.com + method: GET + uri: https://api.github.com/repos/datadog/dd-trace-py/pulls/11690/files?page=1 + response: + body: + string: '[{"sha":"ce795db4fe24584e0a3c105f6f130071b1292cbe","filename":".github/workflows/system-tests.yml","status":"modified","additions":2,"deletions":2,"changes":4,"blob_url":"https://github.com/DataDog/dd-trace-py/blob/a6675d3799af44382bd5b677c56a94843a6433aa/.github%2Fworkflows%2Fsystem-tests.yml","raw_url":"https://github.com/DataDog/dd-trace-py/raw/a6675d3799af44382bd5b677c56a94843a6433aa/.github%2Fworkflows%2Fsystem-tests.yml","contents_url":"https://api.github.com/repos/DataDog/dd-trace-py/contents/.github%2Fworkflows%2Fsystem-tests.yml?ref=a6675d3799af44382bd5b677c56a94843a6433aa","patch":"@@ + -54,7 +54,7 @@ jobs:\n # system-tests requires an API_KEY, but it does + not have to be a valid key, as long as we don''t run a scenario\n # + that make assertion on backend 
data. Using a fake key allow to run system + tests on PR originating from forks.\n # If ever it''s needed, a valid + key exists in the repo, using ${{ secrets.DD_API_KEY }}\n- DD_API_KEY: + 1234567890abcdef1234567890abcdef\n+ DD_API_KEY: ${{ secrets.FAKE_DD_API_KEY + }}\n CMAKE_BUILD_PARALLEL_LEVEL: 12\n SYSTEM_TESTS_AWS_ACCESS_KEY_ID: + ${{ secrets.IDM_AWS_ACCESS_KEY_ID }}\n SYSTEM_TESTS_AWS_SECRET_ACCESS_KEY: + ${{ secrets.IDM_AWS_SECRET_ACCESS_KEY }}\n@@ -106,7 +106,7 @@ jobs:\n # + system-tests requires an API_KEY, but it does not have to be a valid key, + as long as we don''t run a scenario\n # that make assertion on backend + data. Using a fake key allow to run system tests on PR originating from forks.\n # + If ever it''s needed, a valid key exists in the repo, using ${{ secrets.DD_API_KEY + }}\n- DD_API_KEY: 1234567890abcdef1234567890abcdef\n+ DD_API_KEY: + ${{ secrets.FAKE_DD_API_KEY }}\n CMAKE_BUILD_PARALLEL_LEVEL: 12\n SYSTEM_TESTS_AWS_ACCESS_KEY_ID: + ${{ secrets.IDM_AWS_ACCESS_KEY_ID }}\n SYSTEM_TESTS_AWS_SECRET_ACCESS_KEY: + ${{ secrets.IDM_AWS_SECRET_ACCESS_KEY }}"}]' + headers: + Access-Control-Allow-Origin: + - '*' + Access-Control-Expose-Headers: + - ETag, Link, Location, Retry-After, X-GitHub-OTP, X-RateLimit-Limit, X-RateLimit-Remaining, + X-RateLimit-Used, X-RateLimit-Resource, X-RateLimit-Reset, X-OAuth-Scopes, + X-Accepted-OAuth-Scopes, X-Poll-Interval, X-GitHub-Media-Type, X-GitHub-SSO, + X-GitHub-Request-Id, Deprecation, Sunset + Cache-Control: + - private, max-age=60, s-maxage=60 + Content-Length: + - '1930' + Content-Security-Policy: + - default-src 'none' + Content-Type: + - application/json; charset=utf-8 + Date: + - Thu, 12 Dec 2024 14:42:43 GMT + ETag: + - '"e91026bdc9aa216ff163739444e03dfcf4e719131166fd717d6e5a7eafbd54fe"' + Last-Modified: + - Thu, 12 Dec 2024 14:26:20 GMT + Referrer-Policy: + - origin-when-cross-origin, strict-origin-when-cross-origin + Server: + - github.com + Strict-Transport-Security: + - max-age=31536000; 
includeSubdomains; preload + Vary: + - Accept, Authorization, Cookie, X-GitHub-OTP,Accept-Encoding, Accept, X-Requested-With + X-Accepted-OAuth-Scopes: + - '' + X-Content-Type-Options: + - nosniff + X-Frame-Options: + - deny + X-GitHub-Media-Type: + - github.v3; format=json + X-GitHub-Request-Id: + - ED50:122321:1C06DEB:37B36A8:675AF663 + X-OAuth-Scopes: + - delete:packages, gist, read:org, read:packages, repo, workflow + X-RateLimit-Limit: + - '5000' + X-RateLimit-Remaining: + - '4892' + X-RateLimit-Reset: + - '1734015073' + X-RateLimit-Resource: + - core + X-RateLimit-Used: + - '108' + X-XSS-Protection: + - '0' + connection: + - close + x-github-api-version-selected: + - '2022-11-28' + x-oauth-client-id: + - 178c6fc778ccc68e1d6a + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - application/vnd.github+json + Connection: + - close + Host: + - api.github.com + method: GET + uri: https://api.github.com/repos/datadog/dd-trace-py/pulls/11690/files?page=2 + response: + body: + string: '[]' + headers: + Access-Control-Allow-Origin: + - '*' + Access-Control-Expose-Headers: + - ETag, Link, Location, Retry-After, X-GitHub-OTP, X-RateLimit-Limit, X-RateLimit-Remaining, + X-RateLimit-Used, X-RateLimit-Resource, X-RateLimit-Reset, X-OAuth-Scopes, + X-Accepted-OAuth-Scopes, X-Poll-Interval, X-GitHub-Media-Type, X-GitHub-SSO, + X-GitHub-Request-Id, Deprecation, Sunset + Cache-Control: + - private, max-age=60, s-maxage=60 + Content-Length: + - '2' + Content-Security-Policy: + - default-src 'none' + Content-Type: + - application/json; charset=utf-8 + Date: + - Thu, 12 Dec 2024 14:42:44 GMT + ETag: + - '"4acd3c336ca9625e24fba0a2ea9cad06cf4693ace7e76d92c8a9a05f03c7b0cd"' + Last-Modified: + - Thu, 12 Dec 2024 14:26:20 GMT + Link: + - ; rel="prev", + ; rel="last", + ; rel="first" + Referrer-Policy: + - origin-when-cross-origin, strict-origin-when-cross-origin + Server: + - github.com + Strict-Transport-Security: + - max-age=31536000; 
includeSubdomains; preload + Vary: + - Accept, Authorization, Cookie, X-GitHub-OTP,Accept-Encoding, Accept, X-Requested-With + X-Accepted-OAuth-Scopes: + - '' + X-Content-Type-Options: + - nosniff + X-Frame-Options: + - deny + X-GitHub-Media-Type: + - github.v3; format=json + X-GitHub-Request-Id: + - ED52:23893A:1B6422E:3678EE8:675AF664 + X-OAuth-Scopes: + - delete:packages, gist, read:org, read:packages, repo, workflow + X-RateLimit-Limit: + - '5000' + X-RateLimit-Remaining: + - '4891' + X-RateLimit-Reset: + - '1734015073' + X-RateLimit-Resource: + - core + X-RateLimit-Used: + - '109' + X-XSS-Protection: + - '0' + connection: + - close + x-github-api-version-selected: + - '2022-11-28' + x-oauth-client-id: + - 178c6fc778ccc68e1d6a + status: + code: 200 + message: OK +version: 1 diff --git a/setup.py b/setup.py index 6b097d46f6b..dfaa5f6bf97 100644 --- a/setup.py +++ b/setup.py @@ -57,7 +57,7 @@ CURRENT_OS = platform.system() -LIBDDWAF_VERSION = "1.21.0" +LIBDDWAF_VERSION = "1.22.0" # DEV: update this accordingly when src/core upgrades libdatadog dependency. # libdatadog v14.1.0 requires rust 1.76. 
@@ -510,8 +510,11 @@ def get_exts_for(name): "ddtrace/profiling/collector/_memalloc.c", "ddtrace/profiling/collector/_memalloc_tb.c", "ddtrace/profiling/collector/_memalloc_heap.c", + "ddtrace/profiling/collector/_memalloc_reentrant.c", ], - extra_compile_args=debug_compile_args, + extra_compile_args=debug_compile_args + ["-D_POSIX_C_SOURCE=200809L", "-std=c11"] + if CURRENT_OS != "Windows" + else ["/std:c11"], ), Extension( "ddtrace.internal._threads", @@ -527,7 +530,7 @@ def get_exts_for(name): sources=[ "ddtrace/appsec/_iast/_stacktrace.c", ], - extra_compile_args=debug_compile_args, + extra_compile_args=extra_compile_args + debug_compile_args, ) ) @@ -553,7 +556,7 @@ def get_exts_for(name): ) # Echion doesn't build on 3.7, so just skip it outright for now - if sys.version_info >= (3, 8): + if sys.version_info >= (3, 8) and sys.version_info < (3, 13): ext_modules.append( CMakeExtension( "ddtrace.internal.datadog.profiling.stack_v2._stack_v2", @@ -612,7 +615,11 @@ def get_exts_for(name): "ddtrace.profiling.collector.stack", sources=["ddtrace/profiling/collector/stack.pyx"], language="c", - extra_compile_args=extra_compile_args, + # cython generated code errors on build in toolchains that are strict about int->ptr conversion + # OTOH, the MSVC toolchain is different. 
In a perfect world we'd deduce the underlying toolchain and + # emit the right flags, but as a compromise we assume Windows implies MSVC and everything else is on a + # GNU-like toolchain + extra_compile_args=extra_compile_args + (["-Wno-int-conversion"] if CURRENT_OS != "Windows" else []), ), Cython.Distutils.Extension( "ddtrace.profiling.collector._traceback", diff --git a/src/core/Cargo.lock b/src/core/Cargo.lock index 27f510e5ddc..f840798f96e 100644 --- a/src/core/Cargo.lock +++ b/src/core/Cargo.lock @@ -14,12 +14,6 @@ version = "1.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0c4b4d0bd25bd0b74681c0ad21497610ce1b7c91b1022cd21c80c6fbdd9476b0" -[[package]] -name = "bitflags" -version = "2.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cf4b9d6a944f767f8e5e0db018570623c85f3d925ac718db4e06d0187adb21c1" - [[package]] name = "bytes" version = "1.6.1" @@ -46,7 +40,7 @@ version = "0.1.0" dependencies = [ "datadog-ddsketch", "pyo3", - "pyo3-build-config", + "pyo3-build-config 0.21.2", ] [[package]] @@ -57,9 +51,9 @@ checksum = "60b1af1c220855b6ceac025d3f6ecdd2b7c4894bfe9cd9bda4fbb4bc7c0d4cf0" [[package]] name = "heck" -version = "0.4.1" +version = "0.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "95505c38b4572b2d910cecb0281560f54b440a19336cbbcb27bf6ce6adc6f5a8" +checksum = "2304e00983f87ffb38b55b444b5e3b60a884b5d30c0fca7d82fe33449bbe55ea" [[package]] name = "indoc" @@ -82,16 +76,6 @@ version = "0.2.154" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ae743338b92ff9146ce83992f766a31066a91a8c84a45e0e9f21e7cf6de6d346" -[[package]] -name = "lock_api" -version = "0.4.12" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "07af8b9cdd281b7915f413fa73f29ebd5d55d0d3f0155584dade1ff18cea1b17" -dependencies = [ - "autocfg", - "scopeguard", -] - [[package]] name = "memoffset" version = "0.9.1" @@ -107,29 +91,6 @@ version 
= "1.19.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3fdb12b2476b595f9358c5161aa467c2438859caa136dec86c26fdd2efe17b92" -[[package]] -name = "parking_lot" -version = "0.12.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7e4af0ca4f6caed20e900d564c242b8e5d4903fdacf31d3daf527b66fe6f42fb" -dependencies = [ - "lock_api", - "parking_lot_core", -] - -[[package]] -name = "parking_lot_core" -version = "0.9.10" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1e401f977ab385c9e4e3ab30627d6f26d00e2c73eef317493c4ec6d468726cf8" -dependencies = [ - "cfg-if", - "libc", - "redox_syscall", - "smallvec", - "windows-targets", -] - [[package]] name = "portable-atomic" version = "1.6.0" @@ -170,17 +131,17 @@ dependencies = [ [[package]] name = "pyo3" -version = "0.21.2" +version = "0.22.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a5e00b96a521718e08e03b1a622f01c8a8deb50719335de3f60b3b3950f069d8" +checksum = "15ee168e30649f7f234c3d49ef5a7a6cbf5134289bc46c29ff3155fa3221c225" dependencies = [ "cfg-if", "indoc", "libc", "memoffset", - "parking_lot", + "once_cell", "portable-atomic", - "pyo3-build-config", + "pyo3-build-config 0.22.3", "pyo3-ffi", "pyo3-macros", "unindent", @@ -196,21 +157,31 @@ dependencies = [ "target-lexicon", ] +[[package]] +name = "pyo3-build-config" +version = "0.22.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e61cef80755fe9e46bb8a0b8f20752ca7676dcc07a5277d8b7768c6172e529b3" +dependencies = [ + "once_cell", + "target-lexicon", +] + [[package]] name = "pyo3-ffi" -version = "0.21.2" +version = "0.22.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "01be5843dc60b916ab4dad1dca6d20b9b4e6ddc8e15f50c47fe6d85f1fb97403" +checksum = "67ce096073ec5405f5ee2b8b31f03a68e02aa10d5d4f565eca04acc41931fa1c" dependencies = [ "libc", - "pyo3-build-config", + "pyo3-build-config 0.22.3", ] 
[[package]] name = "pyo3-macros" -version = "0.21.2" +version = "0.22.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "77b34069fc0682e11b31dbd10321cbf94808394c56fd996796ce45217dfac53c" +checksum = "2440c6d12bc8f3ae39f1e775266fa5122fd0c8891ce7520fa6048e683ad3de28" dependencies = [ "proc-macro2", "pyo3-macros-backend", @@ -220,13 +191,13 @@ dependencies = [ [[package]] name = "pyo3-macros-backend" -version = "0.21.2" +version = "0.22.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "08260721f32db5e1a5beae69a55553f56b99bd0e1c3e6e0a5e8851a9d0f5a85c" +checksum = "1be962f0e06da8f8465729ea2cb71a416d2257dff56cbe40a70d3e62a93ae5d1" dependencies = [ "heck", "proc-macro2", - "pyo3-build-config", + "pyo3-build-config 0.22.3", "quote", "syn 2.0.61", ] @@ -240,27 +211,6 @@ dependencies = [ "proc-macro2", ] -[[package]] -name = "redox_syscall" -version = "0.5.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "469052894dcb553421e483e4209ee581a45100d31b4018de03e5a7ad86374a7e" -dependencies = [ - "bitflags", -] - -[[package]] -name = "scopeguard" -version = "1.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "94143f37725109f92c262ed2cf5e59bce7498c01bcc1502d7b9afe439a4e9f49" - -[[package]] -name = "smallvec" -version = "1.13.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3c5e1a9a646d36c3599cd173a41282daf47c44583ad367b8e6837255952e5c67" - [[package]] name = "syn" version = "1.0.109" @@ -300,67 +250,3 @@ name = "unindent" version = "0.2.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c7de7d73e1754487cb58364ee906a499937a0dfabd86bcb980fa99ec8c8fa2ce" - -[[package]] -name = "windows-targets" -version = "0.52.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6f0713a46559409d202e70e28227288446bf7841d3211583a4b53e3f6d96e7eb" -dependencies = [ - 
"windows_aarch64_gnullvm", - "windows_aarch64_msvc", - "windows_i686_gnu", - "windows_i686_gnullvm", - "windows_i686_msvc", - "windows_x86_64_gnu", - "windows_x86_64_gnullvm", - "windows_x86_64_msvc", -] - -[[package]] -name = "windows_aarch64_gnullvm" -version = "0.52.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7088eed71e8b8dda258ecc8bac5fb1153c5cffaf2578fc8ff5d61e23578d3263" - -[[package]] -name = "windows_aarch64_msvc" -version = "0.52.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9985fd1504e250c615ca5f281c3f7a6da76213ebd5ccc9561496568a2752afb6" - -[[package]] -name = "windows_i686_gnu" -version = "0.52.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "88ba073cf16d5372720ec942a8ccbf61626074c6d4dd2e745299726ce8b89670" - -[[package]] -name = "windows_i686_gnullvm" -version = "0.52.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "87f4261229030a858f36b459e748ae97545d6f1ec60e5e0d6a3d32e0dc232ee9" - -[[package]] -name = "windows_i686_msvc" -version = "0.52.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "db3c2bf3d13d5b658be73463284eaf12830ac9a26a90c717b7f771dfe97487bf" - -[[package]] -name = "windows_x86_64_gnu" -version = "0.52.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4e4246f76bdeff09eb48875a0fd3e2af6aada79d409d33011886d3e1581517d9" - -[[package]] -name = "windows_x86_64_gnullvm" -version = "0.52.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "852298e482cd67c356ddd9570386e2862b5673c85bd5f88df9ab6802b334c596" - -[[package]] -name = "windows_x86_64_msvc" -version = "0.52.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bec47e5bfd1bff0eeaf6d8b485cc1074891a197ab4225d504cb7a1ab88b02bf0" diff --git a/src/core/Cargo.toml b/src/core/Cargo.toml index 3353bc8b504..94eeb6d7a3e 100644 --- 
a/src/core/Cargo.toml +++ b/src/core/Cargo.toml @@ -9,7 +9,7 @@ strip = "debuginfo" opt-level = 3 [dependencies] -pyo3 = { version = "0.21.2", features = ["extension-module"] } +pyo3 = { version = "0.22.3", features = ["extension-module"] } datadog-ddsketch = { git = "https://github.com/DataDog/libdatadog", rev = "v14.3.1" } [build-dependencies] diff --git a/tests/appsec/appsec/test_asm_standalone.py b/tests/appsec/appsec/test_asm_standalone.py index 31624724069..6841314cea8 100644 --- a/tests/appsec/appsec/test_asm_standalone.py +++ b/tests/appsec/appsec/test_asm_standalone.py @@ -1,41 +1,145 @@ #!/usr/bin/env python3 +import copy + import pytest +import ddtrace from ddtrace.contrib.trace_utils import set_http_meta from ddtrace.ext import SpanTypes +from tests.utils import override_env @pytest.fixture( params=[ - {"iast_enabled": True, "appsec_enabled": True, "appsec_standalone_enabled": True}, - {"iast_enabled": True, "appsec_enabled": True, "appsec_standalone_enabled": False}, - {"iast_enabled": True, "appsec_enabled": False, "appsec_standalone_enabled": False}, - {"iast_enabled": True, "appsec_enabled": False, "appsec_standalone_enabled": True}, - {"iast_enabled": False, "appsec_enabled": True, "appsec_standalone_enabled": True}, - {"iast_enabled": False, "appsec_enabled": True, "appsec_standalone_enabled": False}, - {"iast_enabled": False, "appsec_enabled": False, "appsec_standalone_enabled": False}, - {"iast_enabled": False, "appsec_enabled": False, "appsec_standalone_enabled": True}, - {"appsec_enabled": True}, - {"appsec_enabled": False}, - {"iast_enabled": True}, - {"iast_enabled": False}, + {"DD_APPSEC_SCA_ENABLED": "1", "iast_enabled": True, "appsec_enabled": True, "appsec_standalone_enabled": True}, + { + "DD_APPSEC_SCA_ENABLED": "1", + "iast_enabled": True, + "appsec_enabled": True, + "appsec_standalone_enabled": False, + }, + { + "DD_APPSEC_SCA_ENABLED": "1", + "iast_enabled": True, + "appsec_enabled": False, + "appsec_standalone_enabled": False, + }, 
+ { + "DD_APPSEC_SCA_ENABLED": "1", + "iast_enabled": True, + "appsec_enabled": False, + "appsec_standalone_enabled": True, + }, + { + "DD_APPSEC_SCA_ENABLED": "1", + "iast_enabled": False, + "appsec_enabled": True, + "appsec_standalone_enabled": True, + }, + { + "DD_APPSEC_SCA_ENABLED": "1", + "iast_enabled": False, + "appsec_enabled": True, + "appsec_standalone_enabled": False, + }, + { + "DD_APPSEC_SCA_ENABLED": "1", + "iast_enabled": False, + "appsec_enabled": False, + "appsec_standalone_enabled": False, + }, + { + "DD_APPSEC_SCA_ENABLED": "1", + "iast_enabled": False, + "appsec_enabled": False, + "appsec_standalone_enabled": True, + }, + {"DD_APPSEC_SCA_ENABLED": "1", "appsec_enabled": True}, + {"DD_APPSEC_SCA_ENABLED": "1", "appsec_enabled": False}, + {"DD_APPSEC_SCA_ENABLED": "1", "iast_enabled": True}, + {"DD_APPSEC_SCA_ENABLED": "1", "iast_enabled": False}, + {"DD_APPSEC_SCA_ENABLED": "0", "iast_enabled": True, "appsec_enabled": True, "appsec_standalone_enabled": True}, + { + "DD_APPSEC_SCA_ENABLED": "0", + "iast_enabled": True, + "appsec_enabled": True, + "appsec_standalone_enabled": False, + }, + { + "DD_APPSEC_SCA_ENABLED": "0", + "iast_enabled": True, + "appsec_enabled": False, + "appsec_standalone_enabled": False, + }, + { + "DD_APPSEC_SCA_ENABLED": "0", + "iast_enabled": True, + "appsec_enabled": False, + "appsec_standalone_enabled": True, + }, + { + "DD_APPSEC_SCA_ENABLED": "0", + "iast_enabled": False, + "appsec_enabled": True, + "appsec_standalone_enabled": True, + }, + { + "DD_APPSEC_SCA_ENABLED": "0", + "iast_enabled": False, + "appsec_enabled": True, + "appsec_standalone_enabled": False, + }, + { + "DD_APPSEC_SCA_ENABLED": "0", + "iast_enabled": False, + "appsec_enabled": False, + "appsec_standalone_enabled": False, + }, + { + "DD_APPSEC_SCA_ENABLED": "0", + "iast_enabled": False, + "appsec_enabled": False, + "appsec_standalone_enabled": True, + }, + {"DD_APPSEC_SCA_ENABLED": "0", "appsec_enabled": True}, + {"DD_APPSEC_SCA_ENABLED": "0", 
"appsec_enabled": False}, + {"DD_APPSEC_SCA_ENABLED": "0", "iast_enabled": True}, + {"DD_APPSEC_SCA_ENABLED": "0", "iast_enabled": False}, ] ) def tracer_appsec_standalone(request, tracer): - tracer.configure(api_version="v0.4", **request.param) - yield tracer, request.param + new_env = {k: v for k, v in request.param.items() if k.startswith("DD_")} + with override_env(new_env): + # Reset the config so it picks up the env var value + ddtrace.config._reset() + + # Copy the params to a new dict, including the env var + request_param_copy = copy.deepcopy(request.param) + + # Remove the environment variables as they are unexpected args for the tracer configure + request.param.pop("DD_APPSEC_SCA_ENABLED", None) + tracer.configure(api_version="v0.4", **request.param) + + yield tracer, request_param_copy + # Reset tracer configuration + ddtrace.config._reset() tracer.configure(api_version="v0.4", appsec_enabled=False, appsec_standalone_enabled=False, iast_enabled=False) def test_appsec_standalone_apm_enabled_metric(tracer_appsec_standalone): tracer, args = tracer_appsec_standalone + with tracer.trace("test", span_type=SpanTypes.WEB) as span: set_http_meta(span, {}, raw_uri="http://example.com/.git", status_code="404") if args.get("appsec_standalone_enabled", None) and ( - args.get("appsec_enabled", None) or args.get("iast_enabled", None) + args.get("appsec_enabled", None) + or args.get("iast_enabled", None) + or args.get("DD_APPSEC_SCA_ENABLED", "0") == "1" ): + assert tracer._apm_opt_out is True assert span.get_metric("_dd.apm.enabled") == 0.0 else: + assert tracer._apm_opt_out is False assert span.get_metric("_dd.apm.enabled") is None diff --git a/tests/appsec/contrib_appsec/conftest.py b/tests/appsec/contrib_appsec/conftest.py index 9773ef124c9..74ad0f655ef 100644 --- a/tests/appsec/contrib_appsec/conftest.py +++ b/tests/appsec/contrib_appsec/conftest.py @@ -45,8 +45,17 @@ def check_waf_timeout(request): @pytest.fixture -def get_tag(root_span): - yield lambda name: 
root_span().get_tag(name) +def get_tag(test_spans, root_span): + # checking both root spans and web spans for the tag + def get(name): + for span in test_spans.spans: + if span.parent_id is None or span.span_type == "web": + res = span.get_tag(name) + if res is not None: + return res + return root_span().get_tag(name) + + yield get @pytest.fixture diff --git a/tests/appsec/contrib_appsec/utils.py b/tests/appsec/contrib_appsec/utils.py index 0712e6d6fd8..0d195df764e 100644 --- a/tests/appsec/contrib_appsec/utils.py +++ b/tests/appsec/contrib_appsec/utils.py @@ -62,9 +62,13 @@ def location(self, response) -> str: def body(self, response) -> str: raise NotImplementedError + def get_stack_trace(self, root_span, namespace): + appsec_traces = root_span().get_struct_tag(asm_constants.STACK_TRACE.TAG) or {} + stacks = appsec_traces.get(namespace, []) + return stacks + def check_for_stack_trace(self, root_span): - appsec_traces = root_span().get_struct_tag(asm_constants.EXPLOIT_PREVENTION.STACK_TRACES) or {} - exploit = appsec_traces.get("exploit", []) + exploit = self.get_stack_trace(root_span, "exploit") stack_ids = sorted(set(t["id"] for t in exploit)) triggers = get_triggers(root_span()) stack_id_in_triggers = sorted(set(t["stack_id"] for t in (triggers or []) if "stack_id" in t)) @@ -1385,9 +1389,9 @@ def validate_top_function(trace): # there may have been multiple evaluations of other rules too assert (("rule_type", endpoint), ("waf_version", DDWAF_VERSION)) in evals if action_level == 2: - assert get_tag("rasp.request.done") is None + assert get_tag("rasp.request.done") is None, get_tag("rasp.request.done") else: - assert get_tag("rasp.request.done") == endpoint + assert get_tag("rasp.request.done") == endpoint, get_tag("rasp.request.done") assert get_metric(APPSEC.RASP_DURATION) is not None assert get_metric(APPSEC.RASP_DURATION_EXT) is not None assert get_metric(APPSEC.RASP_RULE_EVAL) is not None @@ -1398,7 +1402,7 @@ def validate_top_function(trace): assert "rasp" 
not in n assert get_triggers(root_span()) is None assert self.check_for_stack_trace(root_span) == [] - assert get_tag("rasp.request.done") == endpoint + assert get_tag("rasp.request.done") == endpoint, get_tag("rasp.request.done") @pytest.mark.parametrize("asm_enabled", [True, False]) @pytest.mark.parametrize("auto_events_enabled", [True, False]) @@ -1505,21 +1509,22 @@ def test_fingerprinting(self, interface, root_span, get_tag, asm_enabled, user_a assert get_tag(asm_constants.FINGERPRINTING.SESSION) is None def test_iast(self, interface, root_span, get_tag): - if interface.name == "fastapi" and asm_config._iast_enabled: - raise pytest.xfail("fastapi does not fully support IAST for now") - from ddtrace.ext import http - url = "/rasp/command_injection/?cmd=ls" + url = "/rasp/command_injection/?cmd=." self.update_tracer(interface) response = interface.client.get(url) assert self.status(response) == 200 assert get_tag(http.STATUS_CODE) == "200" assert self.body(response).startswith("command_injection endpoint") + stack_traces = self.get_stack_trace(root_span, "vulnerability") if asm_config._iast_enabled: assert get_tag("_dd.iast.json") is not None + # checking for iast stack traces + assert stack_traces else: assert get_tag("_dd.iast.json") is None + assert stack_traces == [] @contextmanager diff --git a/tests/appsec/iast/aspects/test_add_aspect.py b/tests/appsec/iast/aspects/test_add_aspect.py index db8f9b212c8..f9f86a4413c 100644 --- a/tests/appsec/iast/aspects/test_add_aspect.py +++ b/tests/appsec/iast/aspects/test_add_aspect.py @@ -15,6 +15,7 @@ from ddtrace.appsec._iast._taint_tracking.aspects import add_aspect from tests.appsec.iast.conftest import _end_iast_context_and_oce from tests.appsec.iast.conftest import _start_iast_context_and_oce +from tests.utils import override_env from tests.utils import override_global_config @@ -319,7 +320,9 @@ def test_propagate_ranges_with_no_context(caplog): ) reset_context() - with 
override_global_config(dict(_iast_debug=True)), caplog.at_level(logging.DEBUG): + with override_env({"_DD_IAST_USE_ROOT_SPAN": "false"}), override_global_config( + dict(_iast_debug=True) + ), caplog.at_level(logging.DEBUG): result_2 = add_aspect(result, "another_string") create_context() diff --git a/tests/appsec/iast/conftest.py b/tests/appsec/iast/conftest.py index a277e912829..3daa3611f51 100644 --- a/tests/appsec/iast/conftest.py +++ b/tests/appsec/iast/conftest.py @@ -142,7 +142,9 @@ def check_native_code_exception_in_each_python_aspect_test(request, caplog): if "skip_iast_check_logs" in request.keywords: yield else: - with override_global_config(dict(_iast_debug=True)), caplog.at_level(logging.DEBUG): + with override_env({"_DD_IAST_USE_ROOT_SPAN": "false"}), override_global_config( + dict(_iast_debug=True) + ), caplog.at_level(logging.DEBUG): yield log_messages = [record.message for record in caplog.get_records("call")] diff --git a/tests/appsec/iast/taint_tracking/test_native_taint_range.py b/tests/appsec/iast/taint_tracking/test_native_taint_range.py index d1683b5ffb4..00079d7772b 100644 --- a/tests/appsec/iast/taint_tracking/test_native_taint_range.py +++ b/tests/appsec/iast/taint_tracking/test_native_taint_range.py @@ -32,6 +32,7 @@ from ddtrace.appsec._iast._taint_tracking.aspects import format_aspect from ddtrace.appsec._iast._taint_tracking.aspects import join_aspect from tests.appsec.iast.conftest import IAST_VALID_LOG +from tests.utils import override_env from tests.utils import override_global_config @@ -499,7 +500,9 @@ def test_race_conditions_reset_contexts_threads(caplog, telemetry_writer): """we want to validate context is working correctly among multiple request and no race condition creating and destroying contexts """ - with override_global_config(dict(_iast_debug=True)), caplog.at_level(logging.DEBUG): + with override_env({"_DD_IAST_USE_ROOT_SPAN": "false"}), override_global_config( + dict(_iast_debug=True) + ), 
caplog.at_level(logging.DEBUG): pool = ThreadPool(processes=3) results_async = [pool.apply_async(reset_contexts_loop) for _ in range(70)] _ = [res.get() for res in results_async] diff --git a/tests/appsec/iast/taint_tracking/test_taint_tracking.py b/tests/appsec/iast/taint_tracking/test_taint_tracking.py index 90d9b0c064a..ac3d009633f 100644 --- a/tests/appsec/iast/taint_tracking/test_taint_tracking.py +++ b/tests/appsec/iast/taint_tracking/test_taint_tracking.py @@ -47,7 +47,9 @@ def test_taint_object_with_no_context_should_be_noop(): @pytest.mark.skip_iast_check_logs def test_propagate_ranges_with_no_context(caplog): reset_context() - with override_global_config(dict(_iast_debug=True)), caplog.at_level(logging.DEBUG): + with override_env({"_DD_IAST_USE_ROOT_SPAN": "false"}), override_global_config( + dict(_iast_debug=True) + ), caplog.at_level(logging.DEBUG): string_input = taint_pyobject( pyobject="abcde", source_name="abcde", source_value="abcde", source_origin=OriginType.PARAMETER ) diff --git a/tests/appsec/iast/test_overhead_control_engine.py b/tests/appsec/iast/test_overhead_control_engine.py index 318f1a2104f..1d1d4d11b90 100644 --- a/tests/appsec/iast/test_overhead_control_engine.py +++ b/tests/appsec/iast/test_overhead_control_engine.py @@ -5,8 +5,7 @@ from ddtrace.appsec._iast import oce from ddtrace.appsec._iast._iast_request_context import get_iast_reporter -from ddtrace.appsec._iast._overhead_control_engine import MAX_REQUESTS -from ddtrace.appsec._iast._overhead_control_engine import MAX_VULNERABILITIES_PER_REQUEST +from ddtrace.settings.asm import config as asm_config from tests.utils import override_global_config @@ -55,7 +54,7 @@ def test_oce_max_vulnerabilities_per_request(iast_context_defaults): m.digest() span_report = get_iast_reporter() - assert len(span_report.vulnerabilities) == MAX_VULNERABILITIES_PER_REQUEST + assert len(span_report.vulnerabilities) == asm_config._iast_max_vulnerabilities_per_requests @pytest.mark.skip_iast_check_logs @@ 
-72,7 +71,7 @@ def test_oce_reset_vulnerabilities_report(iast_context_defaults): span_report = get_iast_reporter() - assert len(span_report.vulnerabilities) == MAX_VULNERABILITIES_PER_REQUEST + 1 + assert len(span_report.vulnerabilities) == asm_config._iast_max_vulnerabilities_per_requests + 1 @pytest.mark.skip_iast_check_logs @@ -82,7 +81,7 @@ def test_oce_no_race_conditions_in_span(iast_span_defaults): oc = OverheadControl() oc.reconfigure() - assert oc._request_quota == MAX_REQUESTS + assert oc._request_quota == asm_config._iast_max_concurrent_requests # Request 1 tries to acquire the lock assert oc.acquire_request(iast_span_defaults) is True @@ -148,7 +147,6 @@ def test_oce_concurrent_requests_in_spans(iast_span_defaults): """ import threading - from ddtrace.appsec._iast._overhead_control_engine import MAX_REQUESTS from ddtrace.appsec._iast._overhead_control_engine import OverheadControl oc = OverheadControl() @@ -167,7 +165,7 @@ def test_oce_concurrent_requests_in_spans(iast_span_defaults): results.append(thread.join()) # Ensures quota is always within bounds after multithreading scenario - assert 0 <= oc._request_quota <= MAX_REQUESTS + assert 0 <= oc._request_quota <= asm_config._iast_max_concurrent_requests @pytest.mark.skip_iast_check_logs diff --git a/tests/appsec/iast_packages/test_packages.py b/tests/appsec/iast_packages/test_packages.py index c738eb231b9..86aad989007 100644 --- a/tests/appsec/iast_packages/test_packages.py +++ b/tests/appsec/iast_packages/test_packages.py @@ -495,8 +495,7 @@ def uninstall(self, python_cmd): "d8b5635eb590e078a608e083351288a0", "", import_module_to_validate="multipart.multipart", - # This test is failing in CircleCI because, for some reason, instead of installing version - # 0.0.5, it’s installing the latest version + # This test is failing in CircleCI with the latest version test_import=False, test_propagation=True, ), @@ -573,6 +572,8 @@ def uninstall(self, python_cmd): "Parsed TOML data: {'key': 'value'}", "", 
import_module_to_validate="tomli._parser", + # This test is failing in CircleCI with the latest version + test_import=False, test_propagation=True, ), PackageForTesting( diff --git a/tests/appsec/integrations/pygoat_tests/test_pygoat.py b/tests/appsec/integrations/pygoat_tests/test_pygoat.py index e60d5336b35..f3dd0f173ee 100644 --- a/tests/appsec/integrations/pygoat_tests/test_pygoat.py +++ b/tests/appsec/integrations/pygoat_tests/test_pygoat.py @@ -26,6 +26,7 @@ def client(): agent_client = requests.session() reply = agent_client.get(TESTAGENT_URL + "/start" + TESTAGENT_TOKEN_PARAM, headers=TESTAGENT_HEADERS) + assert reply.status_code == 200 pygoat_client, token = login_to_pygoat() @@ -65,7 +66,7 @@ def get_traces(agent_client: requests.Session) -> requests.Response: def vulnerability_in_traces(vuln_type: str, agent_client: requests.Session) -> bool: time.sleep(5) traces = get_traces(agent_client) - assert traces.status_code == 200 + assert traces.status_code == 200, traces.text traces_list = json.loads(traces.text) class InnerBreakException(Exception): diff --git a/tests/appsec/integrations/test_flask_entrypoint_iast_patches.py b/tests/appsec/integrations/test_flask_entrypoint_iast_patches.py index f0eeb1eb626..4f54bc675c3 100644 --- a/tests/appsec/integrations/test_flask_entrypoint_iast_patches.py +++ b/tests/appsec/integrations/test_flask_entrypoint_iast_patches.py @@ -7,11 +7,19 @@ def test_ddtrace_iast_flask_patch(): import dis import io + import re import sys from tests.utils import override_env from tests.utils import override_global_config + PATTERN = r"""Disassembly of add_test: +(\s*7 0 RESUME 0 +)?\s*8 \d LOAD_GLOBAL \d \((NULL \+ )?_ddtrace_aspects\) +\s*\d+ LOAD_(ATTR|METHOD)\s+\d \(add_aspect\) +\s*\d+ LOAD_FAST 0 \(a\) +\s*\d+ LOAD_FAST 1 \(b\)""" + with override_global_config(dict(_iast_enabled=True)), override_env( dict(DD_IAST_ENABLED="true", DD_IAST_REQUEST_SAMPLING="100") ): @@ -21,10 +29,9 @@ def test_ddtrace_iast_flask_patch(): 
dis.dis(flask_entrypoint, file=dis_output) str_output = dis_output.getvalue() # Should have replaced the binary op with the aspect in add_test: - assert "(add_aspect)" in str_output - assert "BINARY_ADD" in str_output or "BINARY_OP" not in str_output + assert re.search(PATTERN, str_output), str_output # Should have replaced the app.run() with a pass: - assert "Disassembly of run" not in str_output + # assert "Disassembly of run" not in str_output, str_output del sys.modules["tests.appsec.iast.fixtures.entrypoint.app_main_patched"] diff --git a/tests/ci_visibility/api/fake_runner_efd_faulty_session.py b/tests/ci_visibility/api/fake_runner_efd_faulty_session.py index ea841888de6..4937464e74f 100644 --- a/tests/ci_visibility/api/fake_runner_efd_faulty_session.py +++ b/tests/ci_visibility/api/fake_runner_efd_faulty_session.py @@ -1,6 +1,5 @@ -"""Fake test runner where all too many tests are new, so the session is faulty and no retries are done - -Incorporates setting and deleting tags, as well. +"""Fake test runner where too many tests are new, so the session is faulty and no retries are done +. Starts session before discovery (simulating pytest behavior) Comment lines in the test start/finish lines are there for visual distinction. 
@@ -90,18 +89,8 @@ def run_tests(): m2_s1_id = ext_api.TestSuiteId(m2_id, "m2_s1") api.InternalTestSuite.discover(m2_s1_id) - # M2_S1 tests (mostly exist to keep under faulty session threshold) - m2_s1_test_ids = [ - api.InternalTestId(m2_s1_id, "m2_s1_t1"), - api.InternalTestId(m2_s1_id, "m2_s1_t2"), - api.InternalTestId(m2_s1_id, "m2_s1_t3"), - api.InternalTestId(m2_s1_id, "m2_s1_t4"), - api.InternalTestId(m2_s1_id, "m2_s1_t5"), - api.InternalTestId(m2_s1_id, "m2_s1_t6"), - api.InternalTestId(m2_s1_id, "m2_s1_t7"), - api.InternalTestId(m2_s1_id, "m2_s1_t8"), - api.InternalTestId(m2_s1_id, "m2_s1_t9"), - ] + # M2_S1 tests + m2_s1_test_ids = [api.InternalTestId(m2_s1_id, f"m2_s1_t{i}") for i in range(35)] for test_id in m2_s1_test_ids: api.InternalTest.discover(test_id) diff --git a/tests/ci_visibility/test_efd.py b/tests/ci_visibility/test_efd.py index c623e5db329..0e2de603c6a 100644 --- a/tests/ci_visibility/test_efd.py +++ b/tests/ci_visibility/test_efd.py @@ -66,7 +66,7 @@ def test_efd_max_retries(self, efd_settings, efd_test_duration_s, expected_max_r mock_session = mock.Mock() mock_session.efd_is_faulty_session.return_value = False - with mock.patch.multiple(efd_test, get_session=lambda *args: mock_session): + with mock.patch.object(TestVisibilityTest, "get_session", lambda *args: mock_session): efd_test.start() # Overwrite the test duration efd_test._span.start_ns -= efd_test_duration_s * 1e9 @@ -156,7 +156,7 @@ def test_efd_final_status(self, test_result, retry_results: t.Iterable[TestStatu ) mock_session = mock.Mock() mock_session.efd_is_faulty_session.return_value = False - with mock.patch.multiple(efd_test, get_session=lambda *args: mock_session): + with mock.patch.object(TestVisibilityTest, "get_session", lambda *args: mock_session): efd_test.start() efd_test.finish_test(test_result) expected_num_retry = 0 @@ -177,13 +177,87 @@ def test_efd_does_not_retry_if_disabled(self): efd_test.finish_test(TestStatus.FAIL) assert efd_test.efd_should_retry() is 
False - @pytest.mark.parametrize("faulty_session_threshold,expected_faulty", ((None, False), (10, True), (40, False))) - def test_efd_session_faulty(self, faulty_session_threshold, expected_faulty): - """Tests that the number of new tests in a session is correctly used to determine if a session is faulty + @pytest.mark.parametrize( + "faulty_session_threshold,expected_faulty", ((None, True), (10, True), (40, True), (50, False)) + ) + def test_efd_session_faulty_percentage(self, faulty_session_threshold, expected_faulty): + """Tests that the number of new tests in a session is correctly used to determine if a session is faulty based + on the percentage of new tests (as opposed to the absolute number). + + In order to test the percentages fully without hitting the absolute number of new tests threshold, we generate + a large number of both known and new tests. + + There are a total of 100 known and 100 new tests, so 50% are new + """ + + if faulty_session_threshold is not None: + efd_settings = EarlyFlakeDetectionSettings(True, faulty_session_threshold=faulty_session_threshold) + else: + efd_settings = EarlyFlakeDetectionSettings(True) + + ssettings = self._get_session_settings(efd_settings=efd_settings) + test_session = TestVisibilitySession(session_settings=ssettings) + + # Modules 1 and 2 each have one suite with 30 known tests and 20 new tests. 
+ m1_id = TestModuleId("module_1") + m1 = TestVisibilityModule(m1_id.name, session_settings=ssettings) + test_session.add_child(m1_id, m1) + m1_s1_id = TestSuiteId(m1_id, "m1_s1") + m1_s1 = TestVisibilitySuite(m1_s1_id.name, session_settings=ssettings) + m1.add_child(m1_s1_id, m1_s1) + + # Known tests: + for i in range(50): + test_name = f"m1_s1_known_t{i}" + m1_s1.add_child( + InternalTestId(m1_s1_id, name=test_name), + TestVisibilityTest(test_name, session_settings=ssettings, is_new=False), + ) + + for i in range(50): + test_name = f"m1_s1_new_t{i}" + m1_s1.add_child( + InternalTestId(m1_s1_id, name=test_name), + TestVisibilityTest(test_name, session_settings=ssettings, is_new=True), + ) + + m2_id = TestModuleId("module_2") + m2 = TestVisibilityModule(m2_id.name, session_settings=ssettings) + test_session.add_child(m2_id, m2) + m2_s1_id = TestSuiteId(m2_id, "suite_1") + m2_s1 = TestVisibilitySuite(m2_s1_id.name, session_settings=ssettings) + m2.add_child(m2_s1_id, m2_s1) + + # Known tests: + for i in range(50): + test_name = f"m2_s1_known_t{i}" + m2_s1.add_child( + InternalTestId(m1_s1_id, name=test_name), + TestVisibilityTest(test_name, session_settings=ssettings, is_new=False), + ) + + for i in range(50): + test_name = f"m2_s1_new_t{i}" + m2_s1.add_child( + InternalTestId(m1_s1_id, name=test_name), + TestVisibilityTest(test_name, session_settings=ssettings, is_new=True), + ) + + assert test_session.efd_is_faulty_session() == expected_faulty + + @pytest.mark.parametrize( + "faulty_session_threshold,expected_faulty", ((None, True), (10, True), (40, False), (50, False)) + ) + def test_efd_session_faulty_absolute(self, faulty_session_threshold, expected_faulty): + """Tests that the number of new tests in a session is correctly used to determine if a session is faulty based + on the absolute number of new tests. For the purpose of this test, the test structure is hardcoded. Whether or not tests are properly marked as new, etc., should be tested elsewhere. 
+ + There are a total of 10 known tests and 40 new tests, so 80% of tests are new. """ + if faulty_session_threshold is not None: efd_settings = EarlyFlakeDetectionSettings(True, faulty_session_threshold=faulty_session_threshold) else: @@ -192,25 +266,28 @@ def test_efd_session_faulty(self, faulty_session_threshold, expected_faulty): ssettings = self._get_session_settings(efd_settings=efd_settings) test_session = TestVisibilitySession(session_settings=ssettings) - # Module + # Modules 1 and 2 each have one suite with 5 known tests and 20 new tests. m1_id = TestModuleId("module_1") m1 = TestVisibilityModule(m1_id.name, session_settings=ssettings) test_session.add_child(m1_id, m1) m1_s1_id = TestSuiteId(m1_id, "m1_s1") m1_s1 = TestVisibilitySuite(m1_s1_id.name, session_settings=ssettings) m1.add_child(m1_s1_id, m1_s1) - m1_s1_t1_id = InternalTestId(m1_s1_id, name="m1_s1_t1") - m1_s1.add_child(m1_s1_t1_id, TestVisibilityTest(m1_s1_t1_id.name, session_settings=ssettings, is_new=True)) - m1_s1_t2_id = InternalTestId(m1_s1_id, name="m1_s1_t2") - m1_s1.add_child(m1_s1_t2_id, TestVisibilityTest(m1_s1_t2_id.name, session_settings=ssettings, is_new=False)) - m1_s1_t3_id = InternalTestId(m1_s1_id, name="m1_s1_t3") - m1_s1.add_child(m1_s1_t3_id, TestVisibilityTest(m1_s1_t3_id.name, session_settings=ssettings, is_new=False)) - - m1_s2_id = TestSuiteId(m1_id, "suite_2") - m1_s2 = TestVisibilitySuite(m1_s2_id.name, session_settings=ssettings) - m1.add_child(m1_s2_id, m1_s2) - m1_s2_t1_id = InternalTestId(m1_s2_id, name="m1_s2_t1") - m1_s2.add_child(m1_s2_t1_id, TestVisibilityTest(m1_s2_t1_id.name, session_settings=ssettings, is_new=True)) + + # Known tests: + for i in range(5): + test_name = f"m1_s1_known_t{i}" + m1_s1.add_child( + InternalTestId(m1_s1_id, name=test_name), + TestVisibilityTest(test_name, session_settings=ssettings, is_new=False), + ) + + for i in range(20): + test_name = f"m1_s1_new_t{i}" + m1_s1.add_child( + InternalTestId(m1_s1_id, name=test_name), + 
TestVisibilityTest(test_name, session_settings=ssettings, is_new=True), + ) m2_id = TestModuleId("module_2") m2 = TestVisibilityModule(m2_id.name, session_settings=ssettings) @@ -219,20 +296,19 @@ def test_efd_session_faulty(self, faulty_session_threshold, expected_faulty): m2_s1 = TestVisibilitySuite(m2_s1_id.name, session_settings=ssettings) m2.add_child(m2_s1_id, m2_s1) - m2_s1_t1_id = InternalTestId(m2_s1_id, name="m2_s1_t1") - m2_s1.add_child(m2_s1_t1_id, TestVisibilityTest(m2_s1_t1_id.name, session_settings=ssettings, is_new=False)) - m2_s1_t2_id = InternalTestId(m2_s1_id, name="m2_s1_t2") - m2_s1.add_child(m2_s1_t2_id, TestVisibilityTest(m2_s1_t2_id.name, session_settings=ssettings, is_new=False)) - m2_s1_t3_id = InternalTestId(m2_s1_id, name="m2_s1_t3") - m2_s1.add_child(m2_s1_t3_id, TestVisibilityTest(m2_s1_t3_id.name, session_settings=ssettings, is_new=False)) - - # A test with parameters is never considered new: - m2_s1_t4_id = InternalTestId(m2_s1_id, name="m2_s1_t4", parameters='{"hello": "world"}') - m2_s1.add_child( - m2_s1_t4_id, - TestVisibilityTest( - m2_s1_t4_id.name, session_settings=ssettings, is_new=True, parameters=m2_s1_t4_id.parameters - ), - ) + # Known tests: + for i in range(5): + test_name = f"m2_s1_known_t{i}" + m2_s1.add_child( + InternalTestId(m1_s1_id, name=test_name), + TestVisibilityTest(test_name, session_settings=ssettings, is_new=False), + ) + + for i in range(20): + test_name = f"m2_s1_new_t{i}" + m2_s1.add_child( + InternalTestId(m1_s1_id, name=test_name), + TestVisibilityTest(test_name, session_settings=ssettings, is_new=True), + ) assert test_session.efd_is_faulty_session() == expected_faulty diff --git a/tests/commands/test_runner.py b/tests/commands/test_runner.py index 8c5dd0bd7f8..b6ad3cbd755 100644 --- a/tests/commands/test_runner.py +++ b/tests/commands/test_runner.py @@ -229,6 +229,7 @@ def test_debug_mode(self): assert b"debug mode has been enabled for the ddtrace logger" in p.stderr.read() 
+@pytest.mark.skipif(sys.version_info > (3, 12), reason="Profiling unsupported with 3.13") def test_env_profiling_enabled(monkeypatch): """DD_PROFILING_ENABLED allows enabling the global profiler.""" # Off by default diff --git a/tests/contrib/aiohttp/test_request.py b/tests/contrib/aiohttp/test_request.py index d32da71a927..cde0f311521 100644 --- a/tests/contrib/aiohttp/test_request.py +++ b/tests/contrib/aiohttp/test_request.py @@ -4,8 +4,6 @@ from ddtrace import config from ddtrace.contrib.aiohttp.middlewares import trace_app -from ddtrace.contrib.aiohttp.patch import patch -from ddtrace.contrib.aiohttp.patch import unpatch from tests.utils import assert_is_measured from tests.utils import override_global_config @@ -76,9 +74,7 @@ async def test_user_specified_service(tracer, aiohttp_client, loop): When a service name is specified by the user The aiohttp integration should use it as the service name """ - unpatch() with override_global_config(dict(service="mysvc")): - patch() app = setup_app() trace_app(app, tracer) client = await aiohttp_client(app) diff --git a/tests/contrib/anthropic/test_anthropic_llmobs.py b/tests/contrib/anthropic/test_anthropic_llmobs.py index f286a890209..e2850a4157f 100644 --- a/tests/contrib/anthropic/test_anthropic_llmobs.py +++ b/tests/contrib/anthropic/test_anthropic_llmobs.py @@ -1,6 +1,5 @@ from pathlib import Path -import mock import pytest from tests.llmobs._utils import _expected_llmobs_llm_span_event @@ -117,37 +116,6 @@ def test_error(self, anthropic, ddtrace_global_config, mock_llmobs_writer, mock_ ) ) - def test_error_unserializable_arg( - self, anthropic, ddtrace_global_config, mock_llmobs_writer, mock_tracer, request_vcr - ): - """Ensure we handle unserializable arguments correctly and still emit llmobs records.""" - llm = anthropic.Anthropic() - with pytest.raises(Exception): - llm.messages.create( - model="claude-3-opus-20240229", - max_tokens=object(), - temperature=0.8, - messages=[{"role": "user", "content": "Hello 
World!"}], - ) - - span = mock_tracer.pop_traces()[0][0] - assert mock_llmobs_writer.enqueue.call_count == 1 - expected_span = _expected_llmobs_llm_span_event( - span, - model_name="claude-3-opus-20240229", - model_provider="anthropic", - input_messages=[{"content": "Hello World!", "role": "user"}], - output_messages=[{"content": ""}], - error=span.get_tag("error.type"), - error_message=span.get_tag("error.message"), - error_stack=span.get_tag("error.stack"), - metadata={"temperature": 0.8, "max_tokens": mock.ANY}, - tags={"ml_app": "", "service": "tests.contrib.anthropic"}, - ) - mock_llmobs_writer.enqueue.assert_called_with(expected_span) - actual_span = mock_llmobs_writer.enqueue.call_args[0][0] - assert "[Unserializable object: func.HttpResponse: + return func.HttpResponse("Hello Datadog!") + + +@app.route(route="httpgeterror", auth_level=func.AuthLevel.ANONYMOUS, methods=[func.HttpMethod.GET]) +def http_get_error(req: func.HttpRequest) -> func.HttpResponse: + raise Exception("Test Error") + + +@app.route(route="httppostok", auth_level=func.AuthLevel.ANONYMOUS, methods=[func.HttpMethod.POST]) +def http_post_ok(req: func.HttpRequest) -> func.HttpResponse: + return func.HttpResponse("Hello Datadog!") diff --git a/tests/contrib/azure_functions/azure_function_app/host.json b/tests/contrib/azure_functions/azure_function_app/host.json new file mode 100644 index 00000000000..06d01bdaa95 --- /dev/null +++ b/tests/contrib/azure_functions/azure_function_app/host.json @@ -0,0 +1,15 @@ +{ + "version": "2.0", + "logging": { + "applicationInsights": { + "samplingSettings": { + "isEnabled": true, + "excludedTypes": "Request" + } + } + }, + "extensionBundle": { + "id": "Microsoft.Azure.Functions.ExtensionBundle", + "version": "[4.*, 5.0.0)" + } +} diff --git a/tests/contrib/azure_functions/azure_function_app/local.settings.json b/tests/contrib/azure_functions/azure_function_app/local.settings.json new file mode 100644 index 00000000000..fb38bf93ca8 --- /dev/null +++ 
b/tests/contrib/azure_functions/azure_function_app/local.settings.json @@ -0,0 +1,10 @@ +{ + "IsEncrypted": false, + "Values": { + "FUNCTIONS_WORKER_RUNTIME": "python", + "FUNCTIONS_EXTENSION_VERSION": "~4", + "AzureWebJobsFeatureFlags": "EnableWorkerIndexing", + "AzureWebJobsStorage": "", + "WEBSITE_SITE_NAME": "test-func" + } +} diff --git a/tests/contrib/azure_functions/test_azure_functions_patch.py b/tests/contrib/azure_functions/test_azure_functions_patch.py new file mode 100644 index 00000000000..acc58df654a --- /dev/null +++ b/tests/contrib/azure_functions/test_azure_functions_patch.py @@ -0,0 +1,31 @@ +# This test script was automatically generated by the contrib-patch-tests.py +# script. If you want to make changes to it, you should make sure that you have +# removed the ``_generated`` suffix from the file name, to prevent the content +# from being overwritten by future re-generations. + +from ddtrace.contrib.azure_functions import get_version +from ddtrace.contrib.azure_functions.patch import patch + + +try: + from ddtrace.contrib.azure_functions.patch import unpatch +except ImportError: + unpatch = None +from tests.contrib.patch import PatchTestCase + + +class TestAzure_FunctionsPatch(PatchTestCase.Base): + __integration_name__ = "azure_functions" + __module_name__ = "azure.functions" + __patch_func__ = patch + __unpatch_func__ = unpatch + __get_version__ = get_version + + def assert_module_patched(self, azure_functions): + pass + + def assert_not_module_patched(self, azure_functions): + pass + + def assert_not_module_double_patched(self, azure_functions): + pass diff --git a/tests/contrib/azure_functions/test_azure_functions_snapshot.py b/tests/contrib/azure_functions/test_azure_functions_snapshot.py new file mode 100644 index 00000000000..c236122181f --- /dev/null +++ b/tests/contrib/azure_functions/test_azure_functions_snapshot.py @@ -0,0 +1,64 @@ +import os +import signal +import subprocess +import time + +import pytest + +from tests.webclient import 
Client + + +DEFAULT_HEADERS = { + "User-Agent": "python-httpx/x.xx.x", +} + + +@pytest.fixture +def azure_functions_client(): + # Copy the env to get the correct PYTHONPATH and such + # from the virtualenv. + # webservers might exec or fork into another process, so we need to os.setsid() to create a process group + # (all of which will listen to signals sent to the parent) so that we can kill the whole application. + proc = subprocess.Popen( + ["func", "start"], + stdout=subprocess.DEVNULL, + stderr=subprocess.DEVNULL, + close_fds=True, + env=os.environ.copy(), + preexec_fn=os.setsid, + cwd=os.path.join(os.path.dirname(__file__), "azure_function_app"), + ) + try: + client = Client("http://0.0.0.0:7071") + # Wait for the server to start up + try: + client.wait(delay=0.5) + yield client + client.get_ignored("/shutdown") + except Exception: + pass + # At this point the traces have been sent to the test agent + # but the test agent hasn't necessarily finished processing + # the traces (race condition) so wait just a bit for that + # processing to complete. 
+ time.sleep(1) + finally: + os.killpg(proc.pid, signal.SIGKILL) + proc.wait() + + +@pytest.mark.snapshot +def test_http_get_ok(azure_functions_client: Client) -> None: + assert azure_functions_client.get("/api/httpgetok?key=val", headers=DEFAULT_HEADERS).status_code == 200 + + +@pytest.mark.snapshot(ignores=["meta.error.stack"]) +def test_http_get_error(azure_functions_client: Client) -> None: + assert azure_functions_client.get("/api/httpgeterror", headers=DEFAULT_HEADERS).status_code == 500 + + +@pytest.mark.snapshot +def test_http_post_ok(azure_functions_client: Client) -> None: + assert ( + azure_functions_client.post("/api/httppostok", headers=DEFAULT_HEADERS, data={"key": "val"}).status_code == 200 + ) diff --git a/tests/contrib/celery/run_tasks.py b/tests/contrib/celery/run_tasks.py new file mode 100644 index 00000000000..e91454ab5bb --- /dev/null +++ b/tests/contrib/celery/run_tasks.py @@ -0,0 +1,5 @@ +from tasks import fn_a +from tasks import fn_b + + +(fn_a.si() | fn_b.si()).delay() diff --git a/tests/contrib/celery/tasks.py b/tests/contrib/celery/tasks.py new file mode 100644 index 00000000000..a9dfc936ae4 --- /dev/null +++ b/tests/contrib/celery/tasks.py @@ -0,0 +1,14 @@ +from celery import Celery + + +app = Celery("tasks") + + +@app.task(name="tests.contrib.celery.tasks.fn_a") +def fn_a(): + return "a" + + +@app.task(name="tests.contrib.celery.tasks.fn_b") +def fn_b(): + return "b" diff --git a/tests/contrib/celery/test_chained_task.py b/tests/contrib/celery/test_chained_task.py new file mode 100644 index 00000000000..5fd0c543e72 --- /dev/null +++ b/tests/contrib/celery/test_chained_task.py @@ -0,0 +1,62 @@ +import os +import re +import subprocess +import time + +from celery import Celery + + +# Ensure that when we call Celery chains, the root span has celery specific span tags +# The test_integration.py setup doesn't perfectly mimic the condition of a worker process running. 
+# This test runs the worker as a side so we can check the tracer logs afterwards to ensure expected span results. +# See https://github.com/DataDog/dd-trace-py/issues/11479 +def test_task_chain_task_call_task(): + app = Celery("tasks") + + celery_worker_cmd = "ddtrace-run celery -A tasks worker -c 1 -l DEBUG -n uniquename1 -P solo" + celery_task_runner_cmd = "ddtrace-run python run_tasks.py" + + # The commands need to run from the directory where this test file lives + current_directory = str(os.path.dirname(__file__)) + + worker_process = subprocess.Popen( + celery_worker_cmd.split(), + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + preexec_fn=os.setsid, + close_fds=True, + cwd=current_directory, + ) + + max_wait_time = 10 + waited_so_far = 0 + # {app.control.inspect().active() returns {'celery@uniquename1': []} when the worker is running} + while app.control.inspect().active() is None and waited_so_far < max_wait_time: + time.sleep(1) + waited_so_far += 1 + + # The task should only run after the Celery worker has sufficient time to start up + task_runner_process = subprocess.Popen( + celery_task_runner_cmd.split(), + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + preexec_fn=os.setsid, + close_fds=True, + cwd=current_directory, + ) + + task_runner_process.wait() + # Kill the process so it starts to send traces to the Trace Agent + worker_process.kill() + worker_logs = worker_process.stderr.read() + + # Check that the root span was created with one of the Celery specific tags, such as celery.correlation_id + # Some versions of python seem to require escaping when using `re.search`: + old_pattern_match = r"resource=\\'tests.contrib.celery.tasks.fn_a\\' type=\\'worker\\' .* tags=.*correlation_id.*" + new_pattern_match = r"resource=\'tests.contrib.celery.tasks.fn_a\' type=\'worker\' .* tags=.*correlation_id.*" + + pattern_exists = ( + re.search(old_pattern_match, str(worker_logs)) is not None + or re.search(new_pattern_match, str(worker_logs)) is not None + 
) + assert pattern_exists is not None diff --git a/tests/contrib/celery/test_tagging.py b/tests/contrib/celery/test_tagging.py index af40c4f9209..2809364ba13 100644 --- a/tests/contrib/celery/test_tagging.py +++ b/tests/contrib/celery/test_tagging.py @@ -102,7 +102,7 @@ def test_amqp_task(instrument_celery, traced_amqp_celery_app): shutdown_timeout=30, ): t = add.delay(4, 4) - assert t.get(timeout=2) == 8 + assert t.get(timeout=30) == 8 # wait for spans to be received time.sleep(3) diff --git a/tests/contrib/django/test_django_appsec_iast.py b/tests/contrib/django/test_django_appsec_iast.py index 89495dcac80..efe0fa9acd0 100644 --- a/tests/contrib/django/test_django_appsec_iast.py +++ b/tests/contrib/django/test_django_appsec_iast.py @@ -41,7 +41,9 @@ def check_native_code_exception_in_each_django_test(request, caplog, telemetry_w yield else: caplog.set_level(logging.DEBUG) - with override_global_config(dict(_iast_debug=True)), caplog.at_level(logging.DEBUG): + with override_env({"_DD_IAST_USE_ROOT_SPAN": "false"}), override_global_config( + dict(_iast_debug=True) + ), caplog.at_level(logging.DEBUG): yield log_messages = [record.message for record in caplog.get_records("call")] diff --git a/tests/contrib/django/test_django_snapshots.py b/tests/contrib/django/test_django_snapshots.py index feaead253d8..d7402e37083 100644 --- a/tests/contrib/django/test_django_snapshots.py +++ b/tests/contrib/django/test_django_snapshots.py @@ -107,7 +107,6 @@ def test_middleware_trace_callable_view(client): assert client.get("/feed-view/").status_code == 200 -@flaky(until=1706677200) @pytest.mark.skipif( sys.version_info >= (3, 10, 0), reason=("func_name changed with Python 3.10 which changes the resource name." 
"TODO: new snapshot required."), diff --git a/tests/contrib/elasticsearch/test_elasticsearch.py b/tests/contrib/elasticsearch/test_elasticsearch.py index ecb09100387..b80b4486e71 100644 --- a/tests/contrib/elasticsearch/test_elasticsearch.py +++ b/tests/contrib/elasticsearch/test_elasticsearch.py @@ -1,5 +1,7 @@ import datetime +from http.client import HTTPConnection from importlib import import_module +import time import pytest @@ -38,6 +40,20 @@ raise ImportError("could not import any of {0!r}".format(module_names)) +def wait_for_es(host: str, port: int): + # Wait for up to 160 seconds for ES to start. + # DEV: Elasticsearch is pretty quick, but OpenSearch can take a long time to start. + for _ in range(80): + try: + conn = HTTPConnection(f"{host}:{port}") + conn.request("GET", "/") + conn.getresponse() + return + except Exception: + time.sleep(2) + raise Exception(f"Could not connect to ES at {host}:{port}") + + class ElasticsearchPatchTest(TracerTestCase): """ Elasticsearch integration test suite. @@ -67,6 +83,8 @@ def setUp(self): super(ElasticsearchPatchTest, self).setUp() es = self._get_es() + config = self._get_es_config() + wait_for_es(config["host"], config["port"]) tags = { # `component` is a reserved tag. Setting it via `Pin` should have no effect. 
"component": "foo", diff --git a/tests/contrib/fastapi/test_fastapi_appsec_iast.py b/tests/contrib/fastapi/test_fastapi_appsec_iast.py index 9688c7d06b7..7f1a140ffc2 100644 --- a/tests/contrib/fastapi/test_fastapi_appsec_iast.py +++ b/tests/contrib/fastapi/test_fastapi_appsec_iast.py @@ -24,6 +24,7 @@ from ddtrace.contrib.internal.fastapi.patch import patch as patch_fastapi from ddtrace.contrib.sqlite3.patch import patch as patch_sqlite_sqli from tests.appsec.iast.iast_utils import get_line_and_hash +from tests.utils import override_env from tests.utils import override_global_config @@ -57,7 +58,9 @@ def check_native_code_exception_in_each_fastapi_test(request, caplog, telemetry_ yield else: caplog.set_level(logging.DEBUG) - with override_global_config(dict(_iast_debug=True)), caplog.at_level(logging.DEBUG): + with override_env({"_DD_IAST_USE_ROOT_SPAN": "false"}), override_global_config( + dict(_iast_debug=True) + ), caplog.at_level(logging.DEBUG): yield log_messages = [record.msg for record in caplog.get_records("call")] diff --git a/tests/contrib/flask/app.py b/tests/contrib/flask/app.py index fbd06dd6990..82059ce0eaa 100644 --- a/tests/contrib/flask/app.py +++ b/tests/contrib/flask/app.py @@ -1,3 +1,4 @@ +import hashlib import os import subprocess import sys @@ -100,3 +101,9 @@ def run_subcommunicatenoshell(): subp.wait() ret = subp.returncode return str(ret), 200 + + +@app.route("/md5sum") +def md5sum(): + data = request.args.get("q").encode() + return hashlib.md5(data).hexdigest() diff --git a/tests/contrib/flask/test_appsec_flask_pytest_iast_no_snapshot.py b/tests/contrib/flask/test_appsec_flask_pytest_iast_no_snapshot.py new file mode 100644 index 00000000000..801cffa4b8a --- /dev/null +++ b/tests/contrib/flask/test_appsec_flask_pytest_iast_no_snapshot.py @@ -0,0 +1,59 @@ +#!/usr/bin/env python3 + +import os +import subprocess +import time + +import pytest + + +@pytest.mark.parametrize("iast_enabled", ["true", "false"]) 
+@pytest.mark.parametrize("iast_request_sampling", ["100.0", "0.0"]) +@pytest.mark.parametrize("pytest_use_new_plugin", ["true", "false"]) +def test_flask_pytest_iast(iast_enabled, iast_request_sampling, pytest_use_new_plugin): + from tests.utils import _build_env + + env = _build_env() + env.update( + { + # Avoid noisy database spans being output on app startup/teardown. + "DD_TRACE_SQLITE3_ENABLED": "0", + "DD_TRACE_SQLITE_ENABLED": "0", + "DD_IAST_ENABLED": iast_enabled, + "DD_TRACE_DEBUG": "true", + "DD_PYTEST_USE_NEW_PLUGIN_BETA": pytest_use_new_plugin, + "DD_IAST_REQUEST_SAMPLING": iast_request_sampling, + # "DD_API_KEY": "invalidapikey", + # "DD_CIVISIBILITY_AGENTLESS_ENABLED": "1", + } + ) + proc = subprocess.Popen( + "pytest --ddtrace --ddtrace-patch-all --no-cov tests/contrib/flask/test_flask_pytest_iast.py".split(), + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + close_fds=True, + env=env, + preexec_fn=os.setsid, + cwd=str(os.path.dirname(os.path.dirname(os.path.dirname(os.path.dirname(__file__))))), + ) + try: + time.sleep(0.2) + finally: + proc.wait() + # DEV uncomment this line if you need more info locally + # stdout = proc.stdout.read() + + stderr = proc.stderr.read() + split_stderr = stderr.decode("utf-8").split("\n") + + found = False + for line in split_stderr: + if "WEAK_HASH" in line: + assert line.startswith("finishing span name='pytest.test'") + found = True + break + + if iast_enabled == "true" and iast_request_sampling == "100": + assert found + else: + assert not found diff --git a/tests/contrib/flask/test_flask_appsec_iast.py b/tests/contrib/flask/test_flask_appsec_iast.py index 020cbe27a98..f1bed61cb9d 100644 --- a/tests/contrib/flask/test_flask_appsec_iast.py +++ b/tests/contrib/flask/test_flask_appsec_iast.py @@ -19,6 +19,7 @@ from ddtrace.contrib.sqlite3.patch import patch as patch_sqlite_sqli from tests.appsec.iast.iast_utils import get_line_and_hash from tests.contrib.flask import BaseFlaskTestCase +from tests.utils import 
override_env from tests.utils import override_global_config @@ -35,7 +36,7 @@ def inject_fixtures(self, caplog, telemetry_writer): # noqa: F811 self._caplog = caplog def setUp(self): - with override_global_config( + with override_env({"_DD_IAST_USE_ROOT_SPAN": "false"}), override_global_config( dict( _iast_enabled=True, _deduplication_enabled=False, diff --git a/tests/contrib/flask/test_flask_pytest_iast.py b/tests/contrib/flask/test_flask_pytest_iast.py new file mode 100644 index 00000000000..fcacf6b36c9 --- /dev/null +++ b/tests/contrib/flask/test_flask_pytest_iast.py @@ -0,0 +1,28 @@ +#!/usr/bin/env python3 +""" +This test suite is actually used as fixture in end-to-end test +for pytest IAST integration. +""" + +import urllib.parse + +import pytest + +from .app import app as real_app + + +@pytest.fixture() +def app(): + return real_app + + +@pytest.fixture() +def client(app): + return app.test_client() + + +def test_md5_request(client): + data = b"foobar" + urlencoded_data = urllib.parse.urlencode({"q": data}) + response = client.get("/md5sum?%s" % urlencoded_data) + assert response.status_code == 200 diff --git a/tests/contrib/futures/test_propagation.py b/tests/contrib/futures/test_propagation.py index d4d5beb8946..037aebd9ce5 100644 --- a/tests/contrib/futures/test_propagation.py +++ b/tests/contrib/futures/test_propagation.py @@ -1,4 +1,5 @@ import concurrent.futures +import sys import time import pytest @@ -406,6 +407,7 @@ def fn(): assert spans[1].parent_id == spans[0].span_id +@pytest.mark.skipif(sys.version_info > (3, 12), reason="Fails on 3.13") @pytest.mark.subprocess(ddtrace_run=True, timeout=5) def test_concurrent_futures_with_gevent(): """Check compatibility between the integration and gevent""" diff --git a/tests/contrib/kafka/test_kafka.py b/tests/contrib/kafka/test_kafka.py index 9bcf4ffc538..d49f85f26b2 100644 --- a/tests/contrib/kafka/test_kafka.py +++ b/tests/contrib/kafka/test_kafka.py @@ -885,6 +885,35 @@ def 
test_context_header_injection_works_no_client_added_headers(kafka_topic, pro assert propagation_asserted is True +def test_consumer_uses_active_context_when_no_valid_distributed_context_exists( + kafka_topic, producer, consumer, dummy_tracer +): + # use a random int in this string to prevent reading a message produced by a previous test run + test_string = "producer does not inject context test " + str(random.randint(0, 1000)) + test_key = "producer does not inject context test " + str(random.randint(0, 1000)) + PAYLOAD = bytes(test_string, encoding="utf-8") + + producer.produce(kafka_topic, PAYLOAD, key=test_key) + producer.flush() + + Pin.override(consumer, tracer=dummy_tracer) + + with dummy_tracer.trace("kafka consumer parent span") as parent_span: + with override_config("kafka", dict(distributed_tracing_enabled=True)): + message = None + while message is None or str(message.value()) != str(PAYLOAD): + message = consumer.poll() + + traces = dummy_tracer.pop_traces() + consume_span = traces[len(traces) - 1][-1] + + # assert consumer_span parent is our custom span + assert consume_span.name == "kafka.consume" + assert consume_span.parent_id == parent_span.span_id + + Pin.override(consumer, tracer=None) + + def test_span_has_dsm_payload_hash(dummy_tracer, consumer, producer, kafka_topic): Pin.override(producer, tracer=dummy_tracer) Pin.override(consumer, tracer=dummy_tracer) diff --git a/tests/contrib/openai/test_openai_llmobs.py b/tests/contrib/openai/test_openai_llmobs.py index ddba259e928..a1a2b93a5ca 100644 --- a/tests/contrib/openai/test_openai_llmobs.py +++ b/tests/contrib/openai/test_openai_llmobs.py @@ -867,38 +867,6 @@ def test_embedding_string_base64(self, openai, ddtrace_global_config, mock_llmob ) ) - def test_unserializable_param_is_handled(self, openai, ddtrace_global_config, mock_llmobs_writer, mock_tracer): - with pytest.raises(Exception): - model = "babbage-002" - client = openai.OpenAI() - client.completions.create( - model=model, - prompt="Hello 
world", - temperature=0.8, - n=object(), - stop=".", - max_tokens=10, - user="ddtrace-test", - ) - span = mock_tracer.pop_traces()[0][0] - assert mock_llmobs_writer.enqueue.call_count == 1 - expected_span = _expected_llmobs_llm_span_event( - span, - model_name=model, - model_provider="openai", - input_messages=[{"content": "Hello world"}], - output_messages=[{"content": ""}], - metadata={"temperature": 0.8, "max_tokens": 10, "n": mock.ANY, "stop": ".", "user": "ddtrace-test"}, - token_metrics={}, - error=span.get_tag("error.type"), - error_message=span.get_tag("error.message"), - error_stack=span.get_tag("error.stack"), - tags={"ml_app": "", "service": "tests.contrib.openai"}, - ) - mock_llmobs_writer.enqueue.assert_called_with(expected_span) - actual_span = mock_llmobs_writer.enqueue.call_args[0][0] - assert "[Unserializable object: 0.0002 assert spans[0].get_metric(BENCHMARK_RUN) > 0 diff --git a/tests/contrib/suitespec.yml b/tests/contrib/suitespec.yml index 2f14127ddf0..83a48ea1f48 100644 --- a/tests/contrib/suitespec.yml +++ b/tests/contrib/suitespec.yml @@ -23,6 +23,9 @@ components: - ddtrace/contrib/aws_lambda/* - ddtrace/contrib/internal/aws_lambda/* - ddtrace/ext/aws.py + azure_functions: + - ddtrace/contrib/azure_functions/* + - ddtrace/contrib/internal/azure_functions/* botocore: - ddtrace/contrib/botocore/* - ddtrace/contrib/internal/botocore/* @@ -374,6 +377,17 @@ suites: - tests/snapshots/tests.{suite}.* runner: riot snapshot: true + azure_functions: + paths: + - '@bootstrap' + - '@core' + - '@contrib' + - '@tracing' + - '@azure_functions' + - tests/contrib/azure_functions/* + - tests/snapshots/tests.contrib.azure_functions.* + runner: riot + snapshot: true botocore: parallelism: 6 paths: diff --git a/tests/debugging/exception/test_replay.py b/tests/debugging/exception/test_replay.py index 8b9a2a7d830..54baeb8b826 100644 --- a/tests/debugging/exception/test_replay.py +++ b/tests/debugging/exception/test_replay.py @@ -161,8 +161,8 @@ def b_chain(bar): 
m = 4 try: a(bar % m) - except ValueError: - raise KeyError("chain it") + except ValueError as exc: + raise KeyError("chain it") from exc def c(foo=42): with self.trace("c"): diff --git a/tests/debugging/function/test_discovery.py b/tests/debugging/function/test_discovery.py index 1cd977d06af..6f247bc7fd0 100644 --- a/tests/debugging/function/test_discovery.py +++ b/tests/debugging/function/test_discovery.py @@ -1,6 +1,7 @@ import pytest from ddtrace.debugging._function.discovery import FunctionDiscovery +from ddtrace.internal.module import ModuleWatchdog import tests.submod.stuff as stuff @@ -12,7 +13,7 @@ def stuff_discovery(): def test_abs_stuff(): import tests.submod.absstuff as absstuff - assert sorted(FunctionDiscovery.from_module(absstuff).keys()) == [7, 11, 16, 19] + assert set(FunctionDiscovery.from_module(absstuff).keys()) >= {7, 11, 16, 19} def test_function_discovery(stuff_discovery): @@ -106,16 +107,35 @@ def test_discovery_after_external_wrapping(stuff): def wrapper(wrapped, inst, args, kwargs): pass + original_function = stuff.Stuff.instancestuff + wrapt.wrap_function_wrapper(stuff, "Stuff.instancestuff", wrapper) assert isinstance(stuff.Stuff.instancestuff, (wrapt.BoundFunctionWrapper, wrapt.FunctionWrapper)) code = stuff.Stuff.instancestuff.__code__ - f = FunctionDiscovery(stuff)[36][0] + f, *_ = FunctionDiscovery(stuff).at_line(36) - assert isinstance(f, (wrapt.BoundFunctionWrapper, wrapt.FunctionWrapper)) + assert f is original_function or isinstance(f, (wrapt.BoundFunctionWrapper, wrapt.FunctionWrapper)), f assert f.__code__ is code def test_property_non_function_getter(stuff_discovery): with pytest.raises(ValueError): stuff_discovery.by_name("PropertyStuff.foo") + + +def test_custom_decorated_stuff(): + class DiscoveryModuleWatchdog(ModuleWatchdog): + def transform(self, code, module): + return FunctionDiscovery.transformer(code, module) + + DiscoveryModuleWatchdog.install() + + import tests.submod.custom_decorated_stuff as 
custom_decorated_stuff + + fd = FunctionDiscovery.from_module(custom_decorated_stuff) + + (home,) = fd.at_line(17) + assert home.__qualname__ == "home" + + DiscoveryModuleWatchdog.uninstall() diff --git a/tests/debugging/signal/test_collector.py b/tests/debugging/signal/test_collector.py index 2e2a77ec098..49e4f1aef2c 100644 --- a/tests/debugging/signal/test_collector.py +++ b/tests/debugging/signal/test_collector.py @@ -6,7 +6,7 @@ import mock from ddtrace.debugging._signal.collector import SignalCollector -from ddtrace.debugging._signal.model import LogSignal +from ddtrace.debugging._signal.log import LogSignal from ddtrace.debugging._signal.model import SignalState from ddtrace.debugging._signal.snapshot import Snapshot from tests.debugging.utils import create_snapshot_line_probe diff --git a/tests/debugging/test_debugger.py b/tests/debugging/test_debugger.py index 2a0bdaf13d8..0cc65bc43cf 100644 --- a/tests/debugging/test_debugger.py +++ b/tests/debugging/test_debugger.py @@ -39,8 +39,6 @@ from tests.debugging.utils import ddexpr from tests.debugging.utils import ddstrtempl from tests.internal.remoteconfig import rcm_endpoint -from tests.submod.stuff import Stuff -from tests.submod.stuff import modulestuff as imported_modulestuff from tests.utils import TracerTestCase from tests.utils import call_program @@ -71,7 +69,7 @@ def simple_debugger_test(probe, func): return snapshots -def test_debugger_line_probe_on_instance_method(): +def test_debugger_line_probe_on_instance_method(stuff): snapshots = simple_debugger_test( create_snapshot_line_probe( probe_id="probe-instance-method", @@ -79,7 +77,7 @@ def test_debugger_line_probe_on_instance_method(): line=36, condition=None, ), - lambda: Stuff().instancestuff(), + stuff.Stuff().instancestuff, ) (snapshot,) = snapshots @@ -89,15 +87,15 @@ def test_debugger_line_probe_on_instance_method(): assert snapshot["debugger"]["snapshot"]["duration"] is None -def test_debugger_line_probe_on_imported_module_function(): - lineno 
= min(linenos(imported_modulestuff)) +def test_debugger_line_probe_on_imported_module_function(stuff): + lineno = min(linenos(stuff.modulestuff)) snapshots = simple_debugger_test( create_snapshot_line_probe( probe_id="probe-instance-method", source_file="tests/submod/stuff.py", line=lineno, ), - lambda: imported_modulestuff(42), + lambda: stuff.modulestuff(42), ) (snapshot,) = snapshots @@ -107,7 +105,7 @@ def test_debugger_line_probe_on_imported_module_function(): @pytest.mark.parametrize( - "probe, trigger", + "probe", [ ( create_snapshot_function_probe( @@ -115,8 +113,7 @@ def test_debugger_line_probe_on_imported_module_function(): module="tests.submod.stuff", func_qname="Stuff.instancestuff", rate=1000, - ), - lambda: Stuff().instancestuff(42), + ) ), ( create_snapshot_line_probe( @@ -124,14 +121,11 @@ def test_debugger_line_probe_on_imported_module_function(): source_file="tests/submod/stuff.py", line=36, rate=1000, - ), - lambda: Stuff().instancestuff(42), + ) ), ], ) -def test_debugger_probe_new_delete(probe, trigger): - global Stuff - +def test_debugger_probe_new_delete(probe, stuff): with debugger() as d: probe_id = probe.probe_id d.add_probes(probe) @@ -139,7 +133,7 @@ def test_debugger_probe_new_delete(probe, trigger): assert probe in d._probe_registry assert _get_probe_location(probe) in d.__watchdog__._instance._locations - trigger() + stuff.Stuff().instancestuff(42) d.remove_probes(probe) @@ -148,7 +142,7 @@ def test_debugger_probe_new_delete(probe, trigger): assert _get_probe_location(probe) not in d.__watchdog__._instance._locations - trigger() + stuff.Stuff().instancestuff(42) # Unload and reload the module to ensure that the injection hook # has actually been removed. 
@@ -158,15 +152,15 @@ def test_debugger_probe_new_delete(probe, trigger): __import__("tests.submod.stuff") # Make Stuff refer to the reloaded class - Stuff = sys.modules["tests.submod.stuff"].Stuff + stuff.Stuff = sys.modules["tests.submod.stuff"].Stuff - trigger() + stuff.Stuff().instancestuff(42) (snapshot,) = d.uploader.wait_for_payloads() assert snapshot["debugger"]["snapshot"]["probe"]["id"] == probe_id -def test_debugger_function_probe_on_instance_method(): +def test_debugger_function_probe_on_instance_method(stuff): snapshots = simple_debugger_test( create_snapshot_function_probe( probe_id="probe-instance-method", @@ -174,7 +168,7 @@ def test_debugger_function_probe_on_instance_method(): func_qname="Stuff.instancestuff", condition=None, ), - lambda: Stuff().instancestuff(42), + lambda: stuff.Stuff().instancestuff(42), ) (snapshot,) = snapshots @@ -216,12 +210,30 @@ def test_debugger_function_probe_on_function_with_exception(): return_capture = snapshot_data["captures"]["return"] assert return_capture["arguments"] == {} - assert return_capture["locals"] == {"@exception": {"fields": {}, "type": "Exception"}} + assert return_capture["locals"] == { + "@exception": { + "type": "Exception", + "fields": { + "args": { + "type": "tuple", + "elements": [ + {"type": "str", "value": "'Hello'"}, + {"type": "str", "value": "'world!'"}, + {"type": "int", "value": "42"}, + ], + "size": 3, + }, + "__cause__": {"type": "NoneType", "isNull": True}, + "__context__": {"type": "NoneType", "isNull": True}, + "__suppress_context__": {"type": "bool", "value": "False"}, + }, + } + } assert return_capture["throwable"]["message"] == "'Hello', 'world!', 42" assert return_capture["throwable"]["type"] == "Exception" -def test_debugger_invalid_condition(): +def test_debugger_invalid_condition(stuff): with debugger() as d: d.add_probes( create_snapshot_line_probe( @@ -232,12 +244,12 @@ def test_debugger_invalid_condition(): ), good_probe(), ) - Stuff().instancestuff() + 
stuff.Stuff().instancestuff() assert all(s["debugger"]["snapshot"]["probe"]["id"] != "foo" for s in d.uploader.wait_for_payloads()) -def test_debugger_conditional_line_probe_on_instance_method(): +def test_debugger_conditional_line_probe_on_instance_method(stuff): snapshots = simple_debugger_test( create_snapshot_line_probe( probe_id="probe-instance-method", @@ -245,7 +257,7 @@ def test_debugger_conditional_line_probe_on_instance_method(): line=36, condition=DDExpression(dsl="True", callable=dd_compile(True)), ), - lambda: Stuff().instancestuff(), + lambda: stuff.Stuff().instancestuff(), ) (snapshot,) = snapshots @@ -258,7 +270,7 @@ def test_debugger_conditional_line_probe_on_instance_method(): assert captures["locals"] == {} -def test_debugger_invalid_line(): +def test_debugger_invalid_line(stuff): with debugger() as d: d.add_probes( create_snapshot_line_probe( @@ -268,13 +280,13 @@ def test_debugger_invalid_line(): ), good_probe(), ) - Stuff().instancestuff() + stuff.Stuff().instancestuff() assert all(s["debugger"]["snapshot"]["probe"]["id"] != "invalidline" for s in d.uploader.wait_for_payloads()) @mock.patch("ddtrace.debugging._debugger.log") -def test_debugger_invalid_source_file(log): +def test_debugger_invalid_source_file(log, stuff): with debugger() as d: d.add_probes( create_snapshot_line_probe( @@ -284,7 +296,7 @@ def test_debugger_invalid_source_file(log): ), good_probe(), ) - Stuff().instancestuff() + stuff.Stuff().instancestuff() log.error.assert_called_once_with( "Cannot inject probe %s: source file %s cannot be resolved", "invalidsource", "tests/submod/bonkers.py" @@ -293,7 +305,7 @@ def test_debugger_invalid_source_file(log): assert all(s["debugger"]["snapshot"]["probe"]["id"] != "invalidsource" for s in d.uploader.wait_for_payloads()) -def test_debugger_decorated_method(): +def test_debugger_decorated_method(stuff): simple_debugger_test( create_snapshot_line_probe( probe_id="probe-decorated-method", @@ -301,7 +313,7 @@ def 
test_debugger_decorated_method(): line=48, condition=None, ), - Stuff().decoratedstuff, + stuff.Stuff().decoratedstuff, ) @@ -324,7 +336,7 @@ def test_debugger_max_probes(mock_log): mock_log.warning.assert_called_once_with("Too many active probes. Ignoring new ones.") -def test_debugger_tracer_correlation(): +def test_debugger_tracer_correlation(stuff): with debugger() as d: d.add_probes( create_snapshot_line_probe( @@ -338,16 +350,14 @@ def test_debugger_tracer_correlation(): with d._tracer.trace("test-span") as span: trace_id = format_trace_id(span.trace_id) span_id = str(span.span_id) - Stuff().instancestuff() + stuff.Stuff().instancestuff() snapshots = d.uploader.wait_for_payloads() assert all(snapshot["dd"]["trace_id"] == trace_id for snapshot in snapshots) assert all(snapshot["dd"]["span_id"] == span_id for snapshot in snapshots) -def test_debugger_captured_exception(): - from tests.submod import stuff - +def test_debugger_captured_exception(stuff): snapshots = simple_debugger_test( create_snapshot_line_probe( probe_id="captured-exception-test", @@ -364,7 +374,7 @@ def test_debugger_captured_exception(): assert captures["throwable"]["type"] == "Exception" -def test_debugger_multiple_threads(): +def test_debugger_multiple_threads(stuff): with debugger() as d: probes = [ good_probe(), @@ -372,7 +382,7 @@ def test_debugger_multiple_threads(): ] d.add_probes(*probes) - callables = [Stuff().instancestuff, lambda: Stuff().propertystuff] + callables = [stuff.Stuff().instancestuff, lambda: stuff.Stuff().propertystuff] threads = [Thread(target=callables[_ % len(callables)]) for _ in range(10)] for t in threads: @@ -409,59 +419,57 @@ def create_stuff_line_metric_probe(kind, value=None): ) -def test_debugger_metric_probe_simple_count(mock_metrics): +def test_debugger_metric_probe_simple_count(mock_metrics, stuff): with debugger() as d: d.add_probes(create_stuff_line_metric_probe(MetricProbeKind.COUNTER)) - Stuff().instancestuff() + stuff.Stuff().instancestuff() assert ( 
call("probe.test.counter", 1.0, ["foo:bar", "debugger.probeid:metric-probe-test"]) in mock_metrics.increment.mock_calls ) -def test_debugger_metric_probe_count_value(mock_metrics): +def test_debugger_metric_probe_count_value(mock_metrics, stuff): with debugger() as d: d.add_probes(create_stuff_line_metric_probe(MetricProbeKind.COUNTER, {"ref": "bar"})) - Stuff().instancestuff(40) + stuff.Stuff().instancestuff(40) assert ( call("probe.test.counter", 40.0, ["foo:bar", "debugger.probeid:metric-probe-test"]) in mock_metrics.increment.mock_calls ) -def test_debugger_metric_probe_guage_value(mock_metrics): +def test_debugger_metric_probe_guage_value(mock_metrics, stuff): with debugger() as d: d.add_probes(create_stuff_line_metric_probe(MetricProbeKind.GAUGE, {"ref": "bar"})) - Stuff().instancestuff(41) + stuff.Stuff().instancestuff(41) assert ( call("probe.test.counter", 41.0, ["foo:bar", "debugger.probeid:metric-probe-test"]) in mock_metrics.gauge.mock_calls ) -def test_debugger_metric_probe_histogram_value(mock_metrics): +def test_debugger_metric_probe_histogram_value(mock_metrics, stuff): with debugger() as d: d.add_probes(create_stuff_line_metric_probe(MetricProbeKind.HISTOGRAM, {"ref": "bar"})) - Stuff().instancestuff(42) + stuff.Stuff().instancestuff(42) assert ( call("probe.test.counter", 42.0, ["foo:bar", "debugger.probeid:metric-probe-test"]) in mock_metrics.histogram.mock_calls ) -def test_debugger_metric_probe_distribution_value(mock_metrics): +def test_debugger_metric_probe_distribution_value(mock_metrics, stuff): with debugger() as d: d.add_probes(create_stuff_line_metric_probe(MetricProbeKind.DISTRIBUTION, {"ref": "bar"})) - Stuff().instancestuff(43) + stuff.Stuff().instancestuff(43) assert ( call("probe.test.counter", 43.0, ["foo:bar", "debugger.probeid:metric-probe-test"]) in mock_metrics.distribution.mock_calls ) -def test_debugger_multiple_function_probes_on_same_function(): - global Stuff - +def 
test_debugger_multiple_function_probes_on_same_function(stuff): probes = [ create_snapshot_function_probe( probe_id="probe-instance-method-%d" % i, @@ -475,9 +483,9 @@ def test_debugger_multiple_function_probes_on_same_function(): with debugger() as d: d.add_probes(*probes) - wrapping_context = DebuggerWrappingContext.extract(Stuff.instancestuff) + wrapping_context = DebuggerWrappingContext.extract(stuff.Stuff.instancestuff) assert wrapping_context.probes == {probe.probe_id: probe for probe in probes} - Stuff().instancestuff(42) + stuff.Stuff().instancestuff(42) d.collector.wait( lambda q: Counter(s.probe.probe_id for s in q) @@ -492,7 +500,7 @@ def test_debugger_multiple_function_probes_on_same_function(): assert "probe-instance-method-1" not in wrapping_context.probes - Stuff().instancestuff(42) + stuff.Stuff().instancestuff(42) d.collector.wait( lambda q: Counter(s.probe.probe_id for s in q) @@ -505,7 +513,7 @@ def test_debugger_multiple_function_probes_on_same_function(): d.remove_probes(probes[0], probes[2]) - Stuff().instancestuff(42) + stuff.Stuff().instancestuff(42) assert Counter(s.probe.probe_id for s in d.test_queue) == { "probe-instance-method-0": 2, @@ -514,12 +522,10 @@ def test_debugger_multiple_function_probes_on_same_function(): } with pytest.raises(AttributeError): - Stuff.instancestuff.__dd_wrappers__ + stuff.Stuff.instancestuff.__dd_wrappers__ def test_debugger_multiple_function_probes_on_same_lazy_module(): - sys.modules.pop("tests.submod.stuff", None) - probes = [ create_snapshot_function_probe( probe_id="probe-instance-method-%d" % i, diff --git a/tests/debugging/test_encoding.py b/tests/debugging/test_encoding.py index c06e5000ed8..c22851f1112 100644 --- a/tests/debugging/test_encoding.py +++ b/tests/debugging/test_encoding.py @@ -191,7 +191,21 @@ def _(): exc = context.pop("throwable") assert context["arguments"] == {} - assert context["locals"] == {"@exception": {"type": "Exception", "fields": {}}} + assert context["locals"] == { + 
"@exception": { + "type": "Exception", + "fields": { + "args": { + "type": "tuple", + "elements": [{"type": "str", "value": "'test'"}, {"type": "str", "value": "'me'"}], + "size": 2, + }, + "__cause__": {"type": "NoneType", "isNull": True}, + "__context__": {"type": "NoneType", "isNull": True}, + "__suppress_context__": {"type": "bool", "value": "False"}, + }, + } + } assert exc["message"] == "'test', 'me'" assert exc["type"] == "Exception" diff --git a/tests/internal/crashtracker/test_crashtracker.py b/tests/internal/crashtracker/test_crashtracker.py index ed338ce95bb..a4074745f83 100644 --- a/tests/internal/crashtracker/test_crashtracker.py +++ b/tests/internal/crashtracker/test_crashtracker.py @@ -506,6 +506,7 @@ def test_crashtracker_user_tags_envvar(run_python_code_in_subprocess): @pytest.mark.skipif(not sys.platform.startswith("linux"), reason="Linux only") +@pytest.mark.skipif(sys.version_info > (3, 12), reason="Fails on 3.13") def test_crashtracker_set_tag_profiler_config(run_python_code_in_subprocess): port, sock = utils.crashtracker_receiver_bind() assert sock diff --git a/tests/internal/symbol_db/test_symbols.py b/tests/internal/symbol_db/test_symbols.py index a97f6c5bcee..56fa45b3edc 100644 --- a/tests/internal/symbol_db/test_symbols.py +++ b/tests/internal/symbol_db/test_symbols.py @@ -1,5 +1,6 @@ from importlib.machinery import ModuleSpec from pathlib import Path +import sys from types import ModuleType import typing as t @@ -22,6 +23,7 @@ def foo(a, b, c=None): assert {s.name for s in symbols if s.symbol_type == SymbolType.LOCAL} == {"loc"} +@pytest.mark.skipif(sys.version_info > (3, 12), reason="fails on 3.13") def test_symbols_class(): class Sup: pass diff --git a/tests/internal/test_forksafe.py b/tests/internal/test_forksafe.py index e9c5a42c9ef..f9a32f460c5 100644 --- a/tests/internal/test_forksafe.py +++ b/tests/internal/test_forksafe.py @@ -1,5 +1,6 @@ from collections import Counter import os +import sys import pytest @@ -299,6 +300,7 @@ def 
fn(): assert exit_code == 42 +@pytest.mark.skipif(sys.version_info > (3, 12), reason="fails on 3.13") @pytest.mark.subprocess( out=lambda _: Counter(_) == {"C": 3, "T": 4}, err=None, diff --git a/tests/internal/test_injection.py b/tests/internal/test_injection.py index 3b74c589d62..871726620a8 100644 --- a/tests/internal/test_injection.py +++ b/tests/internal/test_injection.py @@ -1,4 +1,5 @@ from contextlib import contextmanager +import sys import mock import pytest @@ -205,6 +206,7 @@ def test_inject_in_loop(): assert hook.call_count == n +@pytest.mark.skipif(sys.version_info > (3, 12), reason="Fails on 3.13") def test_inject_in_generator(): lo = next(iter(linenos(generator_target))) hook = mock.Mock() diff --git a/tests/internal/test_wrapping.py b/tests/internal/test_wrapping.py index d27eadac43b..7c9a071545d 100644 --- a/tests/internal/test_wrapping.py +++ b/tests/internal/test_wrapping.py @@ -95,6 +95,7 @@ def f(a, b, c=None): assert not channel1 and not channel2 +@pytest.mark.skipif(sys.version_info > (3, 12), reason="segfault on 3.13") def test_wrap_generator(): channel = [] @@ -116,6 +117,7 @@ def g(): assert list(g()) == list(range(10)) == channel +@pytest.mark.skipif(sys.version_info > (3, 12), reason="segfault on 3.13") def test_wrap_generator_send(): def wrapper(f, args, kwargs): return f(*args, **kwargs) @@ -142,6 +144,7 @@ def g(): assert list(range(10)) == channel +@pytest.mark.skipif(sys.version_info > (3, 12), reason="segfault on 3.13") def test_wrap_generator_throw_close(): def wrapper_maker(channel): def wrapper(f, args, kwargs): @@ -215,6 +218,7 @@ def f(): assert [frame.f_code.co_name for frame in f()[:4]] == ["f", "wrapper", "f", "test_wrap_stack"] +@pytest.mark.skipif(sys.version_info > (3, 12), reason="segfault on 3.13") @pytest.mark.asyncio async def test_wrap_async_context_manager_exception_on_exit(): def wrapper(f, args, kwargs): @@ -231,6 +235,7 @@ async def g(): await acm.__aexit__(ValueError, None, None) 
+@pytest.mark.skipif(sys.version_info > (3, 12), reason="segfault on 3.13") def test_wrap_generator_yield_from(): channel = [] @@ -304,6 +309,7 @@ def wrapper(f, args, kwargs): assert f(1, path="bar", foo="baz") == (1, (), "bar", {"foo": "baz"}) +@pytest.mark.skipif(sys.version_info > (3, 12), reason="segfault on 3.13") @pytest.mark.asyncio async def test_async_generator(): async def stream(): @@ -340,6 +346,7 @@ async def agwrapper(f, args, kwargs): assert awrapper_called +@pytest.mark.skipif(sys.version_info > (3, 12), reason="segfault on 3.13") @pytest.mark.asyncio async def test_wrap_async_generator_send(): def wrapper(f, args, kwargs): @@ -372,6 +379,7 @@ async def consume(): await consume() +@pytest.mark.skipif(sys.version_info > (3, 12), reason="segfault on 3.13") @pytest.mark.asyncio async def test_double_async_for_with_exception(): channel = None @@ -416,6 +424,7 @@ async def stream(): b"".join([_ async for _ in s]) +@pytest.mark.skipif(sys.version_info > (3, 12), reason="segfault on 3.13") @pytest.mark.asyncio async def test_wrap_async_generator_throw_close(): channel = [] diff --git a/tests/llmobs/_utils.py b/tests/llmobs/_utils.py index d39e69808fb..0ecdde36ee6 100644 --- a/tests/llmobs/_utils.py +++ b/tests/llmobs/_utils.py @@ -111,8 +111,7 @@ def _expected_llmobs_llm_span_event( meta_dict.update({"model_name": model_name}) if model_provider is not None: meta_dict.update({"model_provider": model_provider}) - if metadata is not None: - meta_dict.update({"metadata": metadata}) + meta_dict.update({"metadata": metadata or {}}) if parameters is not None: meta_dict["input"].update({"parameters": parameters}) span_event["meta"].update(meta_dict) @@ -163,8 +162,7 @@ def _expected_llmobs_non_llm_span_event( meta_dict["input"].update({"value": input_value}) if parameters is not None: meta_dict["input"].update({"parameters": parameters}) - if metadata is not None: - meta_dict.update({"metadata": metadata}) + meta_dict.update({"metadata": metadata or {}}) if 
output_value is not None: meta_dict["output"].update({"value": output_value}) if not meta_dict["input"]: @@ -324,6 +322,45 @@ def _chat_completion_event(): } +def _chat_completion_event_with_unserializable_field(): + return { + "span_id": "12345678902", + "trace_id": "98765432102", + "parent_id": "", + "session_id": "98765432102", + "name": "chat_completion_span", + "tags": ["version:", "env:", "service:tests.llmobs", "source:integration"], + "start_ns": 1707763310981223936, + "duration": 12345678900, + "error": 0, + "meta": { + "span.kind": "llm", + "model_name": "gpt-3.5-turbo", + "model_provider": "openai", + "metadata": {"unserializable": object()}, + "input": { + "messages": [ + { + "role": "system", + "content": "You are an evil dark lord looking for his one ring to rule them all", + }, + {"role": "user", "content": "I am a hobbit looking to go to Mordor"}, + ], + "parameters": {"temperature": 0.9, "max_tokens": 256}, + }, + "output": { + "messages": [ + { + "content": "Ah, a bold and foolish hobbit seeking to challenge my dominion in Mordor. Very well, little creature, I shall play along. 
But know that I am always watching, and your quest will not go unnoticed", # noqa: E501 + "role": "assistant", + }, + ] + }, + }, + "metrics": {"input_tokens": 64, "output_tokens": 128, "total_tokens": 192}, + } + + def _large_event(): return { "span_id": "12345678903", @@ -552,6 +589,7 @@ def _expected_ragas_spans(ragas_inputs=None): "span.kind": "workflow", "input": {"value": mock.ANY}, "output": {"value": mock.ANY}, + "metadata": {}, }, "metrics": {}, "tags": expected_ragas_trace_tags(), @@ -568,6 +606,7 @@ def _expected_ragas_spans(ragas_inputs=None): "span.kind": "workflow", "input": {"value": mock.ANY}, "output": {"value": mock.ANY}, + "metadata": {}, }, "metrics": {}, "tags": expected_ragas_trace_tags(), @@ -580,7 +619,7 @@ def _expected_ragas_spans(ragas_inputs=None): "start_ns": mock.ANY, "duration": mock.ANY, "status": "ok", - "meta": {"span.kind": "task"}, + "meta": {"span.kind": "task", "metadata": {}}, "metrics": {}, "tags": expected_ragas_trace_tags(), }, @@ -596,6 +635,7 @@ def _expected_ragas_spans(ragas_inputs=None): "span.kind": "workflow", "input": {"value": mock.ANY}, "output": {"value": mock.ANY}, + "metadata": {}, }, "metrics": {}, "tags": expected_ragas_trace_tags(), @@ -608,7 +648,7 @@ def _expected_ragas_spans(ragas_inputs=None): "start_ns": mock.ANY, "duration": mock.ANY, "status": "ok", - "meta": {"span.kind": "task"}, + "meta": {"span.kind": "task", "metadata": {}}, "metrics": {}, "tags": expected_ragas_trace_tags(), }, diff --git a/tests/llmobs/conftest.py b/tests/llmobs/conftest.py index 0b0ce8b7964..a7d467b3985 100644 --- a/tests/llmobs/conftest.py +++ b/tests/llmobs/conftest.py @@ -6,6 +6,7 @@ from ddtrace.internal.utils.http import Response from ddtrace.llmobs import LLMObs as llmobs_service from ddtrace.llmobs._evaluators.ragas.faithfulness import RagasFaithfulnessEvaluator +from ddtrace.llmobs._writer import LLMObsSpanWriter from tests.llmobs._utils import logs_vcr from tests.utils import DummyTracer from tests.utils import 
override_env @@ -212,3 +213,49 @@ def mock_ragas_evaluator(mock_llmobs_eval_metric_writer, ragas): LLMObsMockRagas.return_value = 1.0 yield RagasFaithfulnessEvaluator patcher.stop() + + +@pytest.fixture +def tracer(): + return DummyTracer() + + +@pytest.fixture +def llmobs_env(): + return { + "DD_API_KEY": "", + "DD_LLMOBS_ML_APP": "unnamed-ml-app", + } + + +class TestLLMObsSpanWriter(LLMObsSpanWriter): + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self.events = [] + + def enqueue(self, event): + self.events.append(event) + + +@pytest.fixture +def llmobs_span_writer(): + yield TestLLMObsSpanWriter(interval=1.0, timeout=1.0) + + +@pytest.fixture +def llmobs(monkeypatch, tracer, llmobs_env, llmobs_span_writer): + for env, val in llmobs_env.items(): + monkeypatch.setenv(env, val) + + # TODO: remove once rest of tests are moved off of global config tampering + with override_global_config(dict(_llmobs_ml_app=llmobs_env.get("DD_LLMOBS_ML_APP"))): + llmobs_service.enable(_tracer=tracer) + llmobs_service._instance._llmobs_span_writer = llmobs_span_writer + llmobs_service._instance._trace_processor._span_writer = llmobs_span_writer + yield llmobs + llmobs_service.disable() + + +@pytest.fixture +def llmobs_events(llmobs, llmobs_span_writer): + return llmobs_span_writer.events diff --git a/tests/llmobs/test_llmobs.py b/tests/llmobs/test_llmobs.py new file mode 100644 index 00000000000..1bae7efe9ed --- /dev/null +++ b/tests/llmobs/test_llmobs.py @@ -0,0 +1,254 @@ +import mock +import pytest + +from ddtrace.ext import SpanTypes +from ddtrace.llmobs import _constants as const +from ddtrace.llmobs._utils import _get_llmobs_parent_id +from ddtrace.llmobs._utils import _get_session_id +from tests.llmobs._utils import _expected_llmobs_llm_span_event + + +@pytest.fixture +def mock_logs(): + with mock.patch("ddtrace.llmobs._trace_processor.log") as mock_logs: + yield mock_logs + + +class TestMLApp: + @pytest.mark.parametrize("llmobs_env", 
[{"DD_LLMOBS_ML_APP": ""}]) + def test_tag_defaults_to_env_var(self, tracer, llmobs_env, llmobs_events): + """Test that no ml_app defaults to the environment variable DD_LLMOBS_ML_APP.""" + with tracer.trace("root_llm_span", span_type=SpanTypes.LLM) as llm_span: + llm_span._set_ctx_item(const.SPAN_KIND, "llm") + assert "ml_app:" in llmobs_events[0]["tags"] + + @pytest.mark.parametrize("llmobs_env", [{"DD_LLMOBS_ML_APP": ""}]) + def test_tag_overrides_env_var(self, tracer, llmobs_env, llmobs_events): + """Test that when ml_app is set on the span, it overrides the environment variable DD_LLMOBS_ML_APP.""" + with tracer.trace("root_llm_span", span_type=SpanTypes.LLM) as llm_span: + llm_span._set_ctx_item(const.SPAN_KIND, "llm") + llm_span._set_ctx_item(const.ML_APP, "test-ml-app") + assert "ml_app:test-ml-app" in llmobs_events[0]["tags"] + + def test_propagates_ignore_non_llmobs_spans(self, tracer, llmobs_events): + """ + Test that when ml_app is not set, we propagate from nearest LLMObs ancestor + even if there are non-LLMObs spans in between. 
+ """ + with tracer.trace("root_llm_span", span_type=SpanTypes.LLM) as llm_span: + llm_span._set_ctx_item(const.SPAN_KIND, "llm") + llm_span._set_ctx_item(const.ML_APP, "test-ml-app") + with tracer.trace("child_span"): + with tracer.trace("llm_grandchild_span", span_type=SpanTypes.LLM) as grandchild_span: + grandchild_span._set_ctx_item(const.SPAN_KIND, "llm") + with tracer.trace("great_grandchild_span", span_type=SpanTypes.LLM) as great_grandchild_span: + great_grandchild_span._set_ctx_item(const.SPAN_KIND, "llm") + assert len(llmobs_events) == 3 + for llmobs_event in llmobs_events: + assert "ml_app:test-ml-app" in llmobs_event["tags"] + + +def test_set_correct_parent_id(tracer): + """Test that the parent_id is set as the span_id of the nearest LLMObs span in the span's ancestor tree.""" + with tracer.trace("root"): + with tracer.trace("llm_span", span_type=SpanTypes.LLM) as llm_span: + pass + assert _get_llmobs_parent_id(llm_span) is None + with tracer.trace("root_llm_span", span_type=SpanTypes.LLM) as root_span: + with tracer.trace("child_span") as child_span: + with tracer.trace("llm_span", span_type=SpanTypes.LLM) as grandchild_span: + pass + assert _get_llmobs_parent_id(root_span) is None + assert _get_llmobs_parent_id(child_span) == str(root_span.span_id) + assert _get_llmobs_parent_id(grandchild_span) == str(root_span.span_id) + + +class TestSessionId: + def test_propagate_from_ancestors(self, tracer): + """ + Test that session_id is propagated from the nearest LLMObs span in the span's ancestor tree + if no session_id is not set on the span itself. 
+ """ + with tracer.trace("root_llm_span", span_type=SpanTypes.LLM) as root_span: + root_span._set_ctx_item(const.SESSION_ID, "test_session_id") + with tracer.trace("child_span"): + with tracer.trace("llm_span", span_type=SpanTypes.LLM) as llm_span: + pass + assert _get_session_id(llm_span) == "test_session_id" + + def test_if_set_manually(self, tracer): + """Test that session_id is extracted from the span if it is already set manually.""" + with tracer.trace("root_llm_span", span_type=SpanTypes.LLM) as root_span: + root_span._set_ctx_item(const.SESSION_ID, "test_session_id") + with tracer.trace("child_span"): + with tracer.trace("llm_span", span_type=SpanTypes.LLM) as llm_span: + llm_span._set_ctx_item(const.SESSION_ID, "test_different_session_id") + assert _get_session_id(llm_span) == "test_different_session_id" + + def test_propagates_ignore_non_llmobs_spans(self, tracer, llmobs_events): + """ + Test that when session_id is not set, we propagate from nearest LLMObs ancestor + even if there are non-LLMObs spans in between. 
+ """ + with tracer.trace("root_llm_span", span_type=SpanTypes.LLM) as llm_span: + llm_span._set_ctx_item(const.SPAN_KIND, "llm") + llm_span._set_ctx_item(const.SESSION_ID, "session-123") + with tracer.trace("child_span"): + with tracer.trace("llm_grandchild_span", span_type=SpanTypes.LLM) as grandchild_span: + grandchild_span._set_ctx_item(const.SPAN_KIND, "llm") + with tracer.trace("great_grandchild_span", span_type=SpanTypes.LLM) as great_grandchild_span: + great_grandchild_span._set_ctx_item(const.SPAN_KIND, "llm") + + llm_event, grandchild_event, great_grandchild_event = llmobs_events + assert llm_event["session_id"] == "session-123" + assert grandchild_event["session_id"] == "session-123" + assert great_grandchild_event["session_id"] == "session-123" + + +def test_input_value_is_set(tracer, llmobs_events): + """Test that input value is set on the span event if they are present on the span.""" + with tracer.trace("root_llm_span", span_type=SpanTypes.LLM) as llm_span: + llm_span._set_ctx_item(const.SPAN_KIND, "llm") + llm_span._set_ctx_item(const.INPUT_VALUE, "value") + assert llmobs_events[0]["meta"]["input"]["value"] == "value" + + +def test_input_messages_are_set(tracer, llmobs_events): + """Test that input messages are set on the span event if they are present on the span.""" + with tracer.trace("root_llm_span", span_type=SpanTypes.LLM) as llm_span: + llm_span._set_ctx_item(const.SPAN_KIND, "llm") + llm_span._set_ctx_item(const.INPUT_MESSAGES, [{"content": "message", "role": "user"}]) + assert llmobs_events[0]["meta"]["input"]["messages"] == [{"content": "message", "role": "user"}] + + +def test_input_parameters_are_set(tracer, llmobs_events): + """Test that input parameters are set on the span event if they are present on the span.""" + with tracer.trace("root_llm_span", span_type=SpanTypes.LLM) as llm_span: + llm_span._set_ctx_item(const.SPAN_KIND, "llm") + llm_span._set_ctx_item(const.INPUT_PARAMETERS, {"key": "value"}) + assert 
llmobs_events[0]["meta"]["input"]["parameters"] == {"key": "value"} + + +def test_output_messages_are_set(tracer, llmobs_events): + """Test that output messages are set on the span event if they are present on the span.""" + with tracer.trace("root_llm_span", span_type=SpanTypes.LLM) as llm_span: + llm_span._set_ctx_item(const.SPAN_KIND, "llm") + llm_span._set_ctx_item(const.OUTPUT_MESSAGES, [{"content": "message", "role": "user"}]) + assert llmobs_events[0]["meta"]["output"]["messages"] == [{"content": "message", "role": "user"}] + + +def test_output_value_is_set(tracer, llmobs_events): + """Test that output value is set on the span event if they are present on the span.""" + with tracer.trace("root_llm_span", span_type=SpanTypes.LLM) as llm_span: + llm_span._set_ctx_item(const.SPAN_KIND, "llm") + llm_span._set_ctx_item(const.OUTPUT_VALUE, "value") + assert llmobs_events[0]["meta"]["output"]["value"] == "value" + + +def test_prompt_is_set(tracer, llmobs_events): + """Test that prompt is set on the span event if they are present on the span.""" + with tracer.trace("root_llm_span", span_type=SpanTypes.LLM) as llm_span: + llm_span._set_ctx_item(const.SPAN_KIND, "llm") + llm_span._set_ctx_item(const.INPUT_PROMPT, {"variables": {"var1": "var2"}}) + assert llmobs_events[0]["meta"]["input"]["prompt"] == {"variables": {"var1": "var2"}} + + +def test_prompt_is_not_set_for_non_llm_spans(tracer, llmobs_events): + """Test that prompt is NOT set on the span event if the span is not an LLM span.""" + with tracer.trace("task_span", span_type=SpanTypes.LLM) as task_span: + task_span._set_ctx_item(const.SPAN_KIND, "task") + task_span._set_ctx_item(const.INPUT_VALUE, "ival") + task_span._set_ctx_item(const.INPUT_PROMPT, {"variables": {"var1": "var2"}}) + assert llmobs_events[0]["meta"]["input"].get("prompt") is None + + +def test_metadata_is_set(tracer, llmobs_events): + """Test that metadata is set on the span event if it is present on the span.""" + with 
tracer.trace("root_llm_span", span_type=SpanTypes.LLM) as llm_span: + llm_span._set_ctx_item(const.SPAN_KIND, "llm") + llm_span._set_ctx_item(const.METADATA, {"key": "value"}) + assert llmobs_events[0]["meta"]["metadata"] == {"key": "value"} + + +def test_metrics_are_set(tracer, llmobs_events): + """Test that metadata is set on the span event if it is present on the span.""" + with tracer.trace("root_llm_span", span_type=SpanTypes.LLM) as llm_span: + llm_span._set_ctx_item(const.SPAN_KIND, "llm") + llm_span._set_ctx_item(const.METRICS, {"tokens": 100}) + assert llmobs_events[0]["metrics"] == {"tokens": 100} + + +def test_langchain_span_name_is_set_to_class_name(tracer, llmobs_events): + """Test span names for langchain auto-instrumented spans is set correctly.""" + with tracer.trace(const.LANGCHAIN_APM_SPAN_NAME, resource="expected_name", span_type=SpanTypes.LLM) as llm_span: + llm_span._set_ctx_item(const.SPAN_KIND, "llm") + assert llmobs_events[0]["name"] == "expected_name" + + +def test_error_is_set(tracer, llmobs_events): + """Test that error is set on the span event if it is present on the span.""" + with pytest.raises(ValueError): + with tracer.trace("root_llm_span", span_type=SpanTypes.LLM) as llm_span: + llm_span._set_ctx_item(const.SPAN_KIND, "llm") + raise ValueError("error") + span_event = llmobs_events[0] + assert span_event["meta"]["error.message"] == "error" + assert "ValueError" in span_event["meta"]["error.type"] + assert 'raise ValueError("error")' in span_event["meta"]["error.stack"] + + +def test_model_provider_defaults_to_custom(tracer, llmobs_events): + """Test that model provider defaults to "custom" if not provided.""" + with tracer.trace("root_llm_span", span_type=SpanTypes.LLM) as llm_span: + llm_span._set_ctx_item(const.SPAN_KIND, "llm") + llm_span._set_ctx_item(const.MODEL_NAME, "model_name") + span_event = llmobs_events[0] + assert span_event["meta"]["model_name"] == "model_name" + assert span_event["meta"]["model_provider"] == "custom" 
+ + +def test_model_not_set_if_not_llm_kind_span(tracer, llmobs_events): + """Test that model name and provider not set if non-LLM span.""" + with tracer.trace("root_workflow_span", span_type=SpanTypes.LLM) as span: + span._set_ctx_item(const.SPAN_KIND, "workflow") + span._set_ctx_item(const.MODEL_NAME, "model_name") + span_event = llmobs_events[0] + assert "model_name" not in span_event["meta"] + assert "model_provider" not in span_event["meta"] + + +def test_model_and_provider_are_set(tracer, llmobs_events): + """Test that model and provider are set on the span event if they are present on the LLM-kind span.""" + with tracer.trace("root_llm_span", span_type=SpanTypes.LLM) as llm_span: + llm_span._set_ctx_item(const.SPAN_KIND, "llm") + llm_span._set_ctx_item(const.MODEL_NAME, "model_name") + llm_span._set_ctx_item(const.MODEL_PROVIDER, "model_provider") + span_event = llmobs_events[0] + assert span_event["meta"]["model_name"] == "model_name" + assert span_event["meta"]["model_provider"] == "model_provider" + + +def test_malformed_span_logs_error_instead_of_raising(mock_logs, tracer, llmobs_events): + """Test that a trying to create a span event from a malformed span will log an error instead of crashing.""" + with tracer.trace("root_llm_span", span_type=SpanTypes.LLM) as llm_span: + # span does not have SPAN_KIND tag + pass + mock_logs.error.assert_called_once_with( + "Error generating LLMObs span event for span %s, likely due to malformed span", llm_span + ) + assert len(llmobs_events) == 0 + + +def test_processor_only_creates_llmobs_span_event(tracer, llmobs_events): + """Test that the LLMObsTraceProcessor only creates LLMObs span events for LLM span types.""" + with tracer.trace("root_llm_span", service="tests.llmobs", span_type=SpanTypes.LLM) as root_span: + root_span._set_ctx_item(const.SPAN_KIND, "llm") + with tracer.trace("child_span"): + with tracer.trace("llm_span", span_type=SpanTypes.LLM) as grandchild_span: + 
grandchild_span._set_ctx_item(const.SPAN_KIND, "llm") + expected_grandchild_llmobs_span = _expected_llmobs_llm_span_event(grandchild_span, "llm") + expected_grandchild_llmobs_span["parent_id"] = str(root_span.span_id) + + assert len(llmobs_events) == 2 + assert llmobs_events[0] == _expected_llmobs_llm_span_event(root_span, "llm") + assert llmobs_events[1] == expected_grandchild_llmobs_span diff --git a/tests/llmobs/test_llmobs_decorators.py b/tests/llmobs/test_llmobs_decorators.py index 347fb55f652..e94d72aec64 100644 --- a/tests/llmobs/test_llmobs_decorators.py +++ b/tests/llmobs/test_llmobs_decorators.py @@ -1,5 +1,3 @@ -import json - import mock import pytest @@ -469,7 +467,7 @@ def f(prompt, arg_2, kwarg_1=None, kwarg_2=None): _expected_llmobs_non_llm_span_event( span, decorator_name, - input_value=json.dumps({"prompt": "test_prompt", "arg_2": "arg_2", "kwarg_2": 12345}), + input_value=str({"prompt": "test_prompt", "arg_2": "arg_2", "kwarg_2": 12345}), output_value="test_prompt", session_id="test_session_id", ) @@ -489,7 +487,7 @@ def test_retrieval(query, arg_2, kwarg_1=None, kwarg_2=None): _expected_llmobs_non_llm_span_event( span, "retrieval", - input_value=json.dumps({"query": "test_query", "arg_2": "arg_2", "kwarg_2": 12345}), + input_value=str({"query": "test_query", "arg_2": "arg_2", "kwarg_2": 12345}), session_id="test_session_id", ) ) @@ -880,7 +878,7 @@ def get_next_element(alist): _expected_llmobs_non_llm_span_event( span, "workflow", - input_value=json.dumps({"alist": [0, 1, 2, 3, 4, 5, 6, 7, 8, 9]}), + input_value=str({"alist": [0, 1, 2, 3, 4, 5, 6, 7, 8, 9]}), error=span.get_tag("error.type"), error_message=span.get_tag("error.message"), error_stack=span.get_tag("error.stack"), diff --git a/tests/llmobs/test_llmobs_service.py b/tests/llmobs/test_llmobs_service.py index 160023f5df7..98748250c3a 100644 --- a/tests/llmobs/test_llmobs_service.py +++ b/tests/llmobs/test_llmobs_service.py @@ -1,4 +1,3 @@ -import json import os import threading import 
time @@ -31,6 +30,7 @@ from ddtrace.llmobs._constants import SPAN_KIND from ddtrace.llmobs._constants import SPAN_START_WHILE_DISABLED_WARNING from ddtrace.llmobs._constants import TAGS +from ddtrace.llmobs._llmobs import SUPPORTED_LLMOBS_INTEGRATIONS from ddtrace.llmobs._llmobs import LLMObsTraceProcessor from ddtrace.llmobs.utils import Prompt from tests.llmobs._utils import _expected_llmobs_eval_metric_event @@ -144,6 +144,65 @@ def test_service_enable_already_enabled(mock_logs): mock_logs.debug.assert_has_calls([mock.call("%s already enabled", "LLMObs")]) +@mock.patch("ddtrace.llmobs._llmobs.patch") +def test_service_enable_patches_llmobs_integrations(mock_tracer_patch): + with override_global_config(dict(_dd_api_key="", _llmobs_ml_app="")): + llmobs_service.enable() + mock_tracer_patch.assert_called_once() + kwargs = mock_tracer_patch.call_args[1] + for module in SUPPORTED_LLMOBS_INTEGRATIONS.values(): + assert kwargs[module] is True + llmobs_service.disable() + + +@mock.patch("ddtrace.llmobs._llmobs.patch") +def test_service_enable_does_not_override_global_patch_modules(mock_tracer_patch, monkeypatch): + monkeypatch.setenv("DD_PATCH_MODULES", "openai:false") + with override_global_config(dict(_dd_api_key="", _llmobs_ml_app="")): + llmobs_service.enable() + mock_tracer_patch.assert_called_once() + kwargs = mock_tracer_patch.call_args[1] + for module in SUPPORTED_LLMOBS_INTEGRATIONS.values(): + if module == "openai": + assert kwargs[module] is False + continue + assert kwargs[module] is True + llmobs_service.disable() + + +@mock.patch("ddtrace.llmobs._llmobs.patch") +def test_service_enable_does_not_override_integration_enabled_env_vars(mock_tracer_patch, monkeypatch): + monkeypatch.setenv("DD_TRACE_OPENAI_ENABLED", "false") + with override_global_config(dict(_dd_api_key="", _llmobs_ml_app="")): + llmobs_service.enable() + mock_tracer_patch.assert_called_once() + kwargs = mock_tracer_patch.call_args[1] + for module in SUPPORTED_LLMOBS_INTEGRATIONS.values(): + 
if module == "openai": + assert kwargs[module] is False + continue + assert kwargs[module] is True + llmobs_service.disable() + + +@mock.patch("ddtrace.llmobs._llmobs.patch") +def test_service_enable_does_not_override_global_patch_config(mock_tracer_patch, monkeypatch): + """Test that _patch_integrations() ensures `DD_PATCH_MODULES` overrides `DD_TRACE__ENABLED`.""" + monkeypatch.setenv("DD_TRACE_OPENAI_ENABLED", "true") + monkeypatch.setenv("DD_TRACE_ANTHROPIC_ENABLED", "false") + monkeypatch.setenv("DD_PATCH_MODULES", "openai:false") + with override_global_config(dict(_dd_api_key="", _llmobs_ml_app="")): + llmobs_service.enable() + mock_tracer_patch.assert_called_once() + kwargs = mock_tracer_patch.call_args[1] + for module in SUPPORTED_LLMOBS_INTEGRATIONS.values(): + if module in ("openai", "anthropic"): + assert kwargs[module] is False + continue + assert kwargs[module] is True + llmobs_service.disable() + + def test_start_span_while_disabled_logs_warning(LLMObs, mock_logs): LLMObs.disable() _ = LLMObs.llm(model_name="test_model", name="test_llm_call", model_provider="test_provider") @@ -177,15 +236,15 @@ def test_start_span_uses_kind_as_default_name(LLMObs): def test_start_span_with_session_id(LLMObs): with LLMObs.llm(model_name="test_model", session_id="test_session_id") as span: - assert span.get_tag(SESSION_ID) == "test_session_id" + assert span._get_ctx_item(SESSION_ID) == "test_session_id" with LLMObs.tool(session_id="test_session_id") as span: - assert span.get_tag(SESSION_ID) == "test_session_id" + assert span._get_ctx_item(SESSION_ID) == "test_session_id" with LLMObs.task(session_id="test_session_id") as span: - assert span.get_tag(SESSION_ID) == "test_session_id" + assert span._get_ctx_item(SESSION_ID) == "test_session_id" with LLMObs.workflow(session_id="test_session_id") as span: - assert span.get_tag(SESSION_ID) == "test_session_id" + assert span._get_ctx_item(SESSION_ID) == "test_session_id" with LLMObs.agent(session_id="test_session_id") as span: 
- assert span.get_tag(SESSION_ID) == "test_session_id" + assert span._get_ctx_item(SESSION_ID) == "test_session_id" def test_session_id_becomes_top_level_field(LLMObs, mock_llmobs_span_writer): @@ -211,9 +270,9 @@ def test_llm_span(LLMObs, mock_llmobs_span_writer): assert span.name == "test_llm_call" assert span.resource == "llm" assert span.span_type == "llm" - assert span.get_tag(SPAN_KIND) == "llm" - assert span.get_tag(MODEL_NAME) == "test_model" - assert span.get_tag(MODEL_PROVIDER) == "test_provider" + assert span._get_ctx_item(SPAN_KIND) == "llm" + assert span._get_ctx_item(MODEL_NAME) == "test_model" + assert span._get_ctx_item(MODEL_PROVIDER) == "test_provider" mock_llmobs_span_writer.enqueue.assert_called_with( _expected_llmobs_llm_span_event(span, "llm", model_name="test_model", model_provider="test_provider") @@ -225,9 +284,9 @@ def test_llm_span_agentless(AgentlessLLMObs, mock_llmobs_span_agentless_writer): assert span.name == "test_llm_call" assert span.resource == "llm" assert span.span_type == "llm" - assert span.get_tag(SPAN_KIND) == "llm" - assert span.get_tag(MODEL_NAME) == "test_model" - assert span.get_tag(MODEL_PROVIDER) == "test_provider" + assert span._get_ctx_item(SPAN_KIND) == "llm" + assert span._get_ctx_item(MODEL_NAME) == "test_model" + assert span._get_ctx_item(MODEL_PROVIDER) == "test_provider" mock_llmobs_span_agentless_writer.enqueue.assert_called_with( _expected_llmobs_llm_span_event(span, "llm", model_name="test_model", model_provider="test_provider") @@ -236,7 +295,7 @@ def test_llm_span_agentless(AgentlessLLMObs, mock_llmobs_span_agentless_writer): def test_llm_span_no_model_sets_default(LLMObs, mock_llmobs_span_writer): with LLMObs.llm(name="test_llm_call", model_provider="test_provider") as span: - assert span.get_tag(MODEL_NAME) == "custom" + assert span._get_ctx_item(MODEL_NAME) == "custom" mock_llmobs_span_writer.enqueue.assert_called_with( _expected_llmobs_llm_span_event(span, "llm", model_name="custom", 
model_provider="test_provider") @@ -248,9 +307,9 @@ def test_default_model_provider_set_to_custom(LLMObs): assert span.name == "test_llm_call" assert span.resource == "llm" assert span.span_type == "llm" - assert span.get_tag(SPAN_KIND) == "llm" - assert span.get_tag(MODEL_NAME) == "test_model" - assert span.get_tag(MODEL_PROVIDER) == "custom" + assert span._get_ctx_item(SPAN_KIND) == "llm" + assert span._get_ctx_item(MODEL_NAME) == "test_model" + assert span._get_ctx_item(MODEL_PROVIDER) == "custom" def test_tool_span(LLMObs, mock_llmobs_span_writer): @@ -258,7 +317,7 @@ def test_tool_span(LLMObs, mock_llmobs_span_writer): assert span.name == "test_tool" assert span.resource == "tool" assert span.span_type == "llm" - assert span.get_tag(SPAN_KIND) == "tool" + assert span._get_ctx_item(SPAN_KIND) == "tool" mock_llmobs_span_writer.enqueue.assert_called_with(_expected_llmobs_non_llm_span_event(span, "tool")) @@ -267,7 +326,7 @@ def test_tool_span_agentless(AgentlessLLMObs, mock_llmobs_span_agentless_writer) assert span.name == "test_tool" assert span.resource == "tool" assert span.span_type == "llm" - assert span.get_tag(SPAN_KIND) == "tool" + assert span._get_ctx_item(SPAN_KIND) == "tool" mock_llmobs_span_agentless_writer.enqueue.assert_called_with(_expected_llmobs_non_llm_span_event(span, "tool")) @@ -276,7 +335,7 @@ def test_task_span(LLMObs, mock_llmobs_span_writer): assert span.name == "test_task" assert span.resource == "task" assert span.span_type == "llm" - assert span.get_tag(SPAN_KIND) == "task" + assert span._get_ctx_item(SPAN_KIND) == "task" mock_llmobs_span_writer.enqueue.assert_called_with(_expected_llmobs_non_llm_span_event(span, "task")) @@ -285,7 +344,7 @@ def test_task_span_agentless(AgentlessLLMObs, mock_llmobs_span_agentless_writer) assert span.name == "test_task" assert span.resource == "task" assert span.span_type == "llm" - assert span.get_tag(SPAN_KIND) == "task" + assert span._get_ctx_item(SPAN_KIND) == "task" 
mock_llmobs_span_agentless_writer.enqueue.assert_called_with(_expected_llmobs_non_llm_span_event(span, "task")) @@ -294,7 +353,7 @@ def test_workflow_span(LLMObs, mock_llmobs_span_writer): assert span.name == "test_workflow" assert span.resource == "workflow" assert span.span_type == "llm" - assert span.get_tag(SPAN_KIND) == "workflow" + assert span._get_ctx_item(SPAN_KIND) == "workflow" mock_llmobs_span_writer.enqueue.assert_called_with(_expected_llmobs_non_llm_span_event(span, "workflow")) @@ -303,7 +362,7 @@ def test_workflow_span_agentless(AgentlessLLMObs, mock_llmobs_span_agentless_wri assert span.name == "test_workflow" assert span.resource == "workflow" assert span.span_type == "llm" - assert span.get_tag(SPAN_KIND) == "workflow" + assert span._get_ctx_item(SPAN_KIND) == "workflow" mock_llmobs_span_agentless_writer.enqueue.assert_called_with(_expected_llmobs_non_llm_span_event(span, "workflow")) @@ -312,7 +371,7 @@ def test_agent_span(LLMObs, mock_llmobs_span_writer): assert span.name == "test_agent" assert span.resource == "agent" assert span.span_type == "llm" - assert span.get_tag(SPAN_KIND) == "agent" + assert span._get_ctx_item(SPAN_KIND) == "agent" mock_llmobs_span_writer.enqueue.assert_called_with(_expected_llmobs_llm_span_event(span, "agent")) @@ -321,13 +380,13 @@ def test_agent_span_agentless(AgentlessLLMObs, mock_llmobs_span_agentless_writer assert span.name == "test_agent" assert span.resource == "agent" assert span.span_type == "llm" - assert span.get_tag(SPAN_KIND) == "agent" + assert span._get_ctx_item(SPAN_KIND) == "agent" mock_llmobs_span_agentless_writer.enqueue.assert_called_with(_expected_llmobs_llm_span_event(span, "agent")) def test_embedding_span_no_model_sets_default(LLMObs, mock_llmobs_span_writer): with LLMObs.embedding(name="test_embedding", model_provider="test_provider") as span: - assert span.get_tag(MODEL_NAME) == "custom" + assert span._get_ctx_item(MODEL_NAME) == "custom" mock_llmobs_span_writer.enqueue.assert_called_with( 
_expected_llmobs_llm_span_event(span, "embedding", model_name="custom", model_provider="test_provider") ) @@ -338,9 +397,9 @@ def test_embedding_default_model_provider_set_to_custom(LLMObs): assert span.name == "test_embedding" assert span.resource == "embedding" assert span.span_type == "llm" - assert span.get_tag(SPAN_KIND) == "embedding" - assert span.get_tag(MODEL_NAME) == "test_model" - assert span.get_tag(MODEL_PROVIDER) == "custom" + assert span._get_ctx_item(SPAN_KIND) == "embedding" + assert span._get_ctx_item(MODEL_NAME) == "test_model" + assert span._get_ctx_item(MODEL_PROVIDER) == "custom" def test_embedding_span(LLMObs, mock_llmobs_span_writer): @@ -348,9 +407,9 @@ def test_embedding_span(LLMObs, mock_llmobs_span_writer): assert span.name == "test_embedding" assert span.resource == "embedding" assert span.span_type == "llm" - assert span.get_tag(SPAN_KIND) == "embedding" - assert span.get_tag(MODEL_NAME) == "test_model" - assert span.get_tag(MODEL_PROVIDER) == "test_provider" + assert span._get_ctx_item(SPAN_KIND) == "embedding" + assert span._get_ctx_item(MODEL_NAME) == "test_model" + assert span._get_ctx_item(MODEL_PROVIDER) == "test_provider" mock_llmobs_span_writer.enqueue.assert_called_with( _expected_llmobs_llm_span_event(span, "embedding", model_name="test_model", model_provider="test_provider") @@ -364,9 +423,9 @@ def test_embedding_span_agentless(AgentlessLLMObs, mock_llmobs_span_agentless_wr assert span.name == "test_embedding" assert span.resource == "embedding" assert span.span_type == "llm" - assert span.get_tag(SPAN_KIND) == "embedding" - assert span.get_tag(MODEL_NAME) == "test_model" - assert span.get_tag(MODEL_PROVIDER) == "test_provider" + assert span._get_ctx_item(SPAN_KIND) == "embedding" + assert span._get_ctx_item(MODEL_NAME) == "test_model" + assert span._get_ctx_item(MODEL_PROVIDER) == "test_provider" mock_llmobs_span_agentless_writer.enqueue.assert_called_with( _expected_llmobs_llm_span_event(span, "embedding", 
model_name="test_model", model_provider="test_provider") @@ -395,7 +454,7 @@ def test_annotate_finished_span_does_nothing(LLMObs, mock_logs): def test_annotate_parameters(LLMObs, mock_logs): with LLMObs.llm(model_name="test_model", name="test_llm_call", model_provider="test_provider") as span: LLMObs.annotate(span=span, parameters={"temperature": 0.9, "max_tokens": 50}) - assert json.loads(span.get_tag(INPUT_PARAMETERS)) == {"temperature": 0.9, "max_tokens": 50} + assert span._get_ctx_item(INPUT_PARAMETERS) == {"temperature": 0.9, "max_tokens": 50} mock_logs.warning.assert_called_once_with( "Setting parameters is deprecated, please set parameters and other metadata as tags instead." ) @@ -404,128 +463,92 @@ def test_annotate_parameters(LLMObs, mock_logs): def test_annotate_metadata(LLMObs): with LLMObs.llm(model_name="test_model", name="test_llm_call", model_provider="test_provider") as span: LLMObs.annotate(span=span, metadata={"temperature": 0.5, "max_tokens": 20, "top_k": 10, "n": 3}) - assert json.loads(span.get_tag(METADATA)) == {"temperature": 0.5, "max_tokens": 20, "top_k": 10, "n": 3} + assert span._get_ctx_item(METADATA) == {"temperature": 0.5, "max_tokens": 20, "top_k": 10, "n": 3} def test_annotate_metadata_wrong_type_raises_warning(LLMObs, mock_logs): with LLMObs.llm(model_name="test_model", name="test_llm_call", model_provider="test_provider") as span: LLMObs.annotate(span=span, metadata="wrong_metadata") - assert span.get_tag(METADATA) is None + assert span._get_ctx_item(METADATA) is None mock_logs.warning.assert_called_once_with("metadata must be a dictionary of string key-value pairs.") mock_logs.reset_mock() -def test_annotate_metadata_non_serializable_marks_with_placeholder_value(LLMObs): - with LLMObs.llm(model_name="test_model", name="test_llm_call", model_provider="test_provider") as span: - with mock.patch("ddtrace.llmobs._utils.log") as mock_logs: - LLMObs.annotate(span=span, metadata={"unserializable": object()}) - metadata = 
json.loads(span.get_tag(METADATA)) - assert metadata is not None - assert "[Unserializable object: " in span_event["tags"] - - -def test_ml_app_tag_overrides_env_var(): - """Test that when ml_app is set on the span, it overrides the environment variable DD_LLMOBS_ML_APP.""" - dummy_tracer = DummyTracer() - with override_global_config(dict(_llmobs_ml_app="")): - with dummy_tracer.trace("root_llm_span", span_type=SpanTypes.LLM) as llm_span: - llm_span.set_tag_str(SPAN_KIND, "llm") - llm_span.set_tag(ML_APP, "test-ml-app") - tp = LLMObsTraceProcessor(dummy_tracer._writer) - span_event, _ = tp._llmobs_span_event(llm_span) - assert "ml_app:test-ml-app" in span_event["tags"] - - -def test_ml_app_propagates_ignore_non_llmobs_spans(): - """ - Test that when ml_app is not set, we propagate from nearest LLMObs ancestor - even if there are non-LLMObs spans in between. - """ - dummy_tracer = DummyTracer() - with override_global_config(dict(_llmobs_ml_app="")): - with dummy_tracer.trace("root_llm_span", span_type=SpanTypes.LLM) as llm_span: - llm_span.set_tag_str(SPAN_KIND, "llm") - llm_span.set_tag(ML_APP, "test-ml-app") - with dummy_tracer.trace("child_span"): - with dummy_tracer.trace("llm_grandchild_span", span_type=SpanTypes.LLM) as grandchild_span: - grandchild_span.set_tag_str(SPAN_KIND, "llm") - with dummy_tracer.trace("great_grandchild_span", span_type=SpanTypes.LLM) as great_grandchild_span: - great_grandchild_span.set_tag_str(SPAN_KIND, "llm") - tp = LLMObsTraceProcessor(dummy_tracer._writer) - llm_span_event, _ = tp._llmobs_span_event(llm_span) - grandchild_span_event, _ = tp._llmobs_span_event(grandchild_span) - great_grandchild_span_event, _ = tp._llmobs_span_event(great_grandchild_span) - assert "ml_app:test-ml-app" in llm_span_event["tags"] - assert "ml_app:test-ml-app" in grandchild_span_event["tags"] - assert "ml_app:test-ml-app" in great_grandchild_span_event["tags"] - - -def test_malformed_span_logs_error_instead_of_raising(mock_logs): - """Test that a 
trying to create a span event from a malformed span will log an error instead of crashing.""" - dummy_tracer = DummyTracer() - mock_llmobs_span_writer = mock.MagicMock() - with dummy_tracer.trace("root_llm_span", span_type=SpanTypes.LLM) as llm_span: - # span does not have SPAN_KIND tag - pass - tp = LLMObsTraceProcessor(llmobs_span_writer=mock_llmobs_span_writer) - tp.process_trace([llm_span]) - mock_logs.error.assert_called_once_with( - "Error generating LLMObs span event for span %s, likely due to malformed span", llm_span - ) - mock_llmobs_span_writer.enqueue.assert_not_called() - - -def test_model_and_provider_are_set(): - """Test that model and provider are set on the span event if they are present on the LLM-kind span.""" - dummy_tracer = DummyTracer() - mock_llmobs_span_writer = mock.MagicMock() - with override_global_config(dict(_llmobs_ml_app="unnamed-ml-app")): - with dummy_tracer.trace("root_llm_span", span_type=SpanTypes.LLM) as llm_span: - llm_span.set_tag(SPAN_KIND, "llm") - llm_span.set_tag(MODEL_NAME, "model_name") - llm_span.set_tag(MODEL_PROVIDER, "model_provider") - tp = LLMObsTraceProcessor(llmobs_span_writer=mock_llmobs_span_writer) - span_event, _ = tp._llmobs_span_event(llm_span) - assert span_event["meta"]["model_name"] == "model_name" - assert span_event["meta"]["model_provider"] == "model_provider" - - -def test_model_provider_defaults_to_custom(): - """Test that model provider defaults to "custom" if not provided.""" - dummy_tracer = DummyTracer() - mock_llmobs_span_writer = mock.MagicMock() - with override_global_config(dict(_llmobs_ml_app="unnamed-ml-app")): - with dummy_tracer.trace("root_llm_span", span_type=SpanTypes.LLM) as llm_span: - llm_span.set_tag(SPAN_KIND, "llm") - llm_span.set_tag(MODEL_NAME, "model_name") - tp = LLMObsTraceProcessor(llmobs_span_writer=mock_llmobs_span_writer) - span_event, _ = tp._llmobs_span_event(llm_span) - assert span_event["meta"]["model_name"] == "model_name" - assert 
span_event["meta"]["model_provider"] == "custom" - - -def test_model_not_set_if_not_llm_kind_span(): - """Test that model name and provider not set if non-LLM span.""" - dummy_tracer = DummyTracer() - mock_llmobs_span_writer = mock.MagicMock() - with override_global_config(dict(_llmobs_ml_app="unnamed-ml-app")): - with dummy_tracer.trace("root_workflow_span", span_type=SpanTypes.LLM) as span: - span.set_tag(SPAN_KIND, "workflow") - span.set_tag(MODEL_NAME, "model_name") - tp = LLMObsTraceProcessor(llmobs_span_writer=mock_llmobs_span_writer) - span_event, _ = tp._llmobs_span_event(span) - assert "model_name" not in span_event["meta"] - assert "model_provider" not in span_event["meta"] - - -def test_input_messages_are_set(): - """Test that input messages are set on the span event if they are present on the span.""" - dummy_tracer = DummyTracer() - mock_llmobs_span_writer = mock.MagicMock() - with override_global_config(dict(_llmobs_ml_app="unnamed-ml-app")): - with dummy_tracer.trace("root_llm_span", span_type=SpanTypes.LLM) as llm_span: - llm_span.set_tag(SPAN_KIND, "llm") - llm_span.set_tag(INPUT_MESSAGES, '[{"content": "message", "role": "user"}]') - tp = LLMObsTraceProcessor(llmobs_span_writer=mock_llmobs_span_writer) - assert tp._llmobs_span_event(llm_span)[0]["meta"]["input"]["messages"] == [ - {"content": "message", "role": "user"} - ] - - -def test_input_value_is_set(): - """Test that input value is set on the span event if they are present on the span.""" - dummy_tracer = DummyTracer() - mock_llmobs_span_writer = mock.MagicMock() - with override_global_config(dict(_llmobs_ml_app="unnamed-ml-app")): - with dummy_tracer.trace("root_llm_span", span_type=SpanTypes.LLM) as llm_span: - llm_span.set_tag(SPAN_KIND, "llm") - llm_span.set_tag(INPUT_VALUE, "value") - tp = LLMObsTraceProcessor(llmobs_span_writer=mock_llmobs_span_writer) - assert tp._llmobs_span_event(llm_span)[0]["meta"]["input"]["value"] == "value" - - -def test_input_parameters_are_set(): - """Test 
that input parameters are set on the span event if they are present on the span.""" - dummy_tracer = DummyTracer() - mock_llmobs_span_writer = mock.MagicMock() - with override_global_config(dict(_llmobs_ml_app="unnamed-ml-app")): - with dummy_tracer.trace("root_llm_span", span_type=SpanTypes.LLM) as llm_span: - llm_span.set_tag(SPAN_KIND, "llm") - llm_span.set_tag(INPUT_PARAMETERS, '{"key": "value"}') - tp = LLMObsTraceProcessor(llmobs_span_writer=mock_llmobs_span_writer) - assert tp._llmobs_span_event(llm_span)[0]["meta"]["input"]["parameters"] == {"key": "value"} - - -def test_output_messages_are_set(): - """Test that output messages are set on the span event if they are present on the span.""" - dummy_tracer = DummyTracer() - mock_llmobs_span_writer = mock.MagicMock() - with override_global_config(dict(_llmobs_ml_app="unnamed-ml-app")): - with dummy_tracer.trace("root_llm_span", span_type=SpanTypes.LLM) as llm_span: - llm_span.set_tag(SPAN_KIND, "llm") - llm_span.set_tag(OUTPUT_MESSAGES, '[{"content": "message", "role": "user"}]') - tp = LLMObsTraceProcessor(llmobs_span_writer=mock_llmobs_span_writer) - assert tp._llmobs_span_event(llm_span)[0]["meta"]["output"]["messages"] == [ - {"content": "message", "role": "user"} - ] - - -def test_output_value_is_set(): - """Test that output value is set on the span event if they are present on the span.""" - dummy_tracer = DummyTracer() - mock_llmobs_span_writer = mock.MagicMock() - with override_global_config(dict(_llmobs_ml_app="unnamed-ml-app")): - with dummy_tracer.trace("root_llm_span", span_type=SpanTypes.LLM) as llm_span: - llm_span.set_tag(SPAN_KIND, "llm") - llm_span.set_tag(OUTPUT_VALUE, "value") - tp = LLMObsTraceProcessor(llmobs_span_writer=mock_llmobs_span_writer) - assert tp._llmobs_span_event(llm_span)[0]["meta"]["output"]["value"] == "value" - - -def test_prompt_is_set(): - """Test that prompt is set on the span event if they are present on the span.""" - dummy_tracer = DummyTracer() - 
mock_llmobs_span_writer = mock.MagicMock() - with override_global_config(dict(_llmobs_ml_app="unnamed-ml-app")): - with dummy_tracer.trace("root_llm_span", span_type=SpanTypes.LLM) as llm_span: - llm_span.set_tag(SPAN_KIND, "llm") - llm_span.set_tag(INPUT_PROMPT, json.dumps({"variables": {"var1": "var2"}})) - tp = LLMObsTraceProcessor(llmobs_span_writer=mock_llmobs_span_writer) - assert tp._llmobs_span_event(llm_span)[0]["meta"]["input"]["prompt"] == {"variables": {"var1": "var2"}} - - -def test_prompt_is_not_set_for_non_llm_spans(): - """Test that prompt is NOT set on the span event if the span is not an LLM span.""" - dummy_tracer = DummyTracer() - mock_llmobs_span_writer = mock.MagicMock() - with override_global_config(dict(_llmobs_ml_app="unnamed-ml-app")): - with dummy_tracer.trace("task_span", span_type=SpanTypes.LLM) as task_span: - task_span.set_tag(SPAN_KIND, "task") - task_span.set_tag(INPUT_VALUE, "ival") - task_span.set_tag(INPUT_PROMPT, json.dumps({"variables": {"var1": "var2"}})) - tp = LLMObsTraceProcessor(llmobs_span_writer=mock_llmobs_span_writer) - assert tp._llmobs_span_event(task_span)[0]["meta"]["input"].get("prompt") is None - - -def test_metadata_is_set(): - """Test that metadata is set on the span event if it is present on the span.""" - dummy_tracer = DummyTracer() - mock_llmobs_span_writer = mock.MagicMock() - with override_global_config(dict(_llmobs_ml_app="unnamed-ml-app")): - with dummy_tracer.trace("root_llm_span", span_type=SpanTypes.LLM) as llm_span: - llm_span.set_tag(SPAN_KIND, "llm") - llm_span.set_tag(METADATA, '{"key": "value"}') - tp = LLMObsTraceProcessor(llmobs_span_writer=mock_llmobs_span_writer) - assert tp._llmobs_span_event(llm_span)[0]["meta"]["metadata"] == {"key": "value"} - - -def test_metrics_are_set(): - """Test that metadata is set on the span event if it is present on the span.""" - dummy_tracer = DummyTracer() - mock_llmobs_span_writer = mock.MagicMock() - with 
override_global_config(dict(_llmobs_ml_app="unnamed-ml-app")): - with dummy_tracer.trace("root_llm_span", span_type=SpanTypes.LLM) as llm_span: - llm_span.set_tag(SPAN_KIND, "llm") - llm_span.set_tag(METRICS, '{"tokens": 100}') - tp = LLMObsTraceProcessor(llmobs_span_writer=mock_llmobs_span_writer) - assert tp._llmobs_span_event(llm_span)[0]["metrics"] == {"tokens": 100} - - -def test_langchain_span_name_is_set_to_class_name(): - """Test span names for langchain auto-instrumented spans is set correctly.""" - dummy_tracer = DummyTracer() - mock_llmobs_span_writer = mock.MagicMock() - with override_global_config(dict(_llmobs_ml_app="unnamed-ml-app")): - with dummy_tracer.trace(LANGCHAIN_APM_SPAN_NAME, resource="expected_name", span_type=SpanTypes.LLM) as llm_span: - llm_span.set_tag(SPAN_KIND, "llm") - tp = LLMObsTraceProcessor(llmobs_span_writer=mock_llmobs_span_writer) - assert tp._llmobs_span_event(llm_span)[0]["name"] == "expected_name" - - -def test_error_is_set(): - """Test that error is set on the span event if it is present on the span.""" - dummy_tracer = DummyTracer() - mock_llmobs_span_writer = mock.MagicMock() - with override_global_config(dict(_llmobs_ml_app="unnamed-ml-app")): - with pytest.raises(ValueError): - with dummy_tracer.trace("root_llm_span", span_type=SpanTypes.LLM) as llm_span: - llm_span.set_tag(SPAN_KIND, "llm") - raise ValueError("error") - tp = LLMObsTraceProcessor(llmobs_span_writer=mock_llmobs_span_writer) - span_event, _ = tp._llmobs_span_event(llm_span) - assert span_event["meta"]["error.message"] == "error" - assert "ValueError" in span_event["meta"]["error.type"] - assert 'raise ValueError("error")' in span_event["meta"]["error.stack"] diff --git a/tests/profiling/collector/test_threading.py b/tests/profiling/collector/test_threading.py index c6b646a3f98..ae7e7204a68 100644 --- a/tests/profiling/collector/test_threading.py +++ b/tests/profiling/collector/test_threading.py @@ -56,12 +56,12 @@ def test_patch(): lock = threading.Lock 
collector = collector_threading.ThreadingLockCollector(r) collector.start() - assert lock == collector.original + assert lock == collector._original # wrapt makes this true assert lock == threading.Lock collector.stop() assert lock == threading.Lock - assert collector.original == threading.Lock + assert collector._original == threading.Lock def test_lock_acquire_events(): diff --git a/tests/profiling_v2/collector/test_asyncio.py b/tests/profiling_v2/collector/test_asyncio.py index c29ff7fe92c..f0c9bb625d9 100644 --- a/tests/profiling_v2/collector/test_asyncio.py +++ b/tests/profiling_v2/collector/test_asyncio.py @@ -7,7 +7,6 @@ import pytest from ddtrace import ext -from ddtrace import tracer from ddtrace.internal.datadog.profiling import ddup from ddtrace.profiling.collector import asyncio as collector_asyncio from tests.profiling.collector import pprof_utils @@ -85,7 +84,7 @@ async def test_asyncio_lock_events(self): ], ) - async def test_asyncio_lock_events_tracer(self): + async def test_asyncio_lock_events_tracer(self, tracer): tracer._endpoint_call_counter_span_processor.enable() resource = str(uuid.uuid4()) span_type = ext.SpanTypes.WEB @@ -103,7 +102,7 @@ async def test_asyncio_lock_events_tracer(self): lock_ctx = asyncio.Lock() # !CREATE! test_asyncio_lock_events_tracer_3 async with lock_ctx: # !ACQUIRE! !RELEASE! 
test_asyncio_lock_events_tracer_3 pass - ddup.upload() + ddup.upload(tracer=tracer) linenos_1 = get_lock_linenos("test_asyncio_lock_events_tracer_1") linenos_2 = get_lock_linenos("test_asyncio_lock_events_tracer_2") diff --git a/tests/profiling_v2/collector/test_stack.py b/tests/profiling_v2/collector/test_stack.py index 5d9007248bc..af13a1ea237 100644 --- a/tests/profiling_v2/collector/test_stack.py +++ b/tests/profiling_v2/collector/test_stack.py @@ -9,7 +9,6 @@ import pytest from ddtrace import ext -from ddtrace import tracer from ddtrace.internal.datadog.profiling import ddup from ddtrace.profiling.collector import stack from ddtrace.settings.profiling import config @@ -82,7 +81,7 @@ def foo(): @pytest.mark.parametrize("stack_v2_enabled", [True, False]) -def test_push_span(stack_v2_enabled, tmp_path): +def test_push_span(stack_v2_enabled, tmp_path, tracer): if sys.version_info[:2] == (3, 7) and stack_v2_enabled: pytest.skip("stack_v2 is not supported on Python 3.7") @@ -111,7 +110,7 @@ def test_push_span(stack_v2_enabled, tmp_path): local_root_span_id = span._local_root.span_id for _ in range(10): time.sleep(0.1) - ddup.upload() + ddup.upload(tracer=tracer) profile = pprof_utils.parse_profile(output_filename) samples = pprof_utils.get_samples_with_label_key(profile, "span id") @@ -129,7 +128,7 @@ def test_push_span(stack_v2_enabled, tmp_path): ) -def test_push_span_unregister_thread(tmp_path, monkeypatch): +def test_push_span_unregister_thread(tmp_path, monkeypatch, tracer): if sys.version_info[:2] == (3, 7): pytest.skip("stack_v2 is not supported on Python 3.7") @@ -166,7 +165,7 @@ def target_fun(): t.start() t.join() thread_id = t.ident - ddup.upload() + ddup.upload(tracer=tracer) profile = pprof_utils.parse_profile(output_filename) samples = pprof_utils.get_samples_with_label_key(profile, "span id") @@ -187,7 +186,7 @@ def target_fun(): @pytest.mark.parametrize("stack_v2_enabled", [True, False]) -def test_push_non_web_span(stack_v2_enabled, tmp_path): +def 
test_push_non_web_span(stack_v2_enabled, tmp_path, tracer): if sys.version_info[:2] == (3, 7) and stack_v2_enabled: pytest.skip("stack_v2 is not supported on Python 3.7") @@ -216,7 +215,7 @@ def test_push_non_web_span(stack_v2_enabled, tmp_path): local_root_span_id = span._local_root.span_id for _ in range(10): time.sleep(0.1) - ddup.upload() + ddup.upload(tracer=tracer) profile = pprof_utils.parse_profile(output_filename) samples = pprof_utils.get_samples_with_label_key(profile, "span id") @@ -235,7 +234,7 @@ def test_push_non_web_span(stack_v2_enabled, tmp_path): @pytest.mark.parametrize("stack_v2_enabled", [True, False]) -def test_push_span_none_span_type(stack_v2_enabled, tmp_path): +def test_push_span_none_span_type(stack_v2_enabled, tmp_path, tracer): # Test for https://github.com/DataDog/dd-trace-py/issues/11141 if sys.version_info[:2] == (3, 7) and stack_v2_enabled: pytest.skip("stack_v2 is not supported on Python 3.7") @@ -266,7 +265,7 @@ def test_push_span_none_span_type(stack_v2_enabled, tmp_path): local_root_span_id = span._local_root.span_id for _ in range(10): time.sleep(0.1) - ddup.upload() + ddup.upload(tracer=tracer) profile = pprof_utils.parse_profile(output_filename) samples = pprof_utils.get_samples_with_label_key(profile, "span id") @@ -398,7 +397,7 @@ def target_fun(): @pytest.mark.skipif(not stack.FEATURES["stack-exceptions"], reason="Stack exceptions are not supported") @pytest.mark.parametrize("stack_v2_enabled", [True, False]) -def test_exception_collection_trace(stack_v2_enabled, tmp_path): +def test_exception_collection_trace(stack_v2_enabled, tmp_path, tracer): if sys.version_info[:2] == (3, 7) and stack_v2_enabled: pytest.skip("stack_v2 is not supported on Python 3.7") @@ -419,7 +418,7 @@ def test_exception_collection_trace(stack_v2_enabled, tmp_path): except Exception: time.sleep(1) - ddup.upload() + ddup.upload(tracer=tracer) profile = pprof_utils.parse_profile(output_filename) samples = 
pprof_utils.get_samples_with_label_key(profile, "exception type") diff --git a/tests/profiling_v2/collector/test_threading.py b/tests/profiling_v2/collector/test_threading.py index 5ca09dd8da5..12b84fc9970 100644 --- a/tests/profiling_v2/collector/test_threading.py +++ b/tests/profiling_v2/collector/test_threading.py @@ -8,7 +8,6 @@ import pytest from ddtrace import ext -from ddtrace import tracer from ddtrace.internal.datadog.profiling import ddup from ddtrace.profiling.collector import threading as collector_threading from tests.profiling.collector import pprof_utils @@ -74,16 +73,16 @@ def test_patch(): lock = threading.Lock collector = collector_threading.ThreadingLockCollector(None) collector.start() - assert lock == collector.original + assert lock == collector._original # wrapt makes this true assert lock == threading.Lock collector.stop() assert lock == threading.Lock - assert collector.original == threading.Lock + assert collector._original == threading.Lock @pytest.mark.skipif(not sys.platform.startswith("linux"), reason="only works on linux") -@pytest.mark.subprocess(err=None) +@pytest.mark.subprocess() def test_user_threads_have_native_id(): from os import getpid from threading import Thread @@ -356,7 +355,7 @@ def lockfunc(self): ], ) - def test_lock_events_tracer(self): + def test_lock_events_tracer(self, tracer): tracer._endpoint_call_counter_span_processor.enable() resource = str(uuid.uuid4()) span_type = ext.SpanTypes.WEB @@ -375,7 +374,7 @@ def test_lock_events_tracer(self): span_id = t.span_id lock2.release() # !RELEASE! 
test_lock_events_tracer_2 - ddup.upload() + ddup.upload(tracer=tracer) linenos1 = get_lock_linenos("test_lock_events_tracer_1") linenos2 = get_lock_linenos("test_lock_events_tracer_2") @@ -419,7 +418,7 @@ def test_lock_events_tracer(self): ], ) - def test_lock_events_tracer_non_web(self): + def test_lock_events_tracer_non_web(self, tracer): tracer._endpoint_call_counter_span_processor.enable() resource = str(uuid.uuid4()) span_type = ext.SpanTypes.SQL @@ -435,7 +434,7 @@ def test_lock_events_tracer_non_web(self): span_id = t.span_id lock2.release() # !RELEASE! test_lock_events_tracer_non_web - ddup.upload() + ddup.upload(tracer=tracer) linenos2 = get_lock_linenos("test_lock_events_tracer_non_web") @@ -463,7 +462,7 @@ def test_lock_events_tracer_non_web(self): ], ) - def test_lock_events_tracer_late_finish(self): + def test_lock_events_tracer_late_finish(self, tracer): tracer._endpoint_call_counter_span_processor.enable() resource = str(uuid.uuid4()) span_type = ext.SpanTypes.WEB @@ -482,7 +481,7 @@ def test_lock_events_tracer_late_finish(self): lock2.release() # !RELEASE! test_lock_events_tracer_late_finish_2 span.resource = resource span.finish() - ddup.upload() + ddup.upload(tracer=tracer) linenos1 = get_lock_linenos("test_lock_events_tracer_late_finish_1") linenos2 = get_lock_linenos("test_lock_events_tracer_late_finish_2") @@ -520,7 +519,7 @@ def test_lock_events_tracer_late_finish(self): ], ) - def test_resource_not_collected(self): + def test_resource_not_collected(self, tracer): tracer._endpoint_call_counter_span_processor.enable() resource = str(uuid.uuid4()) span_type = ext.SpanTypes.WEB @@ -539,7 +538,7 @@ def test_resource_not_collected(self): lock1.release() # !RELEASE! test_resource_not_collected_1 span_id = t.span_id lock2.release() # !RELEASE! 
test_resource_not_collected_2 - ddup.upload() + ddup.upload(tracer=tracer) linenos1 = get_lock_linenos("test_resource_not_collected_1") linenos2 = get_lock_linenos("test_resource_not_collected_2") diff --git a/tests/profiling_v2/simple_program_pytorch_gpu.py b/tests/profiling_v2/simple_program_pytorch_gpu.py new file mode 100644 index 00000000000..8d846c52de4 --- /dev/null +++ b/tests/profiling_v2/simple_program_pytorch_gpu.py @@ -0,0 +1,42 @@ +import torch +import torch.nn +import torch.optim +from torch.profiler import ProfilerActivity +import torch.utils.data +import torchvision.datasets +import torchvision.models +from torchvision.models import ResNet18_Weights +from torchvision.models import resnet18 +import torchvision.transforms as T + + +def cifar(): + transform = T.Compose([T.Resize(224), T.ToTensor(), T.Normalize((0.5, 0.5, 0.5), (0.5, 0.5, 0.5))]) + train_set = torchvision.datasets.CIFAR10(root="./data", train=True, download=True, transform=transform) + train_loader = torch.utils.data.DataLoader(train_set, batch_size=32, shuffle=True) + device = torch.device("cuda") + model = resnet18(weights=ResNet18_Weights.DEFAULT).cuda() + criterion = torch.nn.CrossEntropyLoss() + optimizer = torch.optim.SGD(model.parameters(), lr=0.001, momentum=0.9) + model.train() + + def train(data): + inputs, labels = data[0].to(device=device), data[1].to(device=device) + outputs = model(inputs) + loss = criterion(outputs, labels) + optimizer.zero_grad() + loss.backward() + optimizer.step() + + with torch.profiler.profile( + activities=[ProfilerActivity.CUDA], + ): + for step, batch_data in enumerate(train_loader): + print("step #%d" % step) + if step >= (1 + 1 + 3) * 2: + break + train(batch_data) + + +if __name__ == "__main__": + cifar() diff --git a/tests/profiling_v2/test_pytorch.py b/tests/profiling_v2/test_pytorch.py new file mode 100644 index 00000000000..e50d5b46d55 --- /dev/null +++ b/tests/profiling_v2/test_pytorch.py @@ -0,0 +1,44 @@ +import os +import sys + +import 
pytest + +from tests.profiling.collector import pprof_utils +from tests.utils import call_program + + +@pytest.mark.skipif(not os.getenv("DD_PROFILING_PYTORCH_ENABLED", False), reason="Not testing pytorch GPU") +def test_call_script_pytorch_gpu(tmp_path, monkeypatch): + filename = str(tmp_path / "pprof") + monkeypatch.setenv("DD_PROFILING_OUTPUT_PPROF", filename) + monkeypatch.setenv("DD_PROFILING_ENABLED", "1") + monkeypatch.setenv("DD_PROFILING_PYTORCH_ENABLED", "1") + stdout, stderr, exitcode, pid = call_program( + "ddtrace-run", sys.executable, os.path.join(os.path.dirname(__file__), "simple_program_pytorch_gpu.py") + ) + assert exitcode == 0, f"Profiler exited with code {exitcode}. Stderr: {stderr}" + + profile = pprof_utils.parse_profile(filename) + samples = pprof_utils.get_samples_with_value_type(profile, "gpu-time") + assert len(samples) > 0 + print("number of gpu time samples: ", len(samples)) + print("first sample: ", samples[0]) + + expected_sample = pprof_utils.StackEvent( + locations=[ + pprof_utils.StackLocation( + function_name="Memset (Device)", + filename="unknown-file", + line_no=0, + ), + pprof_utils.StackLocation( + function_name="PYTORCH_DeviceType.CUDA", + filename="unknown-file", + line_no=0, + ), + ], + ) + pprof_utils.assert_profile_has_sample(profile, samples=samples, expected_sample=expected_sample) + + gpu_device_label_samples = pprof_utils.get_samples_with_label_key(profile, "gpu device name") + assert len(gpu_device_label_samples) > 0 diff --git a/tests/snapshots/test_api_fake_runners.test_manual_api_fake_efd_faulty_session.json b/tests/snapshots/test_api_fake_runners.test_manual_api_fake_efd_faulty_session.json index db01cee9ccf..cd01c1a80b8 100644 --- a/tests/snapshots/test_api_fake_runners.test_manual_api_fake_efd_faulty_session.json +++ b/tests/snapshots/test_api_fake_runners.test_manual_api_fake_efd_faulty_session.json @@ -13,7 +13,7 @@ "_dd.ci.env_vars": 
"{\"CI_PROJECT_URL\":\"https://test.test.io/Test/test-test/test-test\",\"CI_PIPELINE_ID\":\"43949931\",\"CI_JOB_ID\":\"633358062\"}", "_dd.origin": "ciapp-test", "_dd.p.dm": "-0", - "_dd.p.tid": "6751756200000000", + "_dd.p.tid": "675ae84200000000", "ci.job.name": "test-job", "ci.job.url": "https://test.test.io/Test/test-test/test-test/-/jobs/633358062", "ci.node.labels": "[\"runner:test-test-test-test\"]", @@ -24,7 +24,7 @@ "ci.pipeline.url": "https://test.\u2020est.io/Test/test-\u2020est/test-test/-/pipelines/43949931", "ci.provider.name": "gitlab", "ci.stage.name": "test-stage", - "ci.workspace_path": "/tmp/pytest-of-root/pytest-12/test_manual_api_fake_efd_faulty_session0", + "ci.workspace_path": "/tmp/pytest-of-root/pytest-87/test_manual_api_fake_efd_faulty_session0", "component": "dd_manual_test_fw", "git.branch": "test.brancn/test_name", "git.commit.author.date": "2024-09-10T10:11:13+01:00", @@ -35,18 +35,17 @@ "git.repository_url": "https://test.test.io/Test/test-test/test-test.git", "git.tag": "v1.0.0", "language": "python", - "library_version": "2.18.0.dev124+gc03b9e422.d20241205", + "library_version": "2.18.0.dev111+g4ca600932", "os.architecture": "aarch64", "os.platform": "Linux", "os.version": "6.6.12-linuxkit", - "runtime-id": "bdef4ecb6c674245bfc4f6518ff5a773", + "runtime-id": "096f74b542c341ea9f8a4b6947a6a95f", "runtime.name": "CPython", - "runtime.version": "3.9.19", + "runtime.version": "3.11.9", "span.kind": "test", "test.command": "manual_efd_faulty_session", "test.framework": "dd_manual_test_fw", "test.framework_version": "1.0.0", - "test.is_new": "true", "test.module": "m1", "test.module_path": "", "test.name": "m1_s1_t1", @@ -54,9 +53,9 @@ "test.status": "pass", "test.suite": "m1_s1", "test.type": "test", - "test_module_id": "7932912067234031810", - "test_session_id": "15705414272000062156", - "test_suite_id": "845846750843749324", + "test_module_id": "8540260240647316329", + "test_session_id": "18323133602450366815", + "test_suite_id": 
"12178664196634984526", "type": "test" }, "metrics": { @@ -64,12 +63,12 @@ "_dd.top_level": 1, "_dd.tracer_kr": 1.0, "_sampling_priority_v1": 1, - "process_id": 32495, + "process_id": 97018, "test.source.end": 2, "test.source.start": 1 }, - "duration": 81875, - "start": 1733391714548202334 + "duration": 80708, + "start": 1734010946928117799 }], [ { @@ -86,7 +85,7 @@ "_dd.ci.env_vars": "{\"CI_PROJECT_URL\":\"https://test.test.io/Test/test-test/test-test\",\"CI_PIPELINE_ID\":\"43949931\",\"CI_JOB_ID\":\"633358062\"}", "_dd.origin": "ciapp-test", "_dd.p.dm": "-0", - "_dd.p.tid": "6751756200000000", + "_dd.p.tid": "675ae84200000000", "ci.job.name": "test-job", "ci.job.url": "https://test.test.io/Test/test-test/test-test/-/jobs/633358062", "ci.node.labels": "[\"runner:test-test-test-test\"]", @@ -97,7 +96,7 @@ "ci.pipeline.url": "https://test.\u2020est.io/Test/test-\u2020est/test-test/-/pipelines/43949931", "ci.provider.name": "gitlab", "ci.stage.name": "test-stage", - "ci.workspace_path": "/tmp/pytest-of-root/pytest-12/test_manual_api_fake_efd_faulty_session0", + "ci.workspace_path": "/tmp/pytest-of-root/pytest-87/test_manual_api_fake_efd_faulty_session0", "component": "dd_manual_test_fw", "git.branch": "test.brancn/test_name", "git.commit.author.date": "2024-09-10T10:11:13+01:00", @@ -108,27 +107,26 @@ "git.repository_url": "https://test.test.io/Test/test-test/test-test.git", "git.tag": "v1.0.0", "language": "python", - "library_version": "2.18.0.dev124+gc03b9e422.d20241205", + "library_version": "2.18.0.dev111+g4ca600932", "os.architecture": "aarch64", "os.platform": "Linux", "os.version": "6.6.12-linuxkit", - "runtime-id": "bdef4ecb6c674245bfc4f6518ff5a773", + "runtime-id": "096f74b542c341ea9f8a4b6947a6a95f", "runtime.name": "CPython", - "runtime.version": "3.9.19", + "runtime.version": "3.11.9", "span.kind": "test", "test.command": "manual_efd_faulty_session", "test.framework": "dd_manual_test_fw", "test.framework_version": "1.0.0", - "test.is_new": "true", 
"test.module": "m1", "test.module_path": "", "test.name": "m1_s1_t2", "test.status": "pass", "test.suite": "m1_s1", "test.type": "test", - "test_module_id": "7932912067234031810", - "test_session_id": "15705414272000062156", - "test_suite_id": "845846750843749324", + "test_module_id": "8540260240647316329", + "test_session_id": "18323133602450366815", + "test_suite_id": "12178664196634984526", "type": "test" }, "metrics": { @@ -136,10 +134,10 @@ "_dd.top_level": 1, "_dd.tracer_kr": 1.0, "_sampling_priority_v1": 1, - "process_id": 32495 + "process_id": 97018 }, - "duration": 66625, - "start": 1733391714563523250 + "duration": 76500, + "start": 1734010946944395590 }], [ { @@ -156,7 +154,7 @@ "_dd.ci.env_vars": "{\"CI_PROJECT_URL\":\"https://test.test.io/Test/test-test/test-test\",\"CI_PIPELINE_ID\":\"43949931\",\"CI_JOB_ID\":\"633358062\"}", "_dd.origin": "ciapp-test", "_dd.p.dm": "-0", - "_dd.p.tid": "6751756200000000", + "_dd.p.tid": "675ae84200000000", "ci.job.name": "test-job", "ci.job.url": "https://test.test.io/Test/test-test/test-test/-/jobs/633358062", "ci.node.labels": "[\"runner:test-test-test-test\"]", @@ -167,7 +165,7 @@ "ci.pipeline.url": "https://test.\u2020est.io/Test/test-\u2020est/test-test/-/pipelines/43949931", "ci.provider.name": "gitlab", "ci.stage.name": "test-stage", - "ci.workspace_path": "/tmp/pytest-of-root/pytest-12/test_manual_api_fake_efd_faulty_session0", + "ci.workspace_path": "/tmp/pytest-of-root/pytest-87/test_manual_api_fake_efd_faulty_session0", "component": "dd_manual_test_fw", "git.branch": "test.brancn/test_name", "git.commit.author.date": "2024-09-10T10:11:13+01:00", @@ -178,13 +176,13 @@ "git.repository_url": "https://test.test.io/Test/test-test/test-test.git", "git.tag": "v1.0.0", "language": "python", - "library_version": "2.18.0.dev124+gc03b9e422.d20241205", + "library_version": "2.18.0.dev111+g4ca600932", "os.architecture": "aarch64", "os.platform": "Linux", "os.version": "6.6.12-linuxkit", - "runtime-id": 
"bdef4ecb6c674245bfc4f6518ff5a773", + "runtime-id": "096f74b542c341ea9f8a4b6947a6a95f", "runtime.name": "CPython", - "runtime.version": "3.9.19", + "runtime.version": "3.11.9", "span.kind": "test", "test.codeowners": "[\"@romain\", \"@romain2\"]", "test.command": "manual_efd_faulty_session", @@ -197,9 +195,9 @@ "test.status": "skip", "test.suite": "m1_s1", "test.type": "test", - "test_module_id": "7932912067234031810", - "test_session_id": "15705414272000062156", - "test_suite_id": "845846750843749324", + "test_module_id": "8540260240647316329", + "test_session_id": "18323133602450366815", + "test_suite_id": "12178664196634984526", "type": "test" }, "metrics": { @@ -207,12 +205,12 @@ "_dd.top_level": 1, "_dd.tracer_kr": 1.0, "_sampling_priority_v1": 1, - "process_id": 32495, + "process_id": 97018, "test.source.end": 12, "test.source.start": 4 }, - "duration": 71208, - "start": 1733391714563681417 + "duration": 67583, + "start": 1734010946944573757 }], [ { @@ -229,7 +227,7 @@ "_dd.ci.env_vars": "{\"CI_PROJECT_URL\":\"https://test.test.io/Test/test-test/test-test\",\"CI_PIPELINE_ID\":\"43949931\",\"CI_JOB_ID\":\"633358062\"}", "_dd.origin": "ciapp-test", "_dd.p.dm": "-0", - "_dd.p.tid": "6751756200000000", + "_dd.p.tid": "675ae84200000000", "ci.job.name": "test-job", "ci.job.url": "https://test.test.io/Test/test-test/test-test/-/jobs/633358062", "ci.node.labels": "[\"runner:test-test-test-test\"]", @@ -240,7 +238,7 @@ "ci.pipeline.url": "https://test.\u2020est.io/Test/test-\u2020est/test-test/-/pipelines/43949931", "ci.provider.name": "gitlab", "ci.stage.name": "test-stage", - "ci.workspace_path": "/tmp/pytest-of-root/pytest-12/test_manual_api_fake_efd_faulty_session0", + "ci.workspace_path": "/tmp/pytest-of-root/pytest-87/test_manual_api_fake_efd_faulty_session0", "component": "dd_manual_test_fw", "git.branch": "test.brancn/test_name", "git.commit.author.date": "2024-09-10T10:11:13+01:00", @@ -251,13 +249,13 @@ "git.repository_url": 
"https://test.test.io/Test/test-test/test-test.git", "git.tag": "v1.0.0", "language": "python", - "library_version": "2.18.0.dev124+gc03b9e422.d20241205", + "library_version": "2.18.0.dev111+g4ca600932", "os.architecture": "aarch64", "os.platform": "Linux", "os.version": "6.6.12-linuxkit", - "runtime-id": "bdef4ecb6c674245bfc4f6518ff5a773", + "runtime-id": "096f74b542c341ea9f8a4b6947a6a95f", "runtime.name": "CPython", - "runtime.version": "3.9.19", + "runtime.version": "3.11.9", "span.kind": "test", "test.command": "manual_efd_faulty_session", "test.framework": "dd_manual_test_fw", @@ -269,9 +267,9 @@ "test.status": "skip", "test.suite": "m1_s1", "test.type": "test", - "test_module_id": "7932912067234031810", - "test_session_id": "15705414272000062156", - "test_suite_id": "845846750843749324", + "test_module_id": "8540260240647316329", + "test_session_id": "18323133602450366815", + "test_suite_id": "12178664196634984526", "type": "test" }, "metrics": { @@ -279,10 +277,10 @@ "_dd.top_level": 1, "_dd.tracer_kr": 1.0, "_sampling_priority_v1": 1, - "process_id": 32495 + "process_id": 97018 }, - "duration": 36083, - "start": 1733391714563825542 + "duration": 29584, + "start": 1734010946944705715 }], [ { @@ -299,7 +297,7 @@ "_dd.ci.env_vars": "{\"CI_PROJECT_URL\":\"https://test.test.io/Test/test-test/test-test\",\"CI_PIPELINE_ID\":\"43949931\",\"CI_JOB_ID\":\"633358062\"}", "_dd.origin": "ciapp-test", "_dd.p.dm": "-0", - "_dd.p.tid": "6751756200000000", + "_dd.p.tid": "675ae84200000000", "ci.job.name": "test-job", "ci.job.url": "https://test.test.io/Test/test-test/test-test/-/jobs/633358062", "ci.node.labels": "[\"runner:test-test-test-test\"]", @@ -310,7 +308,7 @@ "ci.pipeline.url": "https://test.\u2020est.io/Test/test-\u2020est/test-test/-/pipelines/43949931", "ci.provider.name": "gitlab", "ci.stage.name": "test-stage", - "ci.workspace_path": "/tmp/pytest-of-root/pytest-12/test_manual_api_fake_efd_faulty_session0", + "ci.workspace_path": 
"/tmp/pytest-of-root/pytest-87/test_manual_api_fake_efd_faulty_session0", "component": "dd_manual_test_fw", "git.branch": "test.brancn/test_name", "git.commit.author.date": "2024-09-10T10:11:13+01:00", @@ -321,13 +319,13 @@ "git.repository_url": "https://test.test.io/Test/test-test/test-test.git", "git.tag": "v1.0.0", "language": "python", - "library_version": "2.18.0.dev124+gc03b9e422.d20241205", + "library_version": "2.18.0.dev111+g4ca600932", "os.architecture": "aarch64", "os.platform": "Linux", "os.version": "6.6.12-linuxkit", - "runtime-id": "bdef4ecb6c674245bfc4f6518ff5a773", + "runtime-id": "096f74b542c341ea9f8a4b6947a6a95f", "runtime.name": "CPython", - "runtime.version": "3.9.19", + "runtime.version": "3.11.9", "span.kind": "test", "test.command": "manual_efd_faulty_session", "test.framework": "dd_manual_test_fw", @@ -339,9 +337,9 @@ "test.status": "pass", "test.suite": "m1_s1", "test.type": "test", - "test_module_id": "7932912067234031810", - "test_session_id": "15705414272000062156", - "test_suite_id": "845846750843749324", + "test_module_id": "8540260240647316329", + "test_session_id": "18323133602450366815", + "test_suite_id": "12178664196634984526", "type": "test" }, "metrics": { @@ -349,10 +347,10 @@ "_dd.top_level": 1, "_dd.tracer_kr": 1.0, "_sampling_priority_v1": 1, - "process_id": 32495 + "process_id": 97018 }, - "duration": 35125, - "start": 1733391714563932792 + "duration": 29125, + "start": 1734010946944795424 }], [ { @@ -369,7 +367,7 @@ "_dd.ci.env_vars": "{\"CI_PROJECT_URL\":\"https://test.test.io/Test/test-test/test-test\",\"CI_PIPELINE_ID\":\"43949931\",\"CI_JOB_ID\":\"633358062\"}", "_dd.origin": "ciapp-test", "_dd.p.dm": "-0", - "_dd.p.tid": "6751756200000000", + "_dd.p.tid": "675ae84200000000", "ci.job.name": "test-job", "ci.job.url": "https://test.test.io/Test/test-test/test-test/-/jobs/633358062", "ci.node.labels": "[\"runner:test-test-test-test\"]", @@ -380,7 +378,7 @@ "ci.pipeline.url": 
"https://test.\u2020est.io/Test/test-\u2020est/test-test/-/pipelines/43949931", "ci.provider.name": "gitlab", "ci.stage.name": "test-stage", - "ci.workspace_path": "/tmp/pytest-of-root/pytest-12/test_manual_api_fake_efd_faulty_session0", + "ci.workspace_path": "/tmp/pytest-of-root/pytest-87/test_manual_api_fake_efd_faulty_session0", "component": "dd_manual_test_fw", "git.branch": "test.brancn/test_name", "git.commit.author.date": "2024-09-10T10:11:13+01:00", @@ -391,13 +389,13 @@ "git.repository_url": "https://test.test.io/Test/test-test/test-test.git", "git.tag": "v1.0.0", "language": "python", - "library_version": "2.18.0.dev124+gc03b9e422.d20241205", + "library_version": "2.18.0.dev111+g4ca600932", "os.architecture": "aarch64", "os.platform": "Linux", "os.version": "6.6.12-linuxkit", - "runtime-id": "bdef4ecb6c674245bfc4f6518ff5a773", + "runtime-id": "096f74b542c341ea9f8a4b6947a6a95f", "runtime.name": "CPython", - "runtime.version": "3.9.19", + "runtime.version": "3.11.9", "span.kind": "test", "test.code_coverage.enabled": "false", "test.command": "manual_efd_faulty_session", @@ -407,7 +405,7 @@ "test.framework_version": "1.0.0", "test.itr.tests_skipping.enabled": "false", "test.status": "pass", - "test_session_id": "15705414272000062156", + "test_session_id": "18323133602450366815", "type": "test_session_end" }, "metrics": { @@ -415,10 +413,10 @@ "_dd.top_level": 1, "_dd.tracer_kr": 1.0, "_sampling_priority_v1": 1, - "process_id": 32495 + "process_id": 97018 }, - "duration": 18241542, - "start": 1733391714547670500 + "duration": 20594916, + "start": 1734010946927447174 }, { "name": "test_visibility.module", @@ -434,7 +432,7 @@ "_dd.ci.env_vars": "{\"CI_PROJECT_URL\":\"https://test.test.io/Test/test-test/test-test\",\"CI_PIPELINE_ID\":\"43949931\",\"CI_JOB_ID\":\"633358062\"}", "_dd.origin": "ciapp-test", "_dd.p.dm": "-0", - "_dd.p.tid": "6751756200000000", + "_dd.p.tid": "675ae84200000000", "ci.job.name": "test-job", "ci.job.url": 
"https://test.test.io/Test/test-test/test-test/-/jobs/633358062", "ci.node.labels": "[\"runner:test-test-test-test\"]", @@ -445,7 +443,7 @@ "ci.pipeline.url": "https://test.\u2020est.io/Test/test-\u2020est/test-test/-/pipelines/43949931", "ci.provider.name": "gitlab", "ci.stage.name": "test-stage", - "ci.workspace_path": "/tmp/pytest-of-root/pytest-12/test_manual_api_fake_efd_faulty_session0", + "ci.workspace_path": "/tmp/pytest-of-root/pytest-87/test_manual_api_fake_efd_faulty_session0", "component": "dd_manual_test_fw", "git.branch": "test.brancn/test_name", "git.commit.author.date": "2024-09-10T10:11:13+01:00", @@ -456,12 +454,12 @@ "git.repository_url": "https://test.test.io/Test/test-test/test-test.git", "git.tag": "v1.0.0", "language": "python", - "library_version": "2.18.0.dev124+gc03b9e422.d20241205", + "library_version": "2.18.0.dev111+g4ca600932", "os.architecture": "aarch64", "os.platform": "Linux", "os.version": "6.6.12-linuxkit", "runtime.name": "CPython", - "runtime.version": "3.9.19", + "runtime.version": "3.11.9", "span.kind": "test", "test.code_coverage.enabled": "false", "test.command": "manual_efd_faulty_session", @@ -471,8 +469,8 @@ "test.module": "m1", "test.module_path": "", "test.status": "pass", - "test_module_id": "7932912067234031810", - "test_session_id": "15705414272000062156", + "test_module_id": "8540260240647316329", + "test_session_id": "18323133602450366815", "type": "test_module_end" }, "metrics": { @@ -480,8 +478,8 @@ "_dd.tracer_kr": 1.0, "_sampling_priority_v1": 1 }, - "duration": 16004833, - "start": 1733391714548140542 + "duration": 17017958, + "start": 1734010946927985882 }, { "name": "test_visibility.suite", @@ -497,7 +495,7 @@ "_dd.ci.env_vars": "{\"CI_PROJECT_URL\":\"https://test.test.io/Test/test-test/test-test\",\"CI_PIPELINE_ID\":\"43949931\",\"CI_JOB_ID\":\"633358062\"}", "_dd.origin": "ciapp-test", "_dd.p.dm": "-0", - "_dd.p.tid": "6751756200000000", + "_dd.p.tid": "675ae84200000000", "ci.job.name": "test-job", 
"ci.job.url": "https://test.test.io/Test/test-test/test-test/-/jobs/633358062", "ci.node.labels": "[\"runner:test-test-test-test\"]", @@ -508,7 +506,7 @@ "ci.pipeline.url": "https://test.\u2020est.io/Test/test-\u2020est/test-test/-/pipelines/43949931", "ci.provider.name": "gitlab", "ci.stage.name": "test-stage", - "ci.workspace_path": "/tmp/pytest-of-root/pytest-12/test_manual_api_fake_efd_faulty_session0", + "ci.workspace_path": "/tmp/pytest-of-root/pytest-87/test_manual_api_fake_efd_faulty_session0", "component": "dd_manual_test_fw", "git.branch": "test.brancn/test_name", "git.commit.author.date": "2024-09-10T10:11:13+01:00", @@ -519,12 +517,12 @@ "git.repository_url": "https://test.test.io/Test/test-test/test-test.git", "git.tag": "v1.0.0", "language": "python", - "library_version": "2.18.0.dev124+gc03b9e422.d20241205", + "library_version": "2.18.0.dev111+g4ca600932", "os.architecture": "aarch64", "os.platform": "Linux", "os.version": "6.6.12-linuxkit", "runtime.name": "CPython", - "runtime.version": "3.9.19", + "runtime.version": "3.11.9", "span.kind": "test", "test.command": "manual_efd_faulty_session", "test.framework": "dd_manual_test_fw", @@ -533,9 +531,9 @@ "test.module_path": "", "test.status": "pass", "test.suite": "m1_s1", - "test_module_id": "7932912067234031810", - "test_session_id": "15705414272000062156", - "test_suite_id": "845846750843749324", + "test_module_id": "8540260240647316329", + "test_session_id": "18323133602450366815", + "test_suite_id": "12178664196634984526", "type": "test_suite_end" }, "metrics": { @@ -543,8 +541,8 @@ "_dd.tracer_kr": 1.0, "_sampling_priority_v1": 1 }, - "duration": 15893417, - "start": 1733391714548171417 + "duration": 16846042, + "start": 1734010946928084757 }, { "name": "test_visibility.module", @@ -560,7 +558,7 @@ "_dd.ci.env_vars": "{\"CI_PROJECT_URL\":\"https://test.test.io/Test/test-test/test-test\",\"CI_PIPELINE_ID\":\"43949931\",\"CI_JOB_ID\":\"633358062\"}", "_dd.origin": "ciapp-test", "_dd.p.dm": "-0", - 
"_dd.p.tid": "6751756200000000", + "_dd.p.tid": "675ae84200000000", "ci.job.name": "test-job", "ci.job.url": "https://test.test.io/Test/test-test/test-test/-/jobs/633358062", "ci.node.labels": "[\"runner:test-test-test-test\"]", @@ -571,7 +569,7 @@ "ci.pipeline.url": "https://test.\u2020est.io/Test/test-\u2020est/test-test/-/pipelines/43949931", "ci.provider.name": "gitlab", "ci.stage.name": "test-stage", - "ci.workspace_path": "/tmp/pytest-of-root/pytest-12/test_manual_api_fake_efd_faulty_session0", + "ci.workspace_path": "/tmp/pytest-of-root/pytest-87/test_manual_api_fake_efd_faulty_session0", "component": "dd_manual_test_fw", "git.branch": "test.brancn/test_name", "git.commit.author.date": "2024-09-10T10:11:13+01:00", @@ -582,12 +580,12 @@ "git.repository_url": "https://test.test.io/Test/test-test/test-test.git", "git.tag": "v1.0.0", "language": "python", - "library_version": "2.18.0.dev124+gc03b9e422.d20241205", + "library_version": "2.18.0.dev111+g4ca600932", "os.architecture": "aarch64", "os.platform": "Linux", "os.version": "6.6.12-linuxkit", "runtime.name": "CPython", - "runtime.version": "3.9.19", + "runtime.version": "3.11.9", "span.kind": "test", "test.code_coverage.enabled": "false", "test.command": "manual_efd_faulty_session", @@ -597,8 +595,8 @@ "test.module": "m2", "test.module_path": "", "test.status": "pass", - "test_module_id": "17675353669520667242", - "test_session_id": "15705414272000062156", + "test_module_id": "17061851581233455560", + "test_session_id": "18323133602450366815", "type": "test_module_end" }, "metrics": { @@ -606,8 +604,8 @@ "_dd.tracer_kr": 1.0, "_sampling_priority_v1": 1 }, - "duration": 1614125, - "start": 1733391714564197709 + "duration": 2905833, + "start": 1734010946945047549 }, { "name": "test_visibility.suite", @@ -623,7 +621,7 @@ "_dd.ci.env_vars": "{\"CI_PROJECT_URL\":\"https://test.test.io/Test/test-test/test-test\",\"CI_PIPELINE_ID\":\"43949931\",\"CI_JOB_ID\":\"633358062\"}", "_dd.origin": "ciapp-test", "_dd.p.dm": 
"-0", - "_dd.p.tid": "6751756200000000", + "_dd.p.tid": "675ae84200000000", "ci.job.name": "test-job", "ci.job.url": "https://test.test.io/Test/test-test/test-test/-/jobs/633358062", "ci.node.labels": "[\"runner:test-test-test-test\"]", @@ -634,7 +632,7 @@ "ci.pipeline.url": "https://test.\u2020est.io/Test/test-\u2020est/test-test/-/pipelines/43949931", "ci.provider.name": "gitlab", "ci.stage.name": "test-stage", - "ci.workspace_path": "/tmp/pytest-of-root/pytest-12/test_manual_api_fake_efd_faulty_session0", + "ci.workspace_path": "/tmp/pytest-of-root/pytest-87/test_manual_api_fake_efd_faulty_session0", "component": "dd_manual_test_fw", "git.branch": "test.brancn/test_name", "git.commit.author.date": "2024-09-10T10:11:13+01:00", @@ -645,12 +643,12 @@ "git.repository_url": "https://test.test.io/Test/test-test/test-test.git", "git.tag": "v1.0.0", "language": "python", - "library_version": "2.18.0.dev124+gc03b9e422.d20241205", + "library_version": "2.18.0.dev111+g4ca600932", "os.architecture": "aarch64", "os.platform": "Linux", "os.version": "6.6.12-linuxkit", "runtime.name": "CPython", - "runtime.version": "3.9.19", + "runtime.version": "3.11.9", "span.kind": "test", "test.command": "manual_efd_faulty_session", "test.framework": "dd_manual_test_fw", @@ -659,9 +657,9 @@ "test.module_path": "", "test.status": "pass", "test.suite": "m2_s1", - "test_module_id": "17675353669520667242", - "test_session_id": "15705414272000062156", - "test_suite_id": "3960011827465066479", + "test_module_id": "17061851581233455560", + "test_session_id": "18323133602450366815", + "test_suite_id": "13136832476816543943", "type": "test_suite_end" }, "metrics": { @@ -669,8 +667,8 @@ "_dd.tracer_kr": 1.0, "_sampling_priority_v1": 1 }, - "duration": 900042, - "start": 1733391714564224375 + "duration": 2378667, + "start": 1734010946945070465 }, { "name": "test_visibility.suite", @@ -686,7 +684,7 @@ "_dd.ci.env_vars": 
"{\"CI_PROJECT_URL\":\"https://test.test.io/Test/test-test/test-test\",\"CI_PIPELINE_ID\":\"43949931\",\"CI_JOB_ID\":\"633358062\"}", "_dd.origin": "ciapp-test", "_dd.p.dm": "-0", - "_dd.p.tid": "6751756200000000", + "_dd.p.tid": "675ae84200000000", "ci.job.name": "test-job", "ci.job.url": "https://test.test.io/Test/test-test/test-test/-/jobs/633358062", "ci.node.labels": "[\"runner:test-test-test-test\"]", @@ -697,7 +695,7 @@ "ci.pipeline.url": "https://test.\u2020est.io/Test/test-\u2020est/test-test/-/pipelines/43949931", "ci.provider.name": "gitlab", "ci.stage.name": "test-stage", - "ci.workspace_path": "/tmp/pytest-of-root/pytest-12/test_manual_api_fake_efd_faulty_session0", + "ci.workspace_path": "/tmp/pytest-of-root/pytest-87/test_manual_api_fake_efd_faulty_session0", "component": "dd_manual_test_fw", "git.branch": "test.brancn/test_name", "git.commit.author.date": "2024-09-10T10:11:13+01:00", @@ -708,12 +706,12 @@ "git.repository_url": "https://test.test.io/Test/test-test/test-test.git", "git.tag": "v1.0.0", "language": "python", - "library_version": "2.18.0.dev124+gc03b9e422.d20241205", + "library_version": "2.18.0.dev111+g4ca600932", "os.architecture": "aarch64", "os.platform": "Linux", "os.version": "6.6.12-linuxkit", "runtime.name": "CPython", - "runtime.version": "3.9.19", + "runtime.version": "3.11.9", "span.kind": "test", "test.command": "manual_efd_faulty_session", "test.framework": "dd_manual_test_fw", @@ -722,9 +720,9 @@ "test.module_path": "", "test.status": "pass", "test.suite": "m2_s2", - "test_module_id": "17675353669520667242", - "test_session_id": "15705414272000062156", - "test_suite_id": "599512189588521228", + "test_module_id": "17061851581233455560", + "test_session_id": "18323133602450366815", + "test_suite_id": "6307099272916499859", "type": "test_suite_end" }, "metrics": { @@ -732,14 +730,14 @@ "_dd.tracer_kr": 1.0, "_sampling_priority_v1": 1 }, - "duration": 559708, - "start": 1733391714565176084 + "duration": 381542, + "start": 
1734010946947489007 }], [ { "name": "test_visibility.test", "service": "test-test", - "resource": "m2_s1_t1", + "resource": "m2_s1_t0", "trace_id": 6, "span_id": 1, "parent_id": 0, @@ -750,7 +748,1525 @@ "_dd.ci.env_vars": "{\"CI_PROJECT_URL\":\"https://test.test.io/Test/test-test/test-test\",\"CI_PIPELINE_ID\":\"43949931\",\"CI_JOB_ID\":\"633358062\"}", "_dd.origin": "ciapp-test", "_dd.p.dm": "-0", - "_dd.p.tid": "6751756200000000", + "_dd.p.tid": "675ae84200000000", + "ci.job.name": "test-job", + "ci.job.url": "https://test.test.io/Test/test-test/test-test/-/jobs/633358062", + "ci.node.labels": "[\"runner:test-test-test-test\"]", + "ci.node.name": "14727097", + "ci.pipeline.id": "43949931", + "ci.pipeline.name": "Test/test-test/test-project-path", + "ci.pipeline.number": "14726", + "ci.pipeline.url": "https://test.\u2020est.io/Test/test-\u2020est/test-test/-/pipelines/43949931", + "ci.provider.name": "gitlab", + "ci.stage.name": "test-stage", + "ci.workspace_path": "/tmp/pytest-of-root/pytest-87/test_manual_api_fake_efd_faulty_session0", + "component": "dd_manual_test_fw", + "git.branch": "test.brancn/test_name", + "git.commit.author.date": "2024-09-10T10:11:13+01:00", + "git.commit.author.email": "First.Last@testtest.com", + "git.commit.author.name": "TestFirst TestLast", + "git.commit.message": "test commit message", + "git.commit.sha": "c165eb71ef833b752783b5268f21521fd16f812a", + "git.repository_url": "https://test.test.io/Test/test-test/test-test.git", + "git.tag": "v1.0.0", + "language": "python", + "library_version": "2.18.0.dev111+g4ca600932", + "os.architecture": "aarch64", + "os.platform": "Linux", + "os.version": "6.6.12-linuxkit", + "runtime-id": "096f74b542c341ea9f8a4b6947a6a95f", + "runtime.name": "CPython", + "runtime.version": "3.11.9", + "span.kind": "test", + "test.command": "manual_efd_faulty_session", + "test.framework": "dd_manual_test_fw", + "test.framework_version": "1.0.0", + "test.module": "m2", + "test.module_path": "", + "test.name": 
"m2_s1_t0", + "test.status": "pass", + "test.suite": "m2_s1", + "test.type": "test", + "test_module_id": "17061851581233455560", + "test_session_id": "18323133602450366815", + "test_suite_id": "13136832476816543943", + "type": "test" + }, + "metrics": { + "_dd.py.partial_flush": 1, + "_dd.top_level": 1, + "_dd.tracer_kr": 1.0, + "_sampling_priority_v1": 1, + "process_id": 97018 + }, + "duration": 32417, + "start": 1734010946945087257 + }], +[ + { + "name": "test_visibility.test", + "service": "test-test", + "resource": "m2_s1_t1", + "trace_id": 7, + "span_id": 1, + "parent_id": 0, + "type": "test", + "error": 0, + "meta": { + "_dd.base_service": "test_manual_api_fake_efd_faulty_session0", + "_dd.ci.env_vars": "{\"CI_PROJECT_URL\":\"https://test.test.io/Test/test-test/test-test\",\"CI_PIPELINE_ID\":\"43949931\",\"CI_JOB_ID\":\"633358062\"}", + "_dd.origin": "ciapp-test", + "_dd.p.dm": "-0", + "_dd.p.tid": "675ae84200000000", + "ci.job.name": "test-job", + "ci.job.url": "https://test.test.io/Test/test-test/test-test/-/jobs/633358062", + "ci.node.labels": "[\"runner:test-test-test-test\"]", + "ci.node.name": "14727097", + "ci.pipeline.id": "43949931", + "ci.pipeline.name": "Test/test-test/test-project-path", + "ci.pipeline.number": "14726", + "ci.pipeline.url": "https://test.\u2020est.io/Test/test-\u2020est/test-test/-/pipelines/43949931", + "ci.provider.name": "gitlab", + "ci.stage.name": "test-stage", + "ci.workspace_path": "/tmp/pytest-of-root/pytest-87/test_manual_api_fake_efd_faulty_session0", + "component": "dd_manual_test_fw", + "git.branch": "test.brancn/test_name", + "git.commit.author.date": "2024-09-10T10:11:13+01:00", + "git.commit.author.email": "First.Last@testtest.com", + "git.commit.author.name": "TestFirst TestLast", + "git.commit.message": "test commit message", + "git.commit.sha": "c165eb71ef833b752783b5268f21521fd16f812a", + "git.repository_url": "https://test.test.io/Test/test-test/test-test.git", + "git.tag": "v1.0.0", + "language": "python", + 
"library_version": "2.18.0.dev111+g4ca600932", + "os.architecture": "aarch64", + "os.platform": "Linux", + "os.version": "6.6.12-linuxkit", + "runtime-id": "096f74b542c341ea9f8a4b6947a6a95f", + "runtime.name": "CPython", + "runtime.version": "3.11.9", + "span.kind": "test", + "test.command": "manual_efd_faulty_session", + "test.framework": "dd_manual_test_fw", + "test.framework_version": "1.0.0", + "test.module": "m2", + "test.module_path": "", + "test.name": "m2_s1_t1", + "test.status": "pass", + "test.suite": "m2_s1", + "test.type": "test", + "test_module_id": "17061851581233455560", + "test_session_id": "18323133602450366815", + "test_suite_id": "13136832476816543943", + "type": "test" + }, + "metrics": { + "_dd.py.partial_flush": 1, + "_dd.top_level": 1, + "_dd.tracer_kr": 1.0, + "_sampling_priority_v1": 1, + "process_id": 97018 + }, + "duration": 26458, + "start": 1734010946945178674 + }], +[ + { + "name": "test_visibility.test", + "service": "test-test", + "resource": "m2_s1_t2", + "trace_id": 8, + "span_id": 1, + "parent_id": 0, + "type": "test", + "error": 0, + "meta": { + "_dd.base_service": "test_manual_api_fake_efd_faulty_session0", + "_dd.ci.env_vars": "{\"CI_PROJECT_URL\":\"https://test.test.io/Test/test-test/test-test\",\"CI_PIPELINE_ID\":\"43949931\",\"CI_JOB_ID\":\"633358062\"}", + "_dd.origin": "ciapp-test", + "_dd.p.dm": "-0", + "_dd.p.tid": "675ae84200000000", + "ci.job.name": "test-job", + "ci.job.url": "https://test.test.io/Test/test-test/test-test/-/jobs/633358062", + "ci.node.labels": "[\"runner:test-test-test-test\"]", + "ci.node.name": "14727097", + "ci.pipeline.id": "43949931", + "ci.pipeline.name": "Test/test-test/test-project-path", + "ci.pipeline.number": "14726", + "ci.pipeline.url": "https://test.\u2020est.io/Test/test-\u2020est/test-test/-/pipelines/43949931", + "ci.provider.name": "gitlab", + "ci.stage.name": "test-stage", + "ci.workspace_path": "/tmp/pytest-of-root/pytest-87/test_manual_api_fake_efd_faulty_session0", + "component": 
"dd_manual_test_fw", + "git.branch": "test.brancn/test_name", + "git.commit.author.date": "2024-09-10T10:11:13+01:00", + "git.commit.author.email": "First.Last@testtest.com", + "git.commit.author.name": "TestFirst TestLast", + "git.commit.message": "test commit message", + "git.commit.sha": "c165eb71ef833b752783b5268f21521fd16f812a", + "git.repository_url": "https://test.test.io/Test/test-test/test-test.git", + "git.tag": "v1.0.0", + "language": "python", + "library_version": "2.18.0.dev111+g4ca600932", + "os.architecture": "aarch64", + "os.platform": "Linux", + "os.version": "6.6.12-linuxkit", + "runtime-id": "096f74b542c341ea9f8a4b6947a6a95f", + "runtime.name": "CPython", + "runtime.version": "3.11.9", + "span.kind": "test", + "test.command": "manual_efd_faulty_session", + "test.framework": "dd_manual_test_fw", + "test.framework_version": "1.0.0", + "test.module": "m2", + "test.module_path": "", + "test.name": "m2_s1_t2", + "test.status": "pass", + "test.suite": "m2_s1", + "test.type": "test", + "test_module_id": "17061851581233455560", + "test_session_id": "18323133602450366815", + "test_suite_id": "13136832476816543943", + "type": "test" + }, + "metrics": { + "_dd.py.partial_flush": 1, + "_dd.top_level": 1, + "_dd.tracer_kr": 1.0, + "_sampling_priority_v1": 1, + "process_id": 97018 + }, + "duration": 29084, + "start": 1734010946945258340 + }], +[ + { + "name": "test_visibility.test", + "service": "test-test", + "resource": "m2_s1_t3", + "trace_id": 9, + "span_id": 1, + "parent_id": 0, + "type": "test", + "error": 0, + "meta": { + "_dd.base_service": "test_manual_api_fake_efd_faulty_session0", + "_dd.ci.env_vars": "{\"CI_PROJECT_URL\":\"https://test.test.io/Test/test-test/test-test\",\"CI_PIPELINE_ID\":\"43949931\",\"CI_JOB_ID\":\"633358062\"}", + "_dd.origin": "ciapp-test", + "_dd.p.dm": "-0", + "_dd.p.tid": "675ae84200000000", + "ci.job.name": "test-job", + "ci.job.url": "https://test.test.io/Test/test-test/test-test/-/jobs/633358062", + "ci.node.labels": 
"[\"runner:test-test-test-test\"]", + "ci.node.name": "14727097", + "ci.pipeline.id": "43949931", + "ci.pipeline.name": "Test/test-test/test-project-path", + "ci.pipeline.number": "14726", + "ci.pipeline.url": "https://test.\u2020est.io/Test/test-\u2020est/test-test/-/pipelines/43949931", + "ci.provider.name": "gitlab", + "ci.stage.name": "test-stage", + "ci.workspace_path": "/tmp/pytest-of-root/pytest-87/test_manual_api_fake_efd_faulty_session0", + "component": "dd_manual_test_fw", + "git.branch": "test.brancn/test_name", + "git.commit.author.date": "2024-09-10T10:11:13+01:00", + "git.commit.author.email": "First.Last@testtest.com", + "git.commit.author.name": "TestFirst TestLast", + "git.commit.message": "test commit message", + "git.commit.sha": "c165eb71ef833b752783b5268f21521fd16f812a", + "git.repository_url": "https://test.test.io/Test/test-test/test-test.git", + "git.tag": "v1.0.0", + "language": "python", + "library_version": "2.18.0.dev111+g4ca600932", + "os.architecture": "aarch64", + "os.platform": "Linux", + "os.version": "6.6.12-linuxkit", + "runtime-id": "096f74b542c341ea9f8a4b6947a6a95f", + "runtime.name": "CPython", + "runtime.version": "3.11.9", + "span.kind": "test", + "test.command": "manual_efd_faulty_session", + "test.framework": "dd_manual_test_fw", + "test.framework_version": "1.0.0", + "test.module": "m2", + "test.module_path": "", + "test.name": "m2_s1_t3", + "test.status": "pass", + "test.suite": "m2_s1", + "test.type": "test", + "test_module_id": "17061851581233455560", + "test_session_id": "18323133602450366815", + "test_suite_id": "13136832476816543943", + "type": "test" + }, + "metrics": { + "_dd.py.partial_flush": 1, + "_dd.top_level": 1, + "_dd.tracer_kr": 1.0, + "_sampling_priority_v1": 1, + "process_id": 97018 + }, + "duration": 23375, + "start": 1734010946945334507 + }], +[ + { + "name": "test_visibility.test", + "service": "test-test", + "resource": "m2_s1_t4", + "trace_id": 10, + "span_id": 1, + "parent_id": 0, + "type": "test", 
+ "error": 0, + "meta": { + "_dd.base_service": "test_manual_api_fake_efd_faulty_session0", + "_dd.ci.env_vars": "{\"CI_PROJECT_URL\":\"https://test.test.io/Test/test-test/test-test\",\"CI_PIPELINE_ID\":\"43949931\",\"CI_JOB_ID\":\"633358062\"}", + "_dd.origin": "ciapp-test", + "_dd.p.dm": "-0", + "_dd.p.tid": "675ae84200000000", + "ci.job.name": "test-job", + "ci.job.url": "https://test.test.io/Test/test-test/test-test/-/jobs/633358062", + "ci.node.labels": "[\"runner:test-test-test-test\"]", + "ci.node.name": "14727097", + "ci.pipeline.id": "43949931", + "ci.pipeline.name": "Test/test-test/test-project-path", + "ci.pipeline.number": "14726", + "ci.pipeline.url": "https://test.\u2020est.io/Test/test-\u2020est/test-test/-/pipelines/43949931", + "ci.provider.name": "gitlab", + "ci.stage.name": "test-stage", + "ci.workspace_path": "/tmp/pytest-of-root/pytest-87/test_manual_api_fake_efd_faulty_session0", + "component": "dd_manual_test_fw", + "git.branch": "test.brancn/test_name", + "git.commit.author.date": "2024-09-10T10:11:13+01:00", + "git.commit.author.email": "First.Last@testtest.com", + "git.commit.author.name": "TestFirst TestLast", + "git.commit.message": "test commit message", + "git.commit.sha": "c165eb71ef833b752783b5268f21521fd16f812a", + "git.repository_url": "https://test.test.io/Test/test-test/test-test.git", + "git.tag": "v1.0.0", + "language": "python", + "library_version": "2.18.0.dev111+g4ca600932", + "os.architecture": "aarch64", + "os.platform": "Linux", + "os.version": "6.6.12-linuxkit", + "runtime-id": "096f74b542c341ea9f8a4b6947a6a95f", + "runtime.name": "CPython", + "runtime.version": "3.11.9", + "span.kind": "test", + "test.command": "manual_efd_faulty_session", + "test.framework": "dd_manual_test_fw", + "test.framework_version": "1.0.0", + "test.module": "m2", + "test.module_path": "", + "test.name": "m2_s1_t4", + "test.status": "pass", + "test.suite": "m2_s1", + "test.type": "test", + "test_module_id": "17061851581233455560", + 
"test_session_id": "18323133602450366815", + "test_suite_id": "13136832476816543943", + "type": "test" + }, + "metrics": { + "_dd.py.partial_flush": 1, + "_dd.top_level": 1, + "_dd.tracer_kr": 1.0, + "_sampling_priority_v1": 1, + "process_id": 97018 + }, + "duration": 22625, + "start": 1734010946945402174 + }], +[ + { + "name": "test_visibility.test", + "service": "test-test", + "resource": "m2_s1_t5", + "trace_id": 11, + "span_id": 1, + "parent_id": 0, + "type": "test", + "error": 0, + "meta": { + "_dd.base_service": "test_manual_api_fake_efd_faulty_session0", + "_dd.ci.env_vars": "{\"CI_PROJECT_URL\":\"https://test.test.io/Test/test-test/test-test\",\"CI_PIPELINE_ID\":\"43949931\",\"CI_JOB_ID\":\"633358062\"}", + "_dd.origin": "ciapp-test", + "_dd.p.dm": "-0", + "_dd.p.tid": "675ae84200000000", + "ci.job.name": "test-job", + "ci.job.url": "https://test.test.io/Test/test-test/test-test/-/jobs/633358062", + "ci.node.labels": "[\"runner:test-test-test-test\"]", + "ci.node.name": "14727097", + "ci.pipeline.id": "43949931", + "ci.pipeline.name": "Test/test-test/test-project-path", + "ci.pipeline.number": "14726", + "ci.pipeline.url": "https://test.\u2020est.io/Test/test-\u2020est/test-test/-/pipelines/43949931", + "ci.provider.name": "gitlab", + "ci.stage.name": "test-stage", + "ci.workspace_path": "/tmp/pytest-of-root/pytest-87/test_manual_api_fake_efd_faulty_session0", + "component": "dd_manual_test_fw", + "git.branch": "test.brancn/test_name", + "git.commit.author.date": "2024-09-10T10:11:13+01:00", + "git.commit.author.email": "First.Last@testtest.com", + "git.commit.author.name": "TestFirst TestLast", + "git.commit.message": "test commit message", + "git.commit.sha": "c165eb71ef833b752783b5268f21521fd16f812a", + "git.repository_url": "https://test.test.io/Test/test-test/test-test.git", + "git.tag": "v1.0.0", + "language": "python", + "library_version": "2.18.0.dev111+g4ca600932", + "os.architecture": "aarch64", + "os.platform": "Linux", + "os.version": 
"6.6.12-linuxkit", + "runtime-id": "096f74b542c341ea9f8a4b6947a6a95f", + "runtime.name": "CPython", + "runtime.version": "3.11.9", + "span.kind": "test", + "test.command": "manual_efd_faulty_session", + "test.framework": "dd_manual_test_fw", + "test.framework_version": "1.0.0", + "test.module": "m2", + "test.module_path": "", + "test.name": "m2_s1_t5", + "test.status": "pass", + "test.suite": "m2_s1", + "test.type": "test", + "test_module_id": "17061851581233455560", + "test_session_id": "18323133602450366815", + "test_suite_id": "13136832476816543943", + "type": "test" + }, + "metrics": { + "_dd.py.partial_flush": 1, + "_dd.top_level": 1, + "_dd.tracer_kr": 1.0, + "_sampling_priority_v1": 1, + "process_id": 97018 + }, + "duration": 21500, + "start": 1734010946945466590 + }], +[ + { + "name": "test_visibility.test", + "service": "test-test", + "resource": "m2_s1_t6", + "trace_id": 12, + "span_id": 1, + "parent_id": 0, + "type": "test", + "error": 0, + "meta": { + "_dd.base_service": "test_manual_api_fake_efd_faulty_session0", + "_dd.ci.env_vars": "{\"CI_PROJECT_URL\":\"https://test.test.io/Test/test-test/test-test\",\"CI_PIPELINE_ID\":\"43949931\",\"CI_JOB_ID\":\"633358062\"}", + "_dd.origin": "ciapp-test", + "_dd.p.dm": "-0", + "_dd.p.tid": "675ae84200000000", + "ci.job.name": "test-job", + "ci.job.url": "https://test.test.io/Test/test-test/test-test/-/jobs/633358062", + "ci.node.labels": "[\"runner:test-test-test-test\"]", + "ci.node.name": "14727097", + "ci.pipeline.id": "43949931", + "ci.pipeline.name": "Test/test-test/test-project-path", + "ci.pipeline.number": "14726", + "ci.pipeline.url": "https://test.\u2020est.io/Test/test-\u2020est/test-test/-/pipelines/43949931", + "ci.provider.name": "gitlab", + "ci.stage.name": "test-stage", + "ci.workspace_path": "/tmp/pytest-of-root/pytest-87/test_manual_api_fake_efd_faulty_session0", + "component": "dd_manual_test_fw", + "git.branch": "test.brancn/test_name", + "git.commit.author.date": "2024-09-10T10:11:13+01:00", 
+ "git.commit.author.email": "First.Last@testtest.com", + "git.commit.author.name": "TestFirst TestLast", + "git.commit.message": "test commit message", + "git.commit.sha": "c165eb71ef833b752783b5268f21521fd16f812a", + "git.repository_url": "https://test.test.io/Test/test-test/test-test.git", + "git.tag": "v1.0.0", + "language": "python", + "library_version": "2.18.0.dev111+g4ca600932", + "os.architecture": "aarch64", + "os.platform": "Linux", + "os.version": "6.6.12-linuxkit", + "runtime-id": "096f74b542c341ea9f8a4b6947a6a95f", + "runtime.name": "CPython", + "runtime.version": "3.11.9", + "span.kind": "test", + "test.command": "manual_efd_faulty_session", + "test.framework": "dd_manual_test_fw", + "test.framework_version": "1.0.0", + "test.module": "m2", + "test.module_path": "", + "test.name": "m2_s1_t6", + "test.status": "pass", + "test.suite": "m2_s1", + "test.type": "test", + "test_module_id": "17061851581233455560", + "test_session_id": "18323133602450366815", + "test_suite_id": "13136832476816543943", + "type": "test" + }, + "metrics": { + "_dd.py.partial_flush": 1, + "_dd.top_level": 1, + "_dd.tracer_kr": 1.0, + "_sampling_priority_v1": 1, + "process_id": 97018 + }, + "duration": 21000, + "start": 1734010946945528757 + }], +[ + { + "name": "test_visibility.test", + "service": "test-test", + "resource": "m2_s1_t7", + "trace_id": 13, + "span_id": 1, + "parent_id": 0, + "type": "test", + "error": 0, + "meta": { + "_dd.base_service": "test_manual_api_fake_efd_faulty_session0", + "_dd.ci.env_vars": "{\"CI_PROJECT_URL\":\"https://test.test.io/Test/test-test/test-test\",\"CI_PIPELINE_ID\":\"43949931\",\"CI_JOB_ID\":\"633358062\"}", + "_dd.origin": "ciapp-test", + "_dd.p.dm": "-0", + "_dd.p.tid": "675ae84200000000", + "ci.job.name": "test-job", + "ci.job.url": "https://test.test.io/Test/test-test/test-test/-/jobs/633358062", + "ci.node.labels": "[\"runner:test-test-test-test\"]", + "ci.node.name": "14727097", + "ci.pipeline.id": "43949931", + "ci.pipeline.name": 
"Test/test-test/test-project-path", + "ci.pipeline.number": "14726", + "ci.pipeline.url": "https://test.\u2020est.io/Test/test-\u2020est/test-test/-/pipelines/43949931", + "ci.provider.name": "gitlab", + "ci.stage.name": "test-stage", + "ci.workspace_path": "/tmp/pytest-of-root/pytest-87/test_manual_api_fake_efd_faulty_session0", + "component": "dd_manual_test_fw", + "git.branch": "test.brancn/test_name", + "git.commit.author.date": "2024-09-10T10:11:13+01:00", + "git.commit.author.email": "First.Last@testtest.com", + "git.commit.author.name": "TestFirst TestLast", + "git.commit.message": "test commit message", + "git.commit.sha": "c165eb71ef833b752783b5268f21521fd16f812a", + "git.repository_url": "https://test.test.io/Test/test-test/test-test.git", + "git.tag": "v1.0.0", + "language": "python", + "library_version": "2.18.0.dev111+g4ca600932", + "os.architecture": "aarch64", + "os.platform": "Linux", + "os.version": "6.6.12-linuxkit", + "runtime-id": "096f74b542c341ea9f8a4b6947a6a95f", + "runtime.name": "CPython", + "runtime.version": "3.11.9", + "span.kind": "test", + "test.command": "manual_efd_faulty_session", + "test.framework": "dd_manual_test_fw", + "test.framework_version": "1.0.0", + "test.module": "m2", + "test.module_path": "", + "test.name": "m2_s1_t7", + "test.status": "pass", + "test.suite": "m2_s1", + "test.type": "test", + "test_module_id": "17061851581233455560", + "test_session_id": "18323133602450366815", + "test_suite_id": "13136832476816543943", + "type": "test" + }, + "metrics": { + "_dd.py.partial_flush": 1, + "_dd.top_level": 1, + "_dd.tracer_kr": 1.0, + "_sampling_priority_v1": 1, + "process_id": 97018 + }, + "duration": 20083, + "start": 1734010946945590132 + }], +[ + { + "name": "test_visibility.test", + "service": "test-test", + "resource": "m2_s1_t8", + "trace_id": 14, + "span_id": 1, + "parent_id": 0, + "type": "test", + "error": 0, + "meta": { + "_dd.base_service": "test_manual_api_fake_efd_faulty_session0", + "_dd.ci.env_vars": 
"{\"CI_PROJECT_URL\":\"https://test.test.io/Test/test-test/test-test\",\"CI_PIPELINE_ID\":\"43949931\",\"CI_JOB_ID\":\"633358062\"}", + "_dd.origin": "ciapp-test", + "_dd.p.dm": "-0", + "_dd.p.tid": "675ae84200000000", + "ci.job.name": "test-job", + "ci.job.url": "https://test.test.io/Test/test-test/test-test/-/jobs/633358062", + "ci.node.labels": "[\"runner:test-test-test-test\"]", + "ci.node.name": "14727097", + "ci.pipeline.id": "43949931", + "ci.pipeline.name": "Test/test-test/test-project-path", + "ci.pipeline.number": "14726", + "ci.pipeline.url": "https://test.\u2020est.io/Test/test-\u2020est/test-test/-/pipelines/43949931", + "ci.provider.name": "gitlab", + "ci.stage.name": "test-stage", + "ci.workspace_path": "/tmp/pytest-of-root/pytest-87/test_manual_api_fake_efd_faulty_session0", + "component": "dd_manual_test_fw", + "git.branch": "test.brancn/test_name", + "git.commit.author.date": "2024-09-10T10:11:13+01:00", + "git.commit.author.email": "First.Last@testtest.com", + "git.commit.author.name": "TestFirst TestLast", + "git.commit.message": "test commit message", + "git.commit.sha": "c165eb71ef833b752783b5268f21521fd16f812a", + "git.repository_url": "https://test.test.io/Test/test-test/test-test.git", + "git.tag": "v1.0.0", + "language": "python", + "library_version": "2.18.0.dev111+g4ca600932", + "os.architecture": "aarch64", + "os.platform": "Linux", + "os.version": "6.6.12-linuxkit", + "runtime-id": "096f74b542c341ea9f8a4b6947a6a95f", + "runtime.name": "CPython", + "runtime.version": "3.11.9", + "span.kind": "test", + "test.command": "manual_efd_faulty_session", + "test.framework": "dd_manual_test_fw", + "test.framework_version": "1.0.0", + "test.module": "m2", + "test.module_path": "", + "test.name": "m2_s1_t8", + "test.status": "pass", + "test.suite": "m2_s1", + "test.type": "test", + "test_module_id": "17061851581233455560", + "test_session_id": "18323133602450366815", + "test_suite_id": "13136832476816543943", + "type": "test" + }, + "metrics": { + 
"_dd.py.partial_flush": 1, + "_dd.top_level": 1, + "_dd.tracer_kr": 1.0, + "_sampling_priority_v1": 1, + "process_id": 97018 + }, + "duration": 21208, + "start": 1734010946945651882 + }], +[ + { + "name": "test_visibility.test", + "service": "test-test", + "resource": "m2_s1_t9", + "trace_id": 15, + "span_id": 1, + "parent_id": 0, + "type": "test", + "error": 0, + "meta": { + "_dd.base_service": "test_manual_api_fake_efd_faulty_session0", + "_dd.ci.env_vars": "{\"CI_PROJECT_URL\":\"https://test.test.io/Test/test-test/test-test\",\"CI_PIPELINE_ID\":\"43949931\",\"CI_JOB_ID\":\"633358062\"}", + "_dd.origin": "ciapp-test", + "_dd.p.dm": "-0", + "_dd.p.tid": "675ae84200000000", + "ci.job.name": "test-job", + "ci.job.url": "https://test.test.io/Test/test-test/test-test/-/jobs/633358062", + "ci.node.labels": "[\"runner:test-test-test-test\"]", + "ci.node.name": "14727097", + "ci.pipeline.id": "43949931", + "ci.pipeline.name": "Test/test-test/test-project-path", + "ci.pipeline.number": "14726", + "ci.pipeline.url": "https://test.\u2020est.io/Test/test-\u2020est/test-test/-/pipelines/43949931", + "ci.provider.name": "gitlab", + "ci.stage.name": "test-stage", + "ci.workspace_path": "/tmp/pytest-of-root/pytest-87/test_manual_api_fake_efd_faulty_session0", + "component": "dd_manual_test_fw", + "git.branch": "test.brancn/test_name", + "git.commit.author.date": "2024-09-10T10:11:13+01:00", + "git.commit.author.email": "First.Last@testtest.com", + "git.commit.author.name": "TestFirst TestLast", + "git.commit.message": "test commit message", + "git.commit.sha": "c165eb71ef833b752783b5268f21521fd16f812a", + "git.repository_url": "https://test.test.io/Test/test-test/test-test.git", + "git.tag": "v1.0.0", + "language": "python", + "library_version": "2.18.0.dev111+g4ca600932", + "os.architecture": "aarch64", + "os.platform": "Linux", + "os.version": "6.6.12-linuxkit", + "runtime-id": "096f74b542c341ea9f8a4b6947a6a95f", + "runtime.name": "CPython", + "runtime.version": "3.11.9", + 
"span.kind": "test", + "test.command": "manual_efd_faulty_session", + "test.framework": "dd_manual_test_fw", + "test.framework_version": "1.0.0", + "test.module": "m2", + "test.module_path": "", + "test.name": "m2_s1_t9", + "test.status": "pass", + "test.suite": "m2_s1", + "test.type": "test", + "test_module_id": "17061851581233455560", + "test_session_id": "18323133602450366815", + "test_suite_id": "13136832476816543943", + "type": "test" + }, + "metrics": { + "_dd.py.partial_flush": 1, + "_dd.top_level": 1, + "_dd.tracer_kr": 1.0, + "_sampling_priority_v1": 1, + "process_id": 97018 + }, + "duration": 21625, + "start": 1734010946945713840 + }], +[ + { + "name": "test_visibility.test", + "service": "test-test", + "resource": "m2_s1_t10", + "trace_id": 16, + "span_id": 1, + "parent_id": 0, + "type": "test", + "error": 0, + "meta": { + "_dd.base_service": "test_manual_api_fake_efd_faulty_session0", + "_dd.ci.env_vars": "{\"CI_PROJECT_URL\":\"https://test.test.io/Test/test-test/test-test\",\"CI_PIPELINE_ID\":\"43949931\",\"CI_JOB_ID\":\"633358062\"}", + "_dd.origin": "ciapp-test", + "_dd.p.dm": "-0", + "_dd.p.tid": "675ae84200000000", + "ci.job.name": "test-job", + "ci.job.url": "https://test.test.io/Test/test-test/test-test/-/jobs/633358062", + "ci.node.labels": "[\"runner:test-test-test-test\"]", + "ci.node.name": "14727097", + "ci.pipeline.id": "43949931", + "ci.pipeline.name": "Test/test-test/test-project-path", + "ci.pipeline.number": "14726", + "ci.pipeline.url": "https://test.\u2020est.io/Test/test-\u2020est/test-test/-/pipelines/43949931", + "ci.provider.name": "gitlab", + "ci.stage.name": "test-stage", + "ci.workspace_path": "/tmp/pytest-of-root/pytest-87/test_manual_api_fake_efd_faulty_session0", + "component": "dd_manual_test_fw", + "git.branch": "test.brancn/test_name", + "git.commit.author.date": "2024-09-10T10:11:13+01:00", + "git.commit.author.email": "First.Last@testtest.com", + "git.commit.author.name": "TestFirst TestLast", + "git.commit.message": 
"test commit message", + "git.commit.sha": "c165eb71ef833b752783b5268f21521fd16f812a", + "git.repository_url": "https://test.test.io/Test/test-test/test-test.git", + "git.tag": "v1.0.0", + "language": "python", + "library_version": "2.18.0.dev111+g4ca600932", + "os.architecture": "aarch64", + "os.platform": "Linux", + "os.version": "6.6.12-linuxkit", + "runtime-id": "096f74b542c341ea9f8a4b6947a6a95f", + "runtime.name": "CPython", + "runtime.version": "3.11.9", + "span.kind": "test", + "test.command": "manual_efd_faulty_session", + "test.framework": "dd_manual_test_fw", + "test.framework_version": "1.0.0", + "test.module": "m2", + "test.module_path": "", + "test.name": "m2_s1_t10", + "test.status": "pass", + "test.suite": "m2_s1", + "test.type": "test", + "test_module_id": "17061851581233455560", + "test_session_id": "18323133602450366815", + "test_suite_id": "13136832476816543943", + "type": "test" + }, + "metrics": { + "_dd.py.partial_flush": 1, + "_dd.top_level": 1, + "_dd.tracer_kr": 1.0, + "_sampling_priority_v1": 1, + "process_id": 97018 + }, + "duration": 19750, + "start": 1734010946945776090 + }], +[ + { + "name": "test_visibility.test", + "service": "test-test", + "resource": "m2_s1_t11", + "trace_id": 17, + "span_id": 1, + "parent_id": 0, + "type": "test", + "error": 0, + "meta": { + "_dd.base_service": "test_manual_api_fake_efd_faulty_session0", + "_dd.ci.env_vars": "{\"CI_PROJECT_URL\":\"https://test.test.io/Test/test-test/test-test\",\"CI_PIPELINE_ID\":\"43949931\",\"CI_JOB_ID\":\"633358062\"}", + "_dd.origin": "ciapp-test", + "_dd.p.dm": "-0", + "_dd.p.tid": "675ae84200000000", + "ci.job.name": "test-job", + "ci.job.url": "https://test.test.io/Test/test-test/test-test/-/jobs/633358062", + "ci.node.labels": "[\"runner:test-test-test-test\"]", + "ci.node.name": "14727097", + "ci.pipeline.id": "43949931", + "ci.pipeline.name": "Test/test-test/test-project-path", + "ci.pipeline.number": "14726", + "ci.pipeline.url": 
"https://test.\u2020est.io/Test/test-\u2020est/test-test/-/pipelines/43949931", + "ci.provider.name": "gitlab", + "ci.stage.name": "test-stage", + "ci.workspace_path": "/tmp/pytest-of-root/pytest-87/test_manual_api_fake_efd_faulty_session0", + "component": "dd_manual_test_fw", + "git.branch": "test.brancn/test_name", + "git.commit.author.date": "2024-09-10T10:11:13+01:00", + "git.commit.author.email": "First.Last@testtest.com", + "git.commit.author.name": "TestFirst TestLast", + "git.commit.message": "test commit message", + "git.commit.sha": "c165eb71ef833b752783b5268f21521fd16f812a", + "git.repository_url": "https://test.test.io/Test/test-test/test-test.git", + "git.tag": "v1.0.0", + "language": "python", + "library_version": "2.18.0.dev111+g4ca600932", + "os.architecture": "aarch64", + "os.platform": "Linux", + "os.version": "6.6.12-linuxkit", + "runtime-id": "096f74b542c341ea9f8a4b6947a6a95f", + "runtime.name": "CPython", + "runtime.version": "3.11.9", + "span.kind": "test", + "test.command": "manual_efd_faulty_session", + "test.framework": "dd_manual_test_fw", + "test.framework_version": "1.0.0", + "test.module": "m2", + "test.module_path": "", + "test.name": "m2_s1_t11", + "test.status": "pass", + "test.suite": "m2_s1", + "test.type": "test", + "test_module_id": "17061851581233455560", + "test_session_id": "18323133602450366815", + "test_suite_id": "13136832476816543943", + "type": "test" + }, + "metrics": { + "_dd.py.partial_flush": 1, + "_dd.top_level": 1, + "_dd.tracer_kr": 1.0, + "_sampling_priority_v1": 1, + "process_id": 97018 + }, + "duration": 21583, + "start": 1734010946945838549 + }], +[ + { + "name": "test_visibility.test", + "service": "test-test", + "resource": "m2_s1_t12", + "trace_id": 18, + "span_id": 1, + "parent_id": 0, + "type": "test", + "error": 0, + "meta": { + "_dd.base_service": "test_manual_api_fake_efd_faulty_session0", + "_dd.ci.env_vars": 
"{\"CI_PROJECT_URL\":\"https://test.test.io/Test/test-test/test-test\",\"CI_PIPELINE_ID\":\"43949931\",\"CI_JOB_ID\":\"633358062\"}", + "_dd.origin": "ciapp-test", + "_dd.p.dm": "-0", + "_dd.p.tid": "675ae84200000000", + "ci.job.name": "test-job", + "ci.job.url": "https://test.test.io/Test/test-test/test-test/-/jobs/633358062", + "ci.node.labels": "[\"runner:test-test-test-test\"]", + "ci.node.name": "14727097", + "ci.pipeline.id": "43949931", + "ci.pipeline.name": "Test/test-test/test-project-path", + "ci.pipeline.number": "14726", + "ci.pipeline.url": "https://test.\u2020est.io/Test/test-\u2020est/test-test/-/pipelines/43949931", + "ci.provider.name": "gitlab", + "ci.stage.name": "test-stage", + "ci.workspace_path": "/tmp/pytest-of-root/pytest-87/test_manual_api_fake_efd_faulty_session0", + "component": "dd_manual_test_fw", + "git.branch": "test.brancn/test_name", + "git.commit.author.date": "2024-09-10T10:11:13+01:00", + "git.commit.author.email": "First.Last@testtest.com", + "git.commit.author.name": "TestFirst TestLast", + "git.commit.message": "test commit message", + "git.commit.sha": "c165eb71ef833b752783b5268f21521fd16f812a", + "git.repository_url": "https://test.test.io/Test/test-test/test-test.git", + "git.tag": "v1.0.0", + "language": "python", + "library_version": "2.18.0.dev111+g4ca600932", + "os.architecture": "aarch64", + "os.platform": "Linux", + "os.version": "6.6.12-linuxkit", + "runtime-id": "096f74b542c341ea9f8a4b6947a6a95f", + "runtime.name": "CPython", + "runtime.version": "3.11.9", + "span.kind": "test", + "test.command": "manual_efd_faulty_session", + "test.framework": "dd_manual_test_fw", + "test.framework_version": "1.0.0", + "test.module": "m2", + "test.module_path": "", + "test.name": "m2_s1_t12", + "test.status": "pass", + "test.suite": "m2_s1", + "test.type": "test", + "test_module_id": "17061851581233455560", + "test_session_id": "18323133602450366815", + "test_suite_id": "13136832476816543943", + "type": "test" + }, + "metrics": { + 
"_dd.py.partial_flush": 1, + "_dd.top_level": 1, + "_dd.tracer_kr": 1.0, + "_sampling_priority_v1": 1, + "process_id": 97018 + }, + "duration": 19625, + "start": 1734010946945909299 + }], +[ + { + "name": "test_visibility.test", + "service": "test-test", + "resource": "m2_s1_t13", + "trace_id": 19, + "span_id": 1, + "parent_id": 0, + "type": "test", + "error": 0, + "meta": { + "_dd.base_service": "test_manual_api_fake_efd_faulty_session0", + "_dd.ci.env_vars": "{\"CI_PROJECT_URL\":\"https://test.test.io/Test/test-test/test-test\",\"CI_PIPELINE_ID\":\"43949931\",\"CI_JOB_ID\":\"633358062\"}", + "_dd.origin": "ciapp-test", + "_dd.p.dm": "-0", + "_dd.p.tid": "675ae84200000000", + "ci.job.name": "test-job", + "ci.job.url": "https://test.test.io/Test/test-test/test-test/-/jobs/633358062", + "ci.node.labels": "[\"runner:test-test-test-test\"]", + "ci.node.name": "14727097", + "ci.pipeline.id": "43949931", + "ci.pipeline.name": "Test/test-test/test-project-path", + "ci.pipeline.number": "14726", + "ci.pipeline.url": "https://test.\u2020est.io/Test/test-\u2020est/test-test/-/pipelines/43949931", + "ci.provider.name": "gitlab", + "ci.stage.name": "test-stage", + "ci.workspace_path": "/tmp/pytest-of-root/pytest-87/test_manual_api_fake_efd_faulty_session0", + "component": "dd_manual_test_fw", + "git.branch": "test.brancn/test_name", + "git.commit.author.date": "2024-09-10T10:11:13+01:00", + "git.commit.author.email": "First.Last@testtest.com", + "git.commit.author.name": "TestFirst TestLast", + "git.commit.message": "test commit message", + "git.commit.sha": "c165eb71ef833b752783b5268f21521fd16f812a", + "git.repository_url": "https://test.test.io/Test/test-test/test-test.git", + "git.tag": "v1.0.0", + "language": "python", + "library_version": "2.18.0.dev111+g4ca600932", + "os.architecture": "aarch64", + "os.platform": "Linux", + "os.version": "6.6.12-linuxkit", + "runtime-id": "096f74b542c341ea9f8a4b6947a6a95f", + "runtime.name": "CPython", + "runtime.version": "3.11.9", + 
"span.kind": "test", + "test.command": "manual_efd_faulty_session", + "test.framework": "dd_manual_test_fw", + "test.framework_version": "1.0.0", + "test.module": "m2", + "test.module_path": "", + "test.name": "m2_s1_t13", + "test.status": "pass", + "test.suite": "m2_s1", + "test.type": "test", + "test_module_id": "17061851581233455560", + "test_session_id": "18323133602450366815", + "test_suite_id": "13136832476816543943", + "type": "test" + }, + "metrics": { + "_dd.py.partial_flush": 1, + "_dd.top_level": 1, + "_dd.tracer_kr": 1.0, + "_sampling_priority_v1": 1, + "process_id": 97018 + }, + "duration": 21792, + "start": 1734010946945968590 + }], +[ + { + "name": "test_visibility.test", + "service": "test-test", + "resource": "m2_s1_t14", + "trace_id": 20, + "span_id": 1, + "parent_id": 0, + "type": "test", + "error": 0, + "meta": { + "_dd.base_service": "test_manual_api_fake_efd_faulty_session0", + "_dd.ci.env_vars": "{\"CI_PROJECT_URL\":\"https://test.test.io/Test/test-test/test-test\",\"CI_PIPELINE_ID\":\"43949931\",\"CI_JOB_ID\":\"633358062\"}", + "_dd.origin": "ciapp-test", + "_dd.p.dm": "-0", + "_dd.p.tid": "675ae84200000000", + "ci.job.name": "test-job", + "ci.job.url": "https://test.test.io/Test/test-test/test-test/-/jobs/633358062", + "ci.node.labels": "[\"runner:test-test-test-test\"]", + "ci.node.name": "14727097", + "ci.pipeline.id": "43949931", + "ci.pipeline.name": "Test/test-test/test-project-path", + "ci.pipeline.number": "14726", + "ci.pipeline.url": "https://test.\u2020est.io/Test/test-\u2020est/test-test/-/pipelines/43949931", + "ci.provider.name": "gitlab", + "ci.stage.name": "test-stage", + "ci.workspace_path": "/tmp/pytest-of-root/pytest-87/test_manual_api_fake_efd_faulty_session0", + "component": "dd_manual_test_fw", + "git.branch": "test.brancn/test_name", + "git.commit.author.date": "2024-09-10T10:11:13+01:00", + "git.commit.author.email": "First.Last@testtest.com", + "git.commit.author.name": "TestFirst TestLast", + "git.commit.message": 
"test commit message", + "git.commit.sha": "c165eb71ef833b752783b5268f21521fd16f812a", + "git.repository_url": "https://test.test.io/Test/test-test/test-test.git", + "git.tag": "v1.0.0", + "language": "python", + "library_version": "2.18.0.dev111+g4ca600932", + "os.architecture": "aarch64", + "os.platform": "Linux", + "os.version": "6.6.12-linuxkit", + "runtime-id": "096f74b542c341ea9f8a4b6947a6a95f", + "runtime.name": "CPython", + "runtime.version": "3.11.9", + "span.kind": "test", + "test.command": "manual_efd_faulty_session", + "test.framework": "dd_manual_test_fw", + "test.framework_version": "1.0.0", + "test.module": "m2", + "test.module_path": "", + "test.name": "m2_s1_t14", + "test.status": "pass", + "test.suite": "m2_s1", + "test.type": "test", + "test_module_id": "17061851581233455560", + "test_session_id": "18323133602450366815", + "test_suite_id": "13136832476816543943", + "type": "test" + }, + "metrics": { + "_dd.py.partial_flush": 1, + "_dd.top_level": 1, + "_dd.tracer_kr": 1.0, + "_sampling_priority_v1": 1, + "process_id": 97018 + }, + "duration": 20542, + "start": 1734010946946032757 + }], +[ + { + "name": "test_visibility.test", + "service": "test-test", + "resource": "m2_s1_t15", + "trace_id": 21, + "span_id": 1, + "parent_id": 0, + "type": "test", + "error": 0, + "meta": { + "_dd.base_service": "test_manual_api_fake_efd_faulty_session0", + "_dd.ci.env_vars": "{\"CI_PROJECT_URL\":\"https://test.test.io/Test/test-test/test-test\",\"CI_PIPELINE_ID\":\"43949931\",\"CI_JOB_ID\":\"633358062\"}", + "_dd.origin": "ciapp-test", + "_dd.p.dm": "-0", + "_dd.p.tid": "675ae84200000000", + "ci.job.name": "test-job", + "ci.job.url": "https://test.test.io/Test/test-test/test-test/-/jobs/633358062", + "ci.node.labels": "[\"runner:test-test-test-test\"]", + "ci.node.name": "14727097", + "ci.pipeline.id": "43949931", + "ci.pipeline.name": "Test/test-test/test-project-path", + "ci.pipeline.number": "14726", + "ci.pipeline.url": 
"https://test.\u2020est.io/Test/test-\u2020est/test-test/-/pipelines/43949931", + "ci.provider.name": "gitlab", + "ci.stage.name": "test-stage", + "ci.workspace_path": "/tmp/pytest-of-root/pytest-87/test_manual_api_fake_efd_faulty_session0", + "component": "dd_manual_test_fw", + "git.branch": "test.brancn/test_name", + "git.commit.author.date": "2024-09-10T10:11:13+01:00", + "git.commit.author.email": "First.Last@testtest.com", + "git.commit.author.name": "TestFirst TestLast", + "git.commit.message": "test commit message", + "git.commit.sha": "c165eb71ef833b752783b5268f21521fd16f812a", + "git.repository_url": "https://test.test.io/Test/test-test/test-test.git", + "git.tag": "v1.0.0", + "language": "python", + "library_version": "2.18.0.dev111+g4ca600932", + "os.architecture": "aarch64", + "os.platform": "Linux", + "os.version": "6.6.12-linuxkit", + "runtime-id": "096f74b542c341ea9f8a4b6947a6a95f", + "runtime.name": "CPython", + "runtime.version": "3.11.9", + "span.kind": "test", + "test.command": "manual_efd_faulty_session", + "test.framework": "dd_manual_test_fw", + "test.framework_version": "1.0.0", + "test.module": "m2", + "test.module_path": "", + "test.name": "m2_s1_t15", + "test.status": "pass", + "test.suite": "m2_s1", + "test.type": "test", + "test_module_id": "17061851581233455560", + "test_session_id": "18323133602450366815", + "test_suite_id": "13136832476816543943", + "type": "test" + }, + "metrics": { + "_dd.py.partial_flush": 1, + "_dd.top_level": 1, + "_dd.tracer_kr": 1.0, + "_sampling_priority_v1": 1, + "process_id": 97018 + }, + "duration": 19166, + "start": 1734010946946092674 + }], +[ + { + "name": "test_visibility.test", + "service": "test-test", + "resource": "m2_s1_t16", + "trace_id": 22, + "span_id": 1, + "parent_id": 0, + "type": "test", + "error": 0, + "meta": { + "_dd.base_service": "test_manual_api_fake_efd_faulty_session0", + "_dd.ci.env_vars": 
"{\"CI_PROJECT_URL\":\"https://test.test.io/Test/test-test/test-test\",\"CI_PIPELINE_ID\":\"43949931\",\"CI_JOB_ID\":\"633358062\"}", + "_dd.origin": "ciapp-test", + "_dd.p.dm": "-0", + "_dd.p.tid": "675ae84200000000", + "ci.job.name": "test-job", + "ci.job.url": "https://test.test.io/Test/test-test/test-test/-/jobs/633358062", + "ci.node.labels": "[\"runner:test-test-test-test\"]", + "ci.node.name": "14727097", + "ci.pipeline.id": "43949931", + "ci.pipeline.name": "Test/test-test/test-project-path", + "ci.pipeline.number": "14726", + "ci.pipeline.url": "https://test.\u2020est.io/Test/test-\u2020est/test-test/-/pipelines/43949931", + "ci.provider.name": "gitlab", + "ci.stage.name": "test-stage", + "ci.workspace_path": "/tmp/pytest-of-root/pytest-87/test_manual_api_fake_efd_faulty_session0", + "component": "dd_manual_test_fw", + "git.branch": "test.brancn/test_name", + "git.commit.author.date": "2024-09-10T10:11:13+01:00", + "git.commit.author.email": "First.Last@testtest.com", + "git.commit.author.name": "TestFirst TestLast", + "git.commit.message": "test commit message", + "git.commit.sha": "c165eb71ef833b752783b5268f21521fd16f812a", + "git.repository_url": "https://test.test.io/Test/test-test/test-test.git", + "git.tag": "v1.0.0", + "language": "python", + "library_version": "2.18.0.dev111+g4ca600932", + "os.architecture": "aarch64", + "os.platform": "Linux", + "os.version": "6.6.12-linuxkit", + "runtime-id": "096f74b542c341ea9f8a4b6947a6a95f", + "runtime.name": "CPython", + "runtime.version": "3.11.9", + "span.kind": "test", + "test.command": "manual_efd_faulty_session", + "test.framework": "dd_manual_test_fw", + "test.framework_version": "1.0.0", + "test.module": "m2", + "test.module_path": "", + "test.name": "m2_s1_t16", + "test.status": "pass", + "test.suite": "m2_s1", + "test.type": "test", + "test_module_id": "17061851581233455560", + "test_session_id": "18323133602450366815", + "test_suite_id": "13136832476816543943", + "type": "test" + }, + "metrics": { + 
"_dd.py.partial_flush": 1, + "_dd.top_level": 1, + "_dd.tracer_kr": 1.0, + "_sampling_priority_v1": 1, + "process_id": 97018 + }, + "duration": 20375, + "start": 1734010946946156174 + }], +[ + { + "name": "test_visibility.test", + "service": "test-test", + "resource": "m2_s1_t17", + "trace_id": 23, + "span_id": 1, + "parent_id": 0, + "type": "test", + "error": 0, + "meta": { + "_dd.base_service": "test_manual_api_fake_efd_faulty_session0", + "_dd.ci.env_vars": "{\"CI_PROJECT_URL\":\"https://test.test.io/Test/test-test/test-test\",\"CI_PIPELINE_ID\":\"43949931\",\"CI_JOB_ID\":\"633358062\"}", + "_dd.origin": "ciapp-test", + "_dd.p.dm": "-0", + "_dd.p.tid": "675ae84200000000", + "ci.job.name": "test-job", + "ci.job.url": "https://test.test.io/Test/test-test/test-test/-/jobs/633358062", + "ci.node.labels": "[\"runner:test-test-test-test\"]", + "ci.node.name": "14727097", + "ci.pipeline.id": "43949931", + "ci.pipeline.name": "Test/test-test/test-project-path", + "ci.pipeline.number": "14726", + "ci.pipeline.url": "https://test.\u2020est.io/Test/test-\u2020est/test-test/-/pipelines/43949931", + "ci.provider.name": "gitlab", + "ci.stage.name": "test-stage", + "ci.workspace_path": "/tmp/pytest-of-root/pytest-87/test_manual_api_fake_efd_faulty_session0", + "component": "dd_manual_test_fw", + "git.branch": "test.brancn/test_name", + "git.commit.author.date": "2024-09-10T10:11:13+01:00", + "git.commit.author.email": "First.Last@testtest.com", + "git.commit.author.name": "TestFirst TestLast", + "git.commit.message": "test commit message", + "git.commit.sha": "c165eb71ef833b752783b5268f21521fd16f812a", + "git.repository_url": "https://test.test.io/Test/test-test/test-test.git", + "git.tag": "v1.0.0", + "language": "python", + "library_version": "2.18.0.dev111+g4ca600932", + "os.architecture": "aarch64", + "os.platform": "Linux", + "os.version": "6.6.12-linuxkit", + "runtime-id": "096f74b542c341ea9f8a4b6947a6a95f", + "runtime.name": "CPython", + "runtime.version": "3.11.9", + 
"span.kind": "test", + "test.command": "manual_efd_faulty_session", + "test.framework": "dd_manual_test_fw", + "test.framework_version": "1.0.0", + "test.module": "m2", + "test.module_path": "", + "test.name": "m2_s1_t17", + "test.status": "pass", + "test.suite": "m2_s1", + "test.type": "test", + "test_module_id": "17061851581233455560", + "test_session_id": "18323133602450366815", + "test_suite_id": "13136832476816543943", + "type": "test" + }, + "metrics": { + "_dd.py.partial_flush": 1, + "_dd.top_level": 1, + "_dd.tracer_kr": 1.0, + "_sampling_priority_v1": 1, + "process_id": 97018 + }, + "duration": 20750, + "start": 1734010946946217424 + }], +[ + { + "name": "test_visibility.test", + "service": "test-test", + "resource": "m2_s1_t18", + "trace_id": 24, + "span_id": 1, + "parent_id": 0, + "type": "test", + "error": 0, + "meta": { + "_dd.base_service": "test_manual_api_fake_efd_faulty_session0", + "_dd.ci.env_vars": "{\"CI_PROJECT_URL\":\"https://test.test.io/Test/test-test/test-test\",\"CI_PIPELINE_ID\":\"43949931\",\"CI_JOB_ID\":\"633358062\"}", + "_dd.origin": "ciapp-test", + "_dd.p.dm": "-0", + "_dd.p.tid": "675ae84200000000", + "ci.job.name": "test-job", + "ci.job.url": "https://test.test.io/Test/test-test/test-test/-/jobs/633358062", + "ci.node.labels": "[\"runner:test-test-test-test\"]", + "ci.node.name": "14727097", + "ci.pipeline.id": "43949931", + "ci.pipeline.name": "Test/test-test/test-project-path", + "ci.pipeline.number": "14726", + "ci.pipeline.url": "https://test.\u2020est.io/Test/test-\u2020est/test-test/-/pipelines/43949931", + "ci.provider.name": "gitlab", + "ci.stage.name": "test-stage", + "ci.workspace_path": "/tmp/pytest-of-root/pytest-87/test_manual_api_fake_efd_faulty_session0", + "component": "dd_manual_test_fw", + "git.branch": "test.brancn/test_name", + "git.commit.author.date": "2024-09-10T10:11:13+01:00", + "git.commit.author.email": "First.Last@testtest.com", + "git.commit.author.name": "TestFirst TestLast", + "git.commit.message": 
"test commit message", + "git.commit.sha": "c165eb71ef833b752783b5268f21521fd16f812a", + "git.repository_url": "https://test.test.io/Test/test-test/test-test.git", + "git.tag": "v1.0.0", + "language": "python", + "library_version": "2.18.0.dev111+g4ca600932", + "os.architecture": "aarch64", + "os.platform": "Linux", + "os.version": "6.6.12-linuxkit", + "runtime-id": "096f74b542c341ea9f8a4b6947a6a95f", + "runtime.name": "CPython", + "runtime.version": "3.11.9", + "span.kind": "test", + "test.command": "manual_efd_faulty_session", + "test.framework": "dd_manual_test_fw", + "test.framework_version": "1.0.0", + "test.module": "m2", + "test.module_path": "", + "test.name": "m2_s1_t18", + "test.status": "pass", + "test.suite": "m2_s1", + "test.type": "test", + "test_module_id": "17061851581233455560", + "test_session_id": "18323133602450366815", + "test_suite_id": "13136832476816543943", + "type": "test" + }, + "metrics": { + "_dd.py.partial_flush": 1, + "_dd.top_level": 1, + "_dd.tracer_kr": 1.0, + "_sampling_priority_v1": 1, + "process_id": 97018 + }, + "duration": 19833, + "start": 1734010946946278132 + }], +[ + { + "name": "test_visibility.test", + "service": "test-test", + "resource": "m2_s1_t19", + "trace_id": 25, + "span_id": 1, + "parent_id": 0, + "type": "test", + "error": 0, + "meta": { + "_dd.base_service": "test_manual_api_fake_efd_faulty_session0", + "_dd.ci.env_vars": "{\"CI_PROJECT_URL\":\"https://test.test.io/Test/test-test/test-test\",\"CI_PIPELINE_ID\":\"43949931\",\"CI_JOB_ID\":\"633358062\"}", + "_dd.origin": "ciapp-test", + "_dd.p.dm": "-0", + "_dd.p.tid": "675ae84200000000", + "ci.job.name": "test-job", + "ci.job.url": "https://test.test.io/Test/test-test/test-test/-/jobs/633358062", + "ci.node.labels": "[\"runner:test-test-test-test\"]", + "ci.node.name": "14727097", + "ci.pipeline.id": "43949931", + "ci.pipeline.name": "Test/test-test/test-project-path", + "ci.pipeline.number": "14726", + "ci.pipeline.url": 
"https://test.\u2020est.io/Test/test-\u2020est/test-test/-/pipelines/43949931", + "ci.provider.name": "gitlab", + "ci.stage.name": "test-stage", + "ci.workspace_path": "/tmp/pytest-of-root/pytest-87/test_manual_api_fake_efd_faulty_session0", + "component": "dd_manual_test_fw", + "git.branch": "test.brancn/test_name", + "git.commit.author.date": "2024-09-10T10:11:13+01:00", + "git.commit.author.email": "First.Last@testtest.com", + "git.commit.author.name": "TestFirst TestLast", + "git.commit.message": "test commit message", + "git.commit.sha": "c165eb71ef833b752783b5268f21521fd16f812a", + "git.repository_url": "https://test.test.io/Test/test-test/test-test.git", + "git.tag": "v1.0.0", + "language": "python", + "library_version": "2.18.0.dev111+g4ca600932", + "os.architecture": "aarch64", + "os.platform": "Linux", + "os.version": "6.6.12-linuxkit", + "runtime-id": "096f74b542c341ea9f8a4b6947a6a95f", + "runtime.name": "CPython", + "runtime.version": "3.11.9", + "span.kind": "test", + "test.command": "manual_efd_faulty_session", + "test.framework": "dd_manual_test_fw", + "test.framework_version": "1.0.0", + "test.module": "m2", + "test.module_path": "", + "test.name": "m2_s1_t19", + "test.status": "pass", + "test.suite": "m2_s1", + "test.type": "test", + "test_module_id": "17061851581233455560", + "test_session_id": "18323133602450366815", + "test_suite_id": "13136832476816543943", + "type": "test" + }, + "metrics": { + "_dd.py.partial_flush": 1, + "_dd.top_level": 1, + "_dd.tracer_kr": 1.0, + "_sampling_priority_v1": 1, + "process_id": 97018 + }, + "duration": 23375, + "start": 1734010946946356882 + }], +[ + { + "name": "test_visibility.test", + "service": "test-test", + "resource": "m2_s1_t20", + "trace_id": 26, + "span_id": 1, + "parent_id": 0, + "type": "test", + "error": 0, + "meta": { + "_dd.base_service": "test_manual_api_fake_efd_faulty_session0", + "_dd.ci.env_vars": 
"{\"CI_PROJECT_URL\":\"https://test.test.io/Test/test-test/test-test\",\"CI_PIPELINE_ID\":\"43949931\",\"CI_JOB_ID\":\"633358062\"}", + "_dd.origin": "ciapp-test", + "_dd.p.dm": "-0", + "_dd.p.tid": "675ae84200000000", + "ci.job.name": "test-job", + "ci.job.url": "https://test.test.io/Test/test-test/test-test/-/jobs/633358062", + "ci.node.labels": "[\"runner:test-test-test-test\"]", + "ci.node.name": "14727097", + "ci.pipeline.id": "43949931", + "ci.pipeline.name": "Test/test-test/test-project-path", + "ci.pipeline.number": "14726", + "ci.pipeline.url": "https://test.\u2020est.io/Test/test-\u2020est/test-test/-/pipelines/43949931", + "ci.provider.name": "gitlab", + "ci.stage.name": "test-stage", + "ci.workspace_path": "/tmp/pytest-of-root/pytest-87/test_manual_api_fake_efd_faulty_session0", + "component": "dd_manual_test_fw", + "git.branch": "test.brancn/test_name", + "git.commit.author.date": "2024-09-10T10:11:13+01:00", + "git.commit.author.email": "First.Last@testtest.com", + "git.commit.author.name": "TestFirst TestLast", + "git.commit.message": "test commit message", + "git.commit.sha": "c165eb71ef833b752783b5268f21521fd16f812a", + "git.repository_url": "https://test.test.io/Test/test-test/test-test.git", + "git.tag": "v1.0.0", + "language": "python", + "library_version": "2.18.0.dev111+g4ca600932", + "os.architecture": "aarch64", + "os.platform": "Linux", + "os.version": "6.6.12-linuxkit", + "runtime-id": "096f74b542c341ea9f8a4b6947a6a95f", + "runtime.name": "CPython", + "runtime.version": "3.11.9", + "span.kind": "test", + "test.command": "manual_efd_faulty_session", + "test.framework": "dd_manual_test_fw", + "test.framework_version": "1.0.0", + "test.module": "m2", + "test.module_path": "", + "test.name": "m2_s1_t20", + "test.status": "pass", + "test.suite": "m2_s1", + "test.type": "test", + "test_module_id": "17061851581233455560", + "test_session_id": "18323133602450366815", + "test_suite_id": "13136832476816543943", + "type": "test" + }, + "metrics": { + 
"_dd.py.partial_flush": 1, + "_dd.top_level": 1, + "_dd.tracer_kr": 1.0, + "_sampling_priority_v1": 1, + "process_id": 97018 + }, + "duration": 19959, + "start": 1734010946946424090 + }], +[ + { + "name": "test_visibility.test", + "service": "test-test", + "resource": "m2_s1_t21", + "trace_id": 27, + "span_id": 1, + "parent_id": 0, + "type": "test", + "error": 0, + "meta": { + "_dd.base_service": "test_manual_api_fake_efd_faulty_session0", + "_dd.ci.env_vars": "{\"CI_PROJECT_URL\":\"https://test.test.io/Test/test-test/test-test\",\"CI_PIPELINE_ID\":\"43949931\",\"CI_JOB_ID\":\"633358062\"}", + "_dd.origin": "ciapp-test", + "_dd.p.dm": "-0", + "_dd.p.tid": "675ae84200000000", + "ci.job.name": "test-job", + "ci.job.url": "https://test.test.io/Test/test-test/test-test/-/jobs/633358062", + "ci.node.labels": "[\"runner:test-test-test-test\"]", + "ci.node.name": "14727097", + "ci.pipeline.id": "43949931", + "ci.pipeline.name": "Test/test-test/test-project-path", + "ci.pipeline.number": "14726", + "ci.pipeline.url": "https://test.\u2020est.io/Test/test-\u2020est/test-test/-/pipelines/43949931", + "ci.provider.name": "gitlab", + "ci.stage.name": "test-stage", + "ci.workspace_path": "/tmp/pytest-of-root/pytest-87/test_manual_api_fake_efd_faulty_session0", + "component": "dd_manual_test_fw", + "git.branch": "test.brancn/test_name", + "git.commit.author.date": "2024-09-10T10:11:13+01:00", + "git.commit.author.email": "First.Last@testtest.com", + "git.commit.author.name": "TestFirst TestLast", + "git.commit.message": "test commit message", + "git.commit.sha": "c165eb71ef833b752783b5268f21521fd16f812a", + "git.repository_url": "https://test.test.io/Test/test-test/test-test.git", + "git.tag": "v1.0.0", + "language": "python", + "library_version": "2.18.0.dev111+g4ca600932", + "os.architecture": "aarch64", + "os.platform": "Linux", + "os.version": "6.6.12-linuxkit", + "runtime-id": "096f74b542c341ea9f8a4b6947a6a95f", + "runtime.name": "CPython", + "runtime.version": "3.11.9", + 
"span.kind": "test", + "test.command": "manual_efd_faulty_session", + "test.framework": "dd_manual_test_fw", + "test.framework_version": "1.0.0", + "test.module": "m2", + "test.module_path": "", + "test.name": "m2_s1_t21", + "test.status": "pass", + "test.suite": "m2_s1", + "test.type": "test", + "test_module_id": "17061851581233455560", + "test_session_id": "18323133602450366815", + "test_suite_id": "13136832476816543943", + "type": "test" + }, + "metrics": { + "_dd.py.partial_flush": 1, + "_dd.top_level": 1, + "_dd.tracer_kr": 1.0, + "_sampling_priority_v1": 1, + "process_id": 97018 + }, + "duration": 22208, + "start": 1734010946946483924 + }], +[ + { + "name": "test_visibility.test", + "service": "test-test", + "resource": "m2_s1_t22", + "trace_id": 28, + "span_id": 1, + "parent_id": 0, + "type": "test", + "error": 0, + "meta": { + "_dd.base_service": "test_manual_api_fake_efd_faulty_session0", + "_dd.ci.env_vars": "{\"CI_PROJECT_URL\":\"https://test.test.io/Test/test-test/test-test\",\"CI_PIPELINE_ID\":\"43949931\",\"CI_JOB_ID\":\"633358062\"}", + "_dd.origin": "ciapp-test", + "_dd.p.dm": "-0", + "_dd.p.tid": "675ae84200000000", "ci.job.name": "test-job", "ci.job.url": "https://test.test.io/Test/test-test/test-test/-/jobs/633358062", "ci.node.labels": "[\"runner:test-test-test-test\"]", @@ -761,7 +2277,7 @@ "ci.pipeline.url": "https://test.\u2020est.io/Test/test-\u2020est/test-test/-/pipelines/43949931", "ci.provider.name": "gitlab", "ci.stage.name": "test-stage", - "ci.workspace_path": "/tmp/pytest-of-root/pytest-12/test_manual_api_fake_efd_faulty_session0", + "ci.workspace_path": "/tmp/pytest-of-root/pytest-87/test_manual_api_fake_efd_faulty_session0", "component": "dd_manual_test_fw", "git.branch": "test.brancn/test_name", "git.commit.author.date": "2024-09-10T10:11:13+01:00", @@ -772,27 +2288,26 @@ "git.repository_url": "https://test.test.io/Test/test-test/test-test.git", "git.tag": "v1.0.0", "language": "python", - "library_version": 
"2.18.0.dev124+gc03b9e422.d20241205", + "library_version": "2.18.0.dev111+g4ca600932", "os.architecture": "aarch64", "os.platform": "Linux", "os.version": "6.6.12-linuxkit", - "runtime-id": "bdef4ecb6c674245bfc4f6518ff5a773", + "runtime-id": "096f74b542c341ea9f8a4b6947a6a95f", "runtime.name": "CPython", - "runtime.version": "3.9.19", + "runtime.version": "3.11.9", "span.kind": "test", "test.command": "manual_efd_faulty_session", "test.framework": "dd_manual_test_fw", "test.framework_version": "1.0.0", - "test.is_new": "true", "test.module": "m2", "test.module_path": "", - "test.name": "m2_s1_t1", + "test.name": "m2_s1_t22", "test.status": "pass", "test.suite": "m2_s1", "test.type": "test", - "test_module_id": "17675353669520667242", - "test_session_id": "15705414272000062156", - "test_suite_id": "3960011827465066479", + "test_module_id": "17061851581233455560", + "test_session_id": "18323133602450366815", + "test_suite_id": "13136832476816543943", "type": "test" }, "metrics": { @@ -800,17 +2315,17 @@ "_dd.top_level": 1, "_dd.tracer_kr": 1.0, "_sampling_priority_v1": 1, - "process_id": 32495 + "process_id": 97018 }, - "duration": 34542, - "start": 1733391714564244375 + "duration": 19834, + "start": 1734010946946554215 }], [ { "name": "test_visibility.test", "service": "test-test", - "resource": "m2_s1_t2", - "trace_id": 7, + "resource": "m2_s1_t23", + "trace_id": 29, "span_id": 1, "parent_id": 0, "type": "test", @@ -820,7 +2335,7 @@ "_dd.ci.env_vars": "{\"CI_PROJECT_URL\":\"https://test.test.io/Test/test-test/test-test\",\"CI_PIPELINE_ID\":\"43949931\",\"CI_JOB_ID\":\"633358062\"}", "_dd.origin": "ciapp-test", "_dd.p.dm": "-0", - "_dd.p.tid": "6751756200000000", + "_dd.p.tid": "675ae84200000000", "ci.job.name": "test-job", "ci.job.url": "https://test.test.io/Test/test-test/test-test/-/jobs/633358062", "ci.node.labels": "[\"runner:test-test-test-test\"]", @@ -831,7 +2346,7 @@ "ci.pipeline.url": 
"https://test.\u2020est.io/Test/test-\u2020est/test-test/-/pipelines/43949931", "ci.provider.name": "gitlab", "ci.stage.name": "test-stage", - "ci.workspace_path": "/tmp/pytest-of-root/pytest-12/test_manual_api_fake_efd_faulty_session0", + "ci.workspace_path": "/tmp/pytest-of-root/pytest-87/test_manual_api_fake_efd_faulty_session0", "component": "dd_manual_test_fw", "git.branch": "test.brancn/test_name", "git.commit.author.date": "2024-09-10T10:11:13+01:00", @@ -842,27 +2357,26 @@ "git.repository_url": "https://test.test.io/Test/test-test/test-test.git", "git.tag": "v1.0.0", "language": "python", - "library_version": "2.18.0.dev124+gc03b9e422.d20241205", + "library_version": "2.18.0.dev111+g4ca600932", "os.architecture": "aarch64", "os.platform": "Linux", "os.version": "6.6.12-linuxkit", - "runtime-id": "bdef4ecb6c674245bfc4f6518ff5a773", + "runtime-id": "096f74b542c341ea9f8a4b6947a6a95f", "runtime.name": "CPython", - "runtime.version": "3.9.19", + "runtime.version": "3.11.9", "span.kind": "test", "test.command": "manual_efd_faulty_session", "test.framework": "dd_manual_test_fw", "test.framework_version": "1.0.0", - "test.is_new": "true", "test.module": "m2", "test.module_path": "", - "test.name": "m2_s1_t2", + "test.name": "m2_s1_t23", "test.status": "pass", "test.suite": "m2_s1", "test.type": "test", - "test_module_id": "17675353669520667242", - "test_session_id": "15705414272000062156", - "test_suite_id": "3960011827465066479", + "test_module_id": "17061851581233455560", + "test_session_id": "18323133602450366815", + "test_suite_id": "13136832476816543943", "type": "test" }, "metrics": { @@ -870,17 +2384,17 @@ "_dd.top_level": 1, "_dd.tracer_kr": 1.0, "_sampling_priority_v1": 1, - "process_id": 32495 + "process_id": 97018 }, - "duration": 33542, - "start": 1733391714564346250 + "duration": 20125, + "start": 1734010946946613590 }], [ { "name": "test_visibility.test", "service": "test-test", - "resource": "m2_s1_t3", - "trace_id": 8, + "resource": "m2_s1_t24", + 
"trace_id": 30, "span_id": 1, "parent_id": 0, "type": "test", @@ -890,7 +2404,7 @@ "_dd.ci.env_vars": "{\"CI_PROJECT_URL\":\"https://test.test.io/Test/test-test/test-test\",\"CI_PIPELINE_ID\":\"43949931\",\"CI_JOB_ID\":\"633358062\"}", "_dd.origin": "ciapp-test", "_dd.p.dm": "-0", - "_dd.p.tid": "6751756200000000", + "_dd.p.tid": "675ae84200000000", "ci.job.name": "test-job", "ci.job.url": "https://test.test.io/Test/test-test/test-test/-/jobs/633358062", "ci.node.labels": "[\"runner:test-test-test-test\"]", @@ -901,7 +2415,7 @@ "ci.pipeline.url": "https://test.\u2020est.io/Test/test-\u2020est/test-test/-/pipelines/43949931", "ci.provider.name": "gitlab", "ci.stage.name": "test-stage", - "ci.workspace_path": "/tmp/pytest-of-root/pytest-12/test_manual_api_fake_efd_faulty_session0", + "ci.workspace_path": "/tmp/pytest-of-root/pytest-87/test_manual_api_fake_efd_faulty_session0", "component": "dd_manual_test_fw", "git.branch": "test.brancn/test_name", "git.commit.author.date": "2024-09-10T10:11:13+01:00", @@ -912,27 +2426,26 @@ "git.repository_url": "https://test.test.io/Test/test-test/test-test.git", "git.tag": "v1.0.0", "language": "python", - "library_version": "2.18.0.dev124+gc03b9e422.d20241205", + "library_version": "2.18.0.dev111+g4ca600932", "os.architecture": "aarch64", "os.platform": "Linux", "os.version": "6.6.12-linuxkit", - "runtime-id": "bdef4ecb6c674245bfc4f6518ff5a773", + "runtime-id": "096f74b542c341ea9f8a4b6947a6a95f", "runtime.name": "CPython", - "runtime.version": "3.9.19", + "runtime.version": "3.11.9", "span.kind": "test", "test.command": "manual_efd_faulty_session", "test.framework": "dd_manual_test_fw", "test.framework_version": "1.0.0", - "test.is_new": "true", "test.module": "m2", "test.module_path": "", - "test.name": "m2_s1_t3", + "test.name": "m2_s1_t24", "test.status": "pass", "test.suite": "m2_s1", "test.type": "test", - "test_module_id": "17675353669520667242", - "test_session_id": "15705414272000062156", - "test_suite_id": 
"3960011827465066479", + "test_module_id": "17061851581233455560", + "test_session_id": "18323133602450366815", + "test_suite_id": "13136832476816543943", "type": "test" }, "metrics": { @@ -940,17 +2453,17 @@ "_dd.top_level": 1, "_dd.tracer_kr": 1.0, "_sampling_priority_v1": 1, - "process_id": 32495 + "process_id": 97018 }, - "duration": 33250, - "start": 1733391714564456125 + "duration": 20334, + "start": 1734010946946676715 }], [ { "name": "test_visibility.test", "service": "test-test", - "resource": "m2_s1_t4", - "trace_id": 9, + "resource": "m2_s1_t25", + "trace_id": 31, "span_id": 1, "parent_id": 0, "type": "test", @@ -960,7 +2473,7 @@ "_dd.ci.env_vars": "{\"CI_PROJECT_URL\":\"https://test.test.io/Test/test-test/test-test\",\"CI_PIPELINE_ID\":\"43949931\",\"CI_JOB_ID\":\"633358062\"}", "_dd.origin": "ciapp-test", "_dd.p.dm": "-0", - "_dd.p.tid": "6751756200000000", + "_dd.p.tid": "675ae84200000000", "ci.job.name": "test-job", "ci.job.url": "https://test.test.io/Test/test-test/test-test/-/jobs/633358062", "ci.node.labels": "[\"runner:test-test-test-test\"]", @@ -971,7 +2484,7 @@ "ci.pipeline.url": "https://test.\u2020est.io/Test/test-\u2020est/test-test/-/pipelines/43949931", "ci.provider.name": "gitlab", "ci.stage.name": "test-stage", - "ci.workspace_path": "/tmp/pytest-of-root/pytest-12/test_manual_api_fake_efd_faulty_session0", + "ci.workspace_path": "/tmp/pytest-of-root/pytest-87/test_manual_api_fake_efd_faulty_session0", "component": "dd_manual_test_fw", "git.branch": "test.brancn/test_name", "git.commit.author.date": "2024-09-10T10:11:13+01:00", @@ -982,27 +2495,26 @@ "git.repository_url": "https://test.test.io/Test/test-test/test-test.git", "git.tag": "v1.0.0", "language": "python", - "library_version": "2.18.0.dev124+gc03b9e422.d20241205", + "library_version": "2.18.0.dev111+g4ca600932", "os.architecture": "aarch64", "os.platform": "Linux", "os.version": "6.6.12-linuxkit", - "runtime-id": "bdef4ecb6c674245bfc4f6518ff5a773", + "runtime-id": 
"096f74b542c341ea9f8a4b6947a6a95f", "runtime.name": "CPython", - "runtime.version": "3.9.19", + "runtime.version": "3.11.9", "span.kind": "test", "test.command": "manual_efd_faulty_session", "test.framework": "dd_manual_test_fw", "test.framework_version": "1.0.0", - "test.is_new": "true", "test.module": "m2", "test.module_path": "", - "test.name": "m2_s1_t4", + "test.name": "m2_s1_t25", "test.status": "pass", "test.suite": "m2_s1", "test.type": "test", - "test_module_id": "17675353669520667242", - "test_session_id": "15705414272000062156", - "test_suite_id": "3960011827465066479", + "test_module_id": "17061851581233455560", + "test_session_id": "18323133602450366815", + "test_suite_id": "13136832476816543943", "type": "test" }, "metrics": { @@ -1010,17 +2522,17 @@ "_dd.top_level": 1, "_dd.tracer_kr": 1.0, "_sampling_priority_v1": 1, - "process_id": 32495 + "process_id": 97018 }, - "duration": 32250, - "start": 1733391714564552959 + "duration": 19000, + "start": 1734010946946737507 }], [ { "name": "test_visibility.test", "service": "test-test", - "resource": "m2_s1_t5", - "trace_id": 10, + "resource": "m2_s1_t26", + "trace_id": 32, "span_id": 1, "parent_id": 0, "type": "test", @@ -1030,7 +2542,7 @@ "_dd.ci.env_vars": "{\"CI_PROJECT_URL\":\"https://test.test.io/Test/test-test/test-test\",\"CI_PIPELINE_ID\":\"43949931\",\"CI_JOB_ID\":\"633358062\"}", "_dd.origin": "ciapp-test", "_dd.p.dm": "-0", - "_dd.p.tid": "6751756200000000", + "_dd.p.tid": "675ae84200000000", "ci.job.name": "test-job", "ci.job.url": "https://test.test.io/Test/test-test/test-test/-/jobs/633358062", "ci.node.labels": "[\"runner:test-test-test-test\"]", @@ -1041,7 +2553,7 @@ "ci.pipeline.url": "https://test.\u2020est.io/Test/test-\u2020est/test-test/-/pipelines/43949931", "ci.provider.name": "gitlab", "ci.stage.name": "test-stage", - "ci.workspace_path": "/tmp/pytest-of-root/pytest-12/test_manual_api_fake_efd_faulty_session0", + "ci.workspace_path": 
"/tmp/pytest-of-root/pytest-87/test_manual_api_fake_efd_faulty_session0", "component": "dd_manual_test_fw", "git.branch": "test.brancn/test_name", "git.commit.author.date": "2024-09-10T10:11:13+01:00", @@ -1052,27 +2564,26 @@ "git.repository_url": "https://test.test.io/Test/test-test/test-test.git", "git.tag": "v1.0.0", "language": "python", - "library_version": "2.18.0.dev124+gc03b9e422.d20241205", + "library_version": "2.18.0.dev111+g4ca600932", "os.architecture": "aarch64", "os.platform": "Linux", "os.version": "6.6.12-linuxkit", - "runtime-id": "bdef4ecb6c674245bfc4f6518ff5a773", + "runtime-id": "096f74b542c341ea9f8a4b6947a6a95f", "runtime.name": "CPython", - "runtime.version": "3.9.19", + "runtime.version": "3.11.9", "span.kind": "test", "test.command": "manual_efd_faulty_session", "test.framework": "dd_manual_test_fw", "test.framework_version": "1.0.0", - "test.is_new": "true", "test.module": "m2", "test.module_path": "", - "test.name": "m2_s1_t5", + "test.name": "m2_s1_t26", "test.status": "pass", "test.suite": "m2_s1", "test.type": "test", - "test_module_id": "17675353669520667242", - "test_session_id": "15705414272000062156", - "test_suite_id": "3960011827465066479", + "test_module_id": "17061851581233455560", + "test_session_id": "18323133602450366815", + "test_suite_id": "13136832476816543943", "type": "test" }, "metrics": { @@ -1080,17 +2591,17 @@ "_dd.top_level": 1, "_dd.tracer_kr": 1.0, "_sampling_priority_v1": 1, - "process_id": 32495 + "process_id": 97018 }, - "duration": 30417, - "start": 1733391714564645042 + "duration": 20083, + "start": 1734010946946795382 }], [ { "name": "test_visibility.test", "service": "test-test", - "resource": "m2_s1_t6", - "trace_id": 11, + "resource": "m2_s1_t27", + "trace_id": 33, "span_id": 1, "parent_id": 0, "type": "test", @@ -1100,7 +2611,7 @@ "_dd.ci.env_vars": "{\"CI_PROJECT_URL\":\"https://test.test.io/Test/test-test/test-test\",\"CI_PIPELINE_ID\":\"43949931\",\"CI_JOB_ID\":\"633358062\"}", "_dd.origin": 
"ciapp-test", "_dd.p.dm": "-0", - "_dd.p.tid": "6751756200000000", + "_dd.p.tid": "675ae84200000000", "ci.job.name": "test-job", "ci.job.url": "https://test.test.io/Test/test-test/test-test/-/jobs/633358062", "ci.node.labels": "[\"runner:test-test-test-test\"]", @@ -1111,7 +2622,7 @@ "ci.pipeline.url": "https://test.\u2020est.io/Test/test-\u2020est/test-test/-/pipelines/43949931", "ci.provider.name": "gitlab", "ci.stage.name": "test-stage", - "ci.workspace_path": "/tmp/pytest-of-root/pytest-12/test_manual_api_fake_efd_faulty_session0", + "ci.workspace_path": "/tmp/pytest-of-root/pytest-87/test_manual_api_fake_efd_faulty_session0", "component": "dd_manual_test_fw", "git.branch": "test.brancn/test_name", "git.commit.author.date": "2024-09-10T10:11:13+01:00", @@ -1122,27 +2633,26 @@ "git.repository_url": "https://test.test.io/Test/test-test/test-test.git", "git.tag": "v1.0.0", "language": "python", - "library_version": "2.18.0.dev124+gc03b9e422.d20241205", + "library_version": "2.18.0.dev111+g4ca600932", "os.architecture": "aarch64", "os.platform": "Linux", "os.version": "6.6.12-linuxkit", - "runtime-id": "bdef4ecb6c674245bfc4f6518ff5a773", + "runtime-id": "096f74b542c341ea9f8a4b6947a6a95f", "runtime.name": "CPython", - "runtime.version": "3.9.19", + "runtime.version": "3.11.9", "span.kind": "test", "test.command": "manual_efd_faulty_session", "test.framework": "dd_manual_test_fw", "test.framework_version": "1.0.0", - "test.is_new": "true", "test.module": "m2", "test.module_path": "", - "test.name": "m2_s1_t6", + "test.name": "m2_s1_t27", "test.status": "pass", "test.suite": "m2_s1", "test.type": "test", - "test_module_id": "17675353669520667242", - "test_session_id": "15705414272000062156", - "test_suite_id": "3960011827465066479", + "test_module_id": "17061851581233455560", + "test_session_id": "18323133602450366815", + "test_suite_id": "13136832476816543943", "type": "test" }, "metrics": { @@ -1150,17 +2660,17 @@ "_dd.top_level": 1, "_dd.tracer_kr": 1.0, 
"_sampling_priority_v1": 1, - "process_id": 32495 + "process_id": 97018 }, - "duration": 31583, - "start": 1733391714564735834 + "duration": 33667, + "start": 1734010946946856965 }], [ { "name": "test_visibility.test", "service": "test-test", - "resource": "m2_s1_t7", - "trace_id": 12, + "resource": "m2_s1_t28", + "trace_id": 34, "span_id": 1, "parent_id": 0, "type": "test", @@ -1170,7 +2680,7 @@ "_dd.ci.env_vars": "{\"CI_PROJECT_URL\":\"https://test.test.io/Test/test-test/test-test\",\"CI_PIPELINE_ID\":\"43949931\",\"CI_JOB_ID\":\"633358062\"}", "_dd.origin": "ciapp-test", "_dd.p.dm": "-0", - "_dd.p.tid": "6751756200000000", + "_dd.p.tid": "675ae84200000000", "ci.job.name": "test-job", "ci.job.url": "https://test.test.io/Test/test-test/test-test/-/jobs/633358062", "ci.node.labels": "[\"runner:test-test-test-test\"]", @@ -1181,7 +2691,7 @@ "ci.pipeline.url": "https://test.\u2020est.io/Test/test-\u2020est/test-test/-/pipelines/43949931", "ci.provider.name": "gitlab", "ci.stage.name": "test-stage", - "ci.workspace_path": "/tmp/pytest-of-root/pytest-12/test_manual_api_fake_efd_faulty_session0", + "ci.workspace_path": "/tmp/pytest-of-root/pytest-87/test_manual_api_fake_efd_faulty_session0", "component": "dd_manual_test_fw", "git.branch": "test.brancn/test_name", "git.commit.author.date": "2024-09-10T10:11:13+01:00", @@ -1192,27 +2702,26 @@ "git.repository_url": "https://test.test.io/Test/test-test/test-test.git", "git.tag": "v1.0.0", "language": "python", - "library_version": "2.18.0.dev124+gc03b9e422.d20241205", + "library_version": "2.18.0.dev111+g4ca600932", "os.architecture": "aarch64", "os.platform": "Linux", "os.version": "6.6.12-linuxkit", - "runtime-id": "bdef4ecb6c674245bfc4f6518ff5a773", + "runtime-id": "096f74b542c341ea9f8a4b6947a6a95f", "runtime.name": "CPython", - "runtime.version": "3.9.19", + "runtime.version": "3.11.9", "span.kind": "test", "test.command": "manual_efd_faulty_session", "test.framework": "dd_manual_test_fw", "test.framework_version": 
"1.0.0", - "test.is_new": "true", "test.module": "m2", "test.module_path": "", - "test.name": "m2_s1_t7", + "test.name": "m2_s1_t28", "test.status": "pass", "test.suite": "m2_s1", "test.type": "test", - "test_module_id": "17675353669520667242", - "test_session_id": "15705414272000062156", - "test_suite_id": "3960011827465066479", + "test_module_id": "17061851581233455560", + "test_session_id": "18323133602450366815", + "test_suite_id": "13136832476816543943", "type": "test" }, "metrics": { @@ -1220,17 +2729,17 @@ "_dd.top_level": 1, "_dd.tracer_kr": 1.0, "_sampling_priority_v1": 1, - "process_id": 32495 + "process_id": 97018 }, - "duration": 31666, - "start": 1733391714564827459 + "duration": 22958, + "start": 1734010946946935049 }], [ { "name": "test_visibility.test", "service": "test-test", - "resource": "m2_s1_t8", - "trace_id": 13, + "resource": "m2_s1_t29", + "trace_id": 35, "span_id": 1, "parent_id": 0, "type": "test", @@ -1240,7 +2749,7 @@ "_dd.ci.env_vars": "{\"CI_PROJECT_URL\":\"https://test.test.io/Test/test-test/test-test\",\"CI_PIPELINE_ID\":\"43949931\",\"CI_JOB_ID\":\"633358062\"}", "_dd.origin": "ciapp-test", "_dd.p.dm": "-0", - "_dd.p.tid": "6751756200000000", + "_dd.p.tid": "675ae84200000000", "ci.job.name": "test-job", "ci.job.url": "https://test.test.io/Test/test-test/test-test/-/jobs/633358062", "ci.node.labels": "[\"runner:test-test-test-test\"]", @@ -1251,7 +2760,7 @@ "ci.pipeline.url": "https://test.\u2020est.io/Test/test-\u2020est/test-test/-/pipelines/43949931", "ci.provider.name": "gitlab", "ci.stage.name": "test-stage", - "ci.workspace_path": "/tmp/pytest-of-root/pytest-12/test_manual_api_fake_efd_faulty_session0", + "ci.workspace_path": "/tmp/pytest-of-root/pytest-87/test_manual_api_fake_efd_faulty_session0", "component": "dd_manual_test_fw", "git.branch": "test.brancn/test_name", "git.commit.author.date": "2024-09-10T10:11:13+01:00", @@ -1262,27 +2771,26 @@ "git.repository_url": "https://test.test.io/Test/test-test/test-test.git", 
"git.tag": "v1.0.0", "language": "python", - "library_version": "2.18.0.dev124+gc03b9e422.d20241205", + "library_version": "2.18.0.dev111+g4ca600932", "os.architecture": "aarch64", "os.platform": "Linux", "os.version": "6.6.12-linuxkit", - "runtime-id": "bdef4ecb6c674245bfc4f6518ff5a773", + "runtime-id": "096f74b542c341ea9f8a4b6947a6a95f", "runtime.name": "CPython", - "runtime.version": "3.9.19", + "runtime.version": "3.11.9", "span.kind": "test", "test.command": "manual_efd_faulty_session", "test.framework": "dd_manual_test_fw", "test.framework_version": "1.0.0", - "test.is_new": "true", "test.module": "m2", "test.module_path": "", - "test.name": "m2_s1_t8", + "test.name": "m2_s1_t29", "test.status": "pass", "test.suite": "m2_s1", "test.type": "test", - "test_module_id": "17675353669520667242", - "test_session_id": "15705414272000062156", - "test_suite_id": "3960011827465066479", + "test_module_id": "17061851581233455560", + "test_session_id": "18323133602450366815", + "test_suite_id": "13136832476816543943", "type": "test" }, "metrics": { @@ -1290,17 +2798,17 @@ "_dd.top_level": 1, "_dd.tracer_kr": 1.0, "_sampling_priority_v1": 1, - "process_id": 32495 + "process_id": 97018 }, - "duration": 31750, - "start": 1733391714564918667 + "duration": 20875, + "start": 1734010946947001382 }], [ { "name": "test_visibility.test", "service": "test-test", - "resource": "m2_s1_t9", - "trace_id": 14, + "resource": "m2_s1_t30", + "trace_id": 36, "span_id": 1, "parent_id": 0, "type": "test", @@ -1310,7 +2818,7 @@ "_dd.ci.env_vars": "{\"CI_PROJECT_URL\":\"https://test.test.io/Test/test-test/test-test\",\"CI_PIPELINE_ID\":\"43949931\",\"CI_JOB_ID\":\"633358062\"}", "_dd.origin": "ciapp-test", "_dd.p.dm": "-0", - "_dd.p.tid": "6751756200000000", + "_dd.p.tid": "675ae84200000000", "ci.job.name": "test-job", "ci.job.url": "https://test.test.io/Test/test-test/test-test/-/jobs/633358062", "ci.node.labels": "[\"runner:test-test-test-test\"]", @@ -1321,7 +2829,7 @@ "ci.pipeline.url": 
"https://test.\u2020est.io/Test/test-\u2020est/test-test/-/pipelines/43949931", "ci.provider.name": "gitlab", "ci.stage.name": "test-stage", - "ci.workspace_path": "/tmp/pytest-of-root/pytest-12/test_manual_api_fake_efd_faulty_session0", + "ci.workspace_path": "/tmp/pytest-of-root/pytest-87/test_manual_api_fake_efd_faulty_session0", "component": "dd_manual_test_fw", "git.branch": "test.brancn/test_name", "git.commit.author.date": "2024-09-10T10:11:13+01:00", @@ -1332,27 +2840,302 @@ "git.repository_url": "https://test.test.io/Test/test-test/test-test.git", "git.tag": "v1.0.0", "language": "python", - "library_version": "2.18.0.dev124+gc03b9e422.d20241205", + "library_version": "2.18.0.dev111+g4ca600932", "os.architecture": "aarch64", "os.platform": "Linux", "os.version": "6.6.12-linuxkit", - "runtime-id": "bdef4ecb6c674245bfc4f6518ff5a773", + "runtime-id": "096f74b542c341ea9f8a4b6947a6a95f", "runtime.name": "CPython", - "runtime.version": "3.9.19", + "runtime.version": "3.11.9", "span.kind": "test", "test.command": "manual_efd_faulty_session", "test.framework": "dd_manual_test_fw", "test.framework_version": "1.0.0", - "test.is_new": "true", "test.module": "m2", "test.module_path": "", - "test.name": "m2_s1_t9", + "test.name": "m2_s1_t30", + "test.status": "pass", + "test.suite": "m2_s1", + "test.type": "test", + "test_module_id": "17061851581233455560", + "test_session_id": "18323133602450366815", + "test_suite_id": "13136832476816543943", + "type": "test" + }, + "metrics": { + "_dd.py.partial_flush": 1, + "_dd.top_level": 1, + "_dd.tracer_kr": 1.0, + "_sampling_priority_v1": 1, + "process_id": 97018 + }, + "duration": 19292, + "start": 1734010946947063965 + }], +[ + { + "name": "test_visibility.test", + "service": "test-test", + "resource": "m2_s1_t31", + "trace_id": 37, + "span_id": 1, + "parent_id": 0, + "type": "test", + "error": 0, + "meta": { + "_dd.base_service": "test_manual_api_fake_efd_faulty_session0", + "_dd.ci.env_vars": 
"{\"CI_PROJECT_URL\":\"https://test.test.io/Test/test-test/test-test\",\"CI_PIPELINE_ID\":\"43949931\",\"CI_JOB_ID\":\"633358062\"}", + "_dd.origin": "ciapp-test", + "_dd.p.dm": "-0", + "_dd.p.tid": "675ae84200000000", + "ci.job.name": "test-job", + "ci.job.url": "https://test.test.io/Test/test-test/test-test/-/jobs/633358062", + "ci.node.labels": "[\"runner:test-test-test-test\"]", + "ci.node.name": "14727097", + "ci.pipeline.id": "43949931", + "ci.pipeline.name": "Test/test-test/test-project-path", + "ci.pipeline.number": "14726", + "ci.pipeline.url": "https://test.\u2020est.io/Test/test-\u2020est/test-test/-/pipelines/43949931", + "ci.provider.name": "gitlab", + "ci.stage.name": "test-stage", + "ci.workspace_path": "/tmp/pytest-of-root/pytest-87/test_manual_api_fake_efd_faulty_session0", + "component": "dd_manual_test_fw", + "git.branch": "test.brancn/test_name", + "git.commit.author.date": "2024-09-10T10:11:13+01:00", + "git.commit.author.email": "First.Last@testtest.com", + "git.commit.author.name": "TestFirst TestLast", + "git.commit.message": "test commit message", + "git.commit.sha": "c165eb71ef833b752783b5268f21521fd16f812a", + "git.repository_url": "https://test.test.io/Test/test-test/test-test.git", + "git.tag": "v1.0.0", + "language": "python", + "library_version": "2.18.0.dev111+g4ca600932", + "os.architecture": "aarch64", + "os.platform": "Linux", + "os.version": "6.6.12-linuxkit", + "runtime-id": "096f74b542c341ea9f8a4b6947a6a95f", + "runtime.name": "CPython", + "runtime.version": "3.11.9", + "span.kind": "test", + "test.command": "manual_efd_faulty_session", + "test.framework": "dd_manual_test_fw", + "test.framework_version": "1.0.0", + "test.module": "m2", + "test.module_path": "", + "test.name": "m2_s1_t31", + "test.status": "pass", + "test.suite": "m2_s1", + "test.type": "test", + "test_module_id": "17061851581233455560", + "test_session_id": "18323133602450366815", + "test_suite_id": "13136832476816543943", + "type": "test" + }, + "metrics": { + 
"_dd.py.partial_flush": 1, + "_dd.top_level": 1, + "_dd.tracer_kr": 1.0, + "_sampling_priority_v1": 1, + "process_id": 97018 + }, + "duration": 20750, + "start": 1734010946947123215 + }], +[ + { + "name": "test_visibility.test", + "service": "test-test", + "resource": "m2_s1_t32", + "trace_id": 38, + "span_id": 1, + "parent_id": 0, + "type": "test", + "error": 0, + "meta": { + "_dd.base_service": "test_manual_api_fake_efd_faulty_session0", + "_dd.ci.env_vars": "{\"CI_PROJECT_URL\":\"https://test.test.io/Test/test-test/test-test\",\"CI_PIPELINE_ID\":\"43949931\",\"CI_JOB_ID\":\"633358062\"}", + "_dd.origin": "ciapp-test", + "_dd.p.dm": "-0", + "_dd.p.tid": "675ae84200000000", + "ci.job.name": "test-job", + "ci.job.url": "https://test.test.io/Test/test-test/test-test/-/jobs/633358062", + "ci.node.labels": "[\"runner:test-test-test-test\"]", + "ci.node.name": "14727097", + "ci.pipeline.id": "43949931", + "ci.pipeline.name": "Test/test-test/test-project-path", + "ci.pipeline.number": "14726", + "ci.pipeline.url": "https://test.\u2020est.io/Test/test-\u2020est/test-test/-/pipelines/43949931", + "ci.provider.name": "gitlab", + "ci.stage.name": "test-stage", + "ci.workspace_path": "/tmp/pytest-of-root/pytest-87/test_manual_api_fake_efd_faulty_session0", + "component": "dd_manual_test_fw", + "git.branch": "test.brancn/test_name", + "git.commit.author.date": "2024-09-10T10:11:13+01:00", + "git.commit.author.email": "First.Last@testtest.com", + "git.commit.author.name": "TestFirst TestLast", + "git.commit.message": "test commit message", + "git.commit.sha": "c165eb71ef833b752783b5268f21521fd16f812a", + "git.repository_url": "https://test.test.io/Test/test-test/test-test.git", + "git.tag": "v1.0.0", + "language": "python", + "library_version": "2.18.0.dev111+g4ca600932", + "os.architecture": "aarch64", + "os.platform": "Linux", + "os.version": "6.6.12-linuxkit", + "runtime-id": "096f74b542c341ea9f8a4b6947a6a95f", + "runtime.name": "CPython", + "runtime.version": "3.11.9", + 
"span.kind": "test", + "test.command": "manual_efd_faulty_session", + "test.framework": "dd_manual_test_fw", + "test.framework_version": "1.0.0", + "test.module": "m2", + "test.module_path": "", + "test.name": "m2_s1_t32", + "test.status": "pass", + "test.suite": "m2_s1", + "test.type": "test", + "test_module_id": "17061851581233455560", + "test_session_id": "18323133602450366815", + "test_suite_id": "13136832476816543943", + "type": "test" + }, + "metrics": { + "_dd.py.partial_flush": 1, + "_dd.top_level": 1, + "_dd.tracer_kr": 1.0, + "_sampling_priority_v1": 1, + "process_id": 97018 + }, + "duration": 22792, + "start": 1734010946947226215 + }], +[ + { + "name": "test_visibility.test", + "service": "test-test", + "resource": "m2_s1_t33", + "trace_id": 39, + "span_id": 1, + "parent_id": 0, + "type": "test", + "error": 0, + "meta": { + "_dd.base_service": "test_manual_api_fake_efd_faulty_session0", + "_dd.ci.env_vars": "{\"CI_PROJECT_URL\":\"https://test.test.io/Test/test-test/test-test\",\"CI_PIPELINE_ID\":\"43949931\",\"CI_JOB_ID\":\"633358062\"}", + "_dd.origin": "ciapp-test", + "_dd.p.dm": "-0", + "_dd.p.tid": "675ae84200000000", + "ci.job.name": "test-job", + "ci.job.url": "https://test.test.io/Test/test-test/test-test/-/jobs/633358062", + "ci.node.labels": "[\"runner:test-test-test-test\"]", + "ci.node.name": "14727097", + "ci.pipeline.id": "43949931", + "ci.pipeline.name": "Test/test-test/test-project-path", + "ci.pipeline.number": "14726", + "ci.pipeline.url": "https://test.\u2020est.io/Test/test-\u2020est/test-test/-/pipelines/43949931", + "ci.provider.name": "gitlab", + "ci.stage.name": "test-stage", + "ci.workspace_path": "/tmp/pytest-of-root/pytest-87/test_manual_api_fake_efd_faulty_session0", + "component": "dd_manual_test_fw", + "git.branch": "test.brancn/test_name", + "git.commit.author.date": "2024-09-10T10:11:13+01:00", + "git.commit.author.email": "First.Last@testtest.com", + "git.commit.author.name": "TestFirst TestLast", + "git.commit.message": 
"test commit message", + "git.commit.sha": "c165eb71ef833b752783b5268f21521fd16f812a", + "git.repository_url": "https://test.test.io/Test/test-test/test-test.git", + "git.tag": "v1.0.0", + "language": "python", + "library_version": "2.18.0.dev111+g4ca600932", + "os.architecture": "aarch64", + "os.platform": "Linux", + "os.version": "6.6.12-linuxkit", + "runtime-id": "096f74b542c341ea9f8a4b6947a6a95f", + "runtime.name": "CPython", + "runtime.version": "3.11.9", + "span.kind": "test", + "test.command": "manual_efd_faulty_session", + "test.framework": "dd_manual_test_fw", + "test.framework_version": "1.0.0", + "test.module": "m2", + "test.module_path": "", + "test.name": "m2_s1_t33", + "test.status": "pass", + "test.suite": "m2_s1", + "test.type": "test", + "test_module_id": "17061851581233455560", + "test_session_id": "18323133602450366815", + "test_suite_id": "13136832476816543943", + "type": "test" + }, + "metrics": { + "_dd.py.partial_flush": 1, + "_dd.top_level": 1, + "_dd.tracer_kr": 1.0, + "_sampling_priority_v1": 1, + "process_id": 97018 + }, + "duration": 20334, + "start": 1734010946947289840 + }], +[ + { + "name": "test_visibility.test", + "service": "test-test", + "resource": "m2_s1_t34", + "trace_id": 40, + "span_id": 1, + "parent_id": 0, + "type": "test", + "error": 0, + "meta": { + "_dd.base_service": "test_manual_api_fake_efd_faulty_session0", + "_dd.ci.env_vars": "{\"CI_PROJECT_URL\":\"https://test.test.io/Test/test-test/test-test\",\"CI_PIPELINE_ID\":\"43949931\",\"CI_JOB_ID\":\"633358062\"}", + "_dd.origin": "ciapp-test", + "_dd.p.dm": "-0", + "_dd.p.tid": "675ae84200000000", + "ci.job.name": "test-job", + "ci.job.url": "https://test.test.io/Test/test-test/test-test/-/jobs/633358062", + "ci.node.labels": "[\"runner:test-test-test-test\"]", + "ci.node.name": "14727097", + "ci.pipeline.id": "43949931", + "ci.pipeline.name": "Test/test-test/test-project-path", + "ci.pipeline.number": "14726", + "ci.pipeline.url": 
"https://test.\u2020est.io/Test/test-\u2020est/test-test/-/pipelines/43949931", + "ci.provider.name": "gitlab", + "ci.stage.name": "test-stage", + "ci.workspace_path": "/tmp/pytest-of-root/pytest-87/test_manual_api_fake_efd_faulty_session0", + "component": "dd_manual_test_fw", + "git.branch": "test.brancn/test_name", + "git.commit.author.date": "2024-09-10T10:11:13+01:00", + "git.commit.author.email": "First.Last@testtest.com", + "git.commit.author.name": "TestFirst TestLast", + "git.commit.message": "test commit message", + "git.commit.sha": "c165eb71ef833b752783b5268f21521fd16f812a", + "git.repository_url": "https://test.test.io/Test/test-test/test-test.git", + "git.tag": "v1.0.0", + "language": "python", + "library_version": "2.18.0.dev111+g4ca600932", + "os.architecture": "aarch64", + "os.platform": "Linux", + "os.version": "6.6.12-linuxkit", + "runtime-id": "096f74b542c341ea9f8a4b6947a6a95f", + "runtime.name": "CPython", + "runtime.version": "3.11.9", + "span.kind": "test", + "test.command": "manual_efd_faulty_session", + "test.framework": "dd_manual_test_fw", + "test.framework_version": "1.0.0", + "test.module": "m2", + "test.module_path": "", + "test.name": "m2_s1_t34", "test.status": "pass", "test.suite": "m2_s1", "test.type": "test", - "test_module_id": "17675353669520667242", - "test_session_id": "15705414272000062156", - "test_suite_id": "3960011827465066479", + "test_module_id": "17061851581233455560", + "test_session_id": "18323133602450366815", + "test_suite_id": "13136832476816543943", "type": "test" }, "metrics": { @@ -1360,17 +3143,17 @@ "_dd.top_level": 1, "_dd.tracer_kr": 1.0, "_sampling_priority_v1": 1, - "process_id": 32495 + "process_id": 97018 }, - "duration": 32291, - "start": 1733391714565010709 + "duration": 19833, + "start": 1734010946947350132 }], [ { "name": "test_visibility.test", "service": "test-test", "resource": "m2_s2_t1", - "trace_id": 15, + "trace_id": 41, "span_id": 1, "parent_id": 0, "type": "test", @@ -1380,7 +3163,7 @@ 
"_dd.ci.env_vars": "{\"CI_PROJECT_URL\":\"https://test.test.io/Test/test-test/test-test\",\"CI_PIPELINE_ID\":\"43949931\",\"CI_JOB_ID\":\"633358062\"}", "_dd.origin": "ciapp-test", "_dd.p.dm": "-0", - "_dd.p.tid": "6751756200000000", + "_dd.p.tid": "675ae84200000000", "ci.job.name": "test-job", "ci.job.url": "https://test.test.io/Test/test-test/test-test/-/jobs/633358062", "ci.node.labels": "[\"runner:test-test-test-test\"]", @@ -1391,7 +3174,7 @@ "ci.pipeline.url": "https://test.\u2020est.io/Test/test-\u2020est/test-test/-/pipelines/43949931", "ci.provider.name": "gitlab", "ci.stage.name": "test-stage", - "ci.workspace_path": "/tmp/pytest-of-root/pytest-12/test_manual_api_fake_efd_faulty_session0", + "ci.workspace_path": "/tmp/pytest-of-root/pytest-87/test_manual_api_fake_efd_faulty_session0", "component": "dd_manual_test_fw", "git.branch": "test.brancn/test_name", "git.commit.author.date": "2024-09-10T10:11:13+01:00", @@ -1402,13 +3185,13 @@ "git.repository_url": "https://test.test.io/Test/test-test/test-test.git", "git.tag": "v1.0.0", "language": "python", - "library_version": "2.18.0.dev124+gc03b9e422.d20241205", + "library_version": "2.18.0.dev111+g4ca600932", "os.architecture": "aarch64", "os.platform": "Linux", "os.version": "6.6.12-linuxkit", - "runtime-id": "bdef4ecb6c674245bfc4f6518ff5a773", + "runtime-id": "096f74b542c341ea9f8a4b6947a6a95f", "runtime.name": "CPython", - "runtime.version": "3.9.19", + "runtime.version": "3.11.9", "span.kind": "test", "test.command": "manual_efd_faulty_session", "test.framework": "dd_manual_test_fw", @@ -1420,9 +3203,9 @@ "test.status": "skip", "test.suite": "m2_s2", "test.type": "test", - "test_module_id": "17675353669520667242", - "test_session_id": "15705414272000062156", - "test_suite_id": "599512189588521228", + "test_module_id": "17061851581233455560", + "test_session_id": "18323133602450366815", + "test_suite_id": "6307099272916499859", "type": "test" }, "metrics": { @@ -1430,19 +3213,19 @@ "_dd.top_level": 1, 
"_dd.tracer_kr": 1.0, "_sampling_priority_v1": 1, - "process_id": 32495, + "process_id": 97018, "test.source.end": 2, "test.source.start": 1 }, - "duration": 44250, - "start": 1733391714565197084 + "duration": 39917, + "start": 1734010946947506132 }], [ { "name": "test_visibility.test", "service": "test-test", "resource": "m2_s2_t2", - "trace_id": 16, + "trace_id": 42, "span_id": 1, "parent_id": 0, "type": "test", @@ -1452,7 +3235,7 @@ "_dd.ci.env_vars": "{\"CI_PROJECT_URL\":\"https://test.test.io/Test/test-test/test-test\",\"CI_PIPELINE_ID\":\"43949931\",\"CI_JOB_ID\":\"633358062\"}", "_dd.origin": "ciapp-test", "_dd.p.dm": "-0", - "_dd.p.tid": "6751756200000000", + "_dd.p.tid": "675ae84200000000", "ci.job.name": "test-job", "ci.job.url": "https://test.test.io/Test/test-test/test-test/-/jobs/633358062", "ci.node.labels": "[\"runner:test-test-test-test\"]", @@ -1463,7 +3246,7 @@ "ci.pipeline.url": "https://test.\u2020est.io/Test/test-\u2020est/test-test/-/pipelines/43949931", "ci.provider.name": "gitlab", "ci.stage.name": "test-stage", - "ci.workspace_path": "/tmp/pytest-of-root/pytest-12/test_manual_api_fake_efd_faulty_session0", + "ci.workspace_path": "/tmp/pytest-of-root/pytest-87/test_manual_api_fake_efd_faulty_session0", "component": "dd_manual_test_fw", "git.branch": "test.brancn/test_name", "git.commit.author.date": "2024-09-10T10:11:13+01:00", @@ -1474,27 +3257,26 @@ "git.repository_url": "https://test.test.io/Test/test-test/test-test.git", "git.tag": "v1.0.0", "language": "python", - "library_version": "2.18.0.dev124+gc03b9e422.d20241205", + "library_version": "2.18.0.dev111+g4ca600932", "os.architecture": "aarch64", "os.platform": "Linux", "os.version": "6.6.12-linuxkit", - "runtime-id": "bdef4ecb6c674245bfc4f6518ff5a773", + "runtime-id": "096f74b542c341ea9f8a4b6947a6a95f", "runtime.name": "CPython", - "runtime.version": "3.9.19", + "runtime.version": "3.11.9", "span.kind": "test", "test.command": "manual_efd_faulty_session", "test.framework": 
"dd_manual_test_fw", "test.framework_version": "1.0.0", - "test.is_new": "true", "test.module": "m2", "test.module_path": "", "test.name": "m2_s2_t2", "test.status": "pass", "test.suite": "m2_s2", "test.type": "test", - "test_module_id": "17675353669520667242", - "test_session_id": "15705414272000062156", - "test_suite_id": "599512189588521228", + "test_module_id": "17061851581233455560", + "test_session_id": "18323133602450366815", + "test_suite_id": "6307099272916499859", "type": "test" }, "metrics": { @@ -1502,17 +3284,17 @@ "_dd.top_level": 1, "_dd.tracer_kr": 1.0, "_sampling_priority_v1": 1, - "process_id": 32495 + "process_id": 97018 }, - "duration": 32666, - "start": 1733391714565305209 + "duration": 21333, + "start": 1734010946947589674 }], [ { "name": "test_visibility.test", "service": "test-test", "resource": "m2_s2_t3", - "trace_id": 17, + "trace_id": 43, "span_id": 1, "parent_id": 0, "type": "test", @@ -1522,7 +3304,7 @@ "_dd.ci.env_vars": "{\"CI_PROJECT_URL\":\"https://test.test.io/Test/test-test/test-test\",\"CI_PIPELINE_ID\":\"43949931\",\"CI_JOB_ID\":\"633358062\"}", "_dd.origin": "ciapp-test", "_dd.p.dm": "-0", - "_dd.p.tid": "6751756200000000", + "_dd.p.tid": "675ae84200000000", "ci.job.name": "test-job", "ci.job.url": "https://test.test.io/Test/test-test/test-test/-/jobs/633358062", "ci.node.labels": "[\"runner:test-test-test-test\"]", @@ -1533,7 +3315,7 @@ "ci.pipeline.url": "https://test.\u2020est.io/Test/test-\u2020est/test-test/-/pipelines/43949931", "ci.provider.name": "gitlab", "ci.stage.name": "test-stage", - "ci.workspace_path": "/tmp/pytest-of-root/pytest-12/test_manual_api_fake_efd_faulty_session0", + "ci.workspace_path": "/tmp/pytest-of-root/pytest-87/test_manual_api_fake_efd_faulty_session0", "component": "dd_manual_test_fw", "git.branch": "test.brancn/test_name", "git.commit.author.date": "2024-09-10T10:11:13+01:00", @@ -1544,19 +3326,18 @@ "git.repository_url": "https://test.test.io/Test/test-test/test-test.git", "git.tag": 
"v1.0.0", "language": "python", - "library_version": "2.18.0.dev124+gc03b9e422.d20241205", + "library_version": "2.18.0.dev111+g4ca600932", "os.architecture": "aarch64", "os.platform": "Linux", "os.version": "6.6.12-linuxkit", - "runtime-id": "bdef4ecb6c674245bfc4f6518ff5a773", + "runtime-id": "096f74b542c341ea9f8a4b6947a6a95f", "runtime.name": "CPython", - "runtime.version": "3.9.19", + "runtime.version": "3.11.9", "span.kind": "test", "test.codeowners": "[\"@romain\"]", "test.command": "manual_efd_faulty_session", "test.framework": "dd_manual_test_fw", "test.framework_version": "1.0.0", - "test.is_new": "true", "test.module": "m2", "test.module_path": "", "test.name": "m2_s2_t3", @@ -1564,9 +3345,9 @@ "test.status": "pass", "test.suite": "m2_s2", "test.type": "test", - "test_module_id": "17675353669520667242", - "test_session_id": "15705414272000062156", - "test_suite_id": "599512189588521228", + "test_module_id": "17061851581233455560", + "test_session_id": "18323133602450366815", + "test_suite_id": "6307099272916499859", "type": "test" }, "metrics": { @@ -1574,19 +3355,19 @@ "_dd.top_level": 1, "_dd.tracer_kr": 1.0, "_sampling_priority_v1": 1, - "process_id": 32495, + "process_id": 97018, "test.source.end": 12, "test.source.start": 4 }, - "duration": 59625, - "start": 1733391714565399292 + "duration": 37250, + "start": 1734010946947654507 }], [ { "name": "test_visibility.test", "service": "test-test", "resource": "m2_s2_t4", - "trace_id": 18, + "trace_id": 44, "span_id": 1, "parent_id": 0, "type": "test", @@ -1596,7 +3377,7 @@ "_dd.ci.env_vars": "{\"CI_PROJECT_URL\":\"https://test.test.io/Test/test-test/test-test\",\"CI_PIPELINE_ID\":\"43949931\",\"CI_JOB_ID\":\"633358062\"}", "_dd.origin": "ciapp-test", "_dd.p.dm": "-0", - "_dd.p.tid": "6751756200000000", + "_dd.p.tid": "675ae84200000000", "ci.job.name": "test-job", "ci.job.url": "https://test.test.io/Test/test-test/test-test/-/jobs/633358062", "ci.node.labels": "[\"runner:test-test-test-test\"]", @@ -1607,7 
+3388,7 @@ "ci.pipeline.url": "https://test.\u2020est.io/Test/test-\u2020est/test-test/-/pipelines/43949931", "ci.provider.name": "gitlab", "ci.stage.name": "test-stage", - "ci.workspace_path": "/tmp/pytest-of-root/pytest-12/test_manual_api_fake_efd_faulty_session0", + "ci.workspace_path": "/tmp/pytest-of-root/pytest-87/test_manual_api_fake_efd_faulty_session0", "component": "dd_manual_test_fw", "git.branch": "test.brancn/test_name", "git.commit.author.date": "2024-09-10T10:11:13+01:00", @@ -1618,27 +3399,26 @@ "git.repository_url": "https://test.test.io/Test/test-test/test-test.git", "git.tag": "v1.0.0", "language": "python", - "library_version": "2.18.0.dev124+gc03b9e422.d20241205", + "library_version": "2.18.0.dev111+g4ca600932", "os.architecture": "aarch64", "os.platform": "Linux", "os.version": "6.6.12-linuxkit", - "runtime-id": "bdef4ecb6c674245bfc4f6518ff5a773", + "runtime-id": "096f74b542c341ea9f8a4b6947a6a95f", "runtime.name": "CPython", - "runtime.version": "3.9.19", + "runtime.version": "3.11.9", "span.kind": "test", "test.command": "manual_efd_faulty_session", "test.framework": "dd_manual_test_fw", "test.framework_version": "1.0.0", - "test.is_new": "true", "test.module": "m2", "test.module_path": "", "test.name": "m2_s2_t4", "test.status": "pass", "test.suite": "m2_s2", "test.type": "test", - "test_module_id": "17675353669520667242", - "test_session_id": "15705414272000062156", - "test_suite_id": "599512189588521228", + "test_module_id": "17061851581233455560", + "test_session_id": "18323133602450366815", + "test_suite_id": "6307099272916499859", "type": "test" }, "metrics": { @@ -1646,17 +3426,17 @@ "_dd.top_level": 1, "_dd.tracer_kr": 1.0, "_sampling_priority_v1": 1, - "process_id": 32495 + "process_id": 97018 }, - "duration": 32292, - "start": 1733391714565524000 + "duration": 20250, + "start": 1734010946947733340 }], [ { "name": "test_visibility.test", "service": "test-test", "resource": "m2_s2_t5", - "trace_id": 19, + "trace_id": 45, "span_id": 1, 
"parent_id": 0, "type": "test", @@ -1666,7 +3446,7 @@ "_dd.ci.env_vars": "{\"CI_PROJECT_URL\":\"https://test.test.io/Test/test-test/test-test\",\"CI_PIPELINE_ID\":\"43949931\",\"CI_JOB_ID\":\"633358062\"}", "_dd.origin": "ciapp-test", "_dd.p.dm": "-0", - "_dd.p.tid": "6751756200000000", + "_dd.p.tid": "675ae84200000000", "ci.job.name": "test-job", "ci.job.url": "https://test.test.io/Test/test-test/test-test/-/jobs/633358062", "ci.node.labels": "[\"runner:test-test-test-test\"]", @@ -1677,7 +3457,7 @@ "ci.pipeline.url": "https://test.\u2020est.io/Test/test-\u2020est/test-test/-/pipelines/43949931", "ci.provider.name": "gitlab", "ci.stage.name": "test-stage", - "ci.workspace_path": "/tmp/pytest-of-root/pytest-12/test_manual_api_fake_efd_faulty_session0", + "ci.workspace_path": "/tmp/pytest-of-root/pytest-87/test_manual_api_fake_efd_faulty_session0", "component": "dd_manual_test_fw", "git.branch": "test.brancn/test_name", "git.commit.author.date": "2024-09-10T10:11:13+01:00", @@ -1688,13 +3468,13 @@ "git.repository_url": "https://test.test.io/Test/test-test/test-test.git", "git.tag": "v1.0.0", "language": "python", - "library_version": "2.18.0.dev124+gc03b9e422.d20241205", + "library_version": "2.18.0.dev111+g4ca600932", "os.architecture": "aarch64", "os.platform": "Linux", "os.version": "6.6.12-linuxkit", - "runtime-id": "bdef4ecb6c674245bfc4f6518ff5a773", + "runtime-id": "096f74b542c341ea9f8a4b6947a6a95f", "runtime.name": "CPython", - "runtime.version": "3.9.19", + "runtime.version": "3.11.9", "span.kind": "test", "test.command": "manual_efd_faulty_session", "test.framework": "dd_manual_test_fw", @@ -1705,9 +3485,9 @@ "test.status": "pass", "test.suite": "m2_s2", "test.type": "test", - "test_module_id": "17675353669520667242", - "test_session_id": "15705414272000062156", - "test_suite_id": "599512189588521228", + "test_module_id": "17061851581233455560", + "test_session_id": "18323133602450366815", + "test_suite_id": "6307099272916499859", "type": "test" }, 
"metrics": { @@ -1715,8 +3495,8 @@ "_dd.top_level": 1, "_dd.tracer_kr": 1.0, "_sampling_priority_v1": 1, - "process_id": 32495 + "process_id": 97018 }, - "duration": 30792, - "start": 1733391714565625125 + "duration": 19708, + "start": 1734010946947794757 }]] diff --git a/tests/snapshots/tests.appsec.appsec.test_processor.test_appsec_body_no_collection_snapshot.json b/tests/snapshots/tests.appsec.appsec.test_processor.test_appsec_body_no_collection_snapshot.json index 4f093b82c80..847daf52c50 100644 --- a/tests/snapshots/tests.appsec.appsec.test_processor.test_appsec_body_no_collection_snapshot.json +++ b/tests/snapshots/tests.appsec.appsec.test_processor.test_appsec_body_no_collection_snapshot.json @@ -10,7 +10,7 @@ "meta": { "_dd.appsec.event_rules.version": "1.13.3", "_dd.appsec.json": "{\"triggers\":[\n {\n \"rule\": {\n \"id\": \"nfd-000-006\",\n \"name\": \"Detect failed attempt to fetch sensitive files\",\n \"tags\": {\n \"capec\": \"1000/118/169\",\n \"category\": \"attack_attempt\",\n \"confidence\": \"1\",\n \"cwe\": \"200\",\n \"type\": \"security_scanner\"\n }\n },\n \"rule_matches\": [\n {\n \"operator\": \"match_regex\",\n \"operator_value\": \"^404$\",\n \"parameters\": [\n {\n \"address\": \"server.response.status\",\n \"highlight\": [\n \"404\"\n ],\n \"key_path\": [],\n \"value\": \"404\"\n }\n ]\n },\n {\n \"operator\": \"match_regex\",\n \"operator_value\": \"\\\\.(cgi|bat|dll|exe|key|cert|crt|pem|der|pkcs|pkcs|pkcs[0-9]*|nsf|jsa|war|java|class|vb|vba|so|git|svn|hg|cvs)([^a-zA-Z0-9_]|$)\",\n \"parameters\": [\n {\n \"address\": \"server.request.uri.raw\",\n \"highlight\": [\n \".git\"\n ],\n \"key_path\": [],\n \"value\": \"/.git\"\n }\n ]\n }\n ]\n }\n]}", - "_dd.appsec.waf.version": "1.21.0", + "_dd.appsec.waf.version": "1.22.0", "_dd.origin": "appsec", "_dd.p.appsec": "1", "_dd.p.dm": "-5", diff --git a/tests/snapshots/tests.appsec.appsec.test_processor.test_appsec_cookies_no_collection_snapshot.json 
b/tests/snapshots/tests.appsec.appsec.test_processor.test_appsec_cookies_no_collection_snapshot.json index cbdf4ac389d..b3f0d82c699 100644 --- a/tests/snapshots/tests.appsec.appsec.test_processor.test_appsec_cookies_no_collection_snapshot.json +++ b/tests/snapshots/tests.appsec.appsec.test_processor.test_appsec_cookies_no_collection_snapshot.json @@ -10,7 +10,7 @@ "meta": { "_dd.appsec.event_rules.version": "1.13.3", "_dd.appsec.json": "{\"triggers\":[\n {\n \"rule\": {\n \"id\": \"nfd-000-006\",\n \"name\": \"Detect failed attempt to fetch sensitive files\",\n \"tags\": {\n \"capec\": \"1000/118/169\",\n \"category\": \"attack_attempt\",\n \"confidence\": \"1\",\n \"cwe\": \"200\",\n \"type\": \"security_scanner\"\n }\n },\n \"rule_matches\": [\n {\n \"operator\": \"match_regex\",\n \"operator_value\": \"^404$\",\n \"parameters\": [\n {\n \"address\": \"server.response.status\",\n \"highlight\": [\n \"404\"\n ],\n \"key_path\": [],\n \"value\": \"404\"\n }\n ]\n },\n {\n \"operator\": \"match_regex\",\n \"operator_value\": \"\\\\.(cgi|bat|dll|exe|key|cert|crt|pem|der|pkcs|pkcs|pkcs[0-9]*|nsf|jsa|war|java|class|vb|vba|so|git|svn|hg|cvs)([^a-zA-Z0-9_]|$)\",\n \"parameters\": [\n {\n \"address\": \"server.request.uri.raw\",\n \"highlight\": [\n \".git\"\n ],\n \"key_path\": [],\n \"value\": \"/.git\"\n }\n ]\n }\n ]\n }\n]}", - "_dd.appsec.waf.version": "1.21.0", + "_dd.appsec.waf.version": "1.22.0", "_dd.origin": "appsec", "_dd.p.appsec": "1", "_dd.p.dm": "-5", diff --git a/tests/snapshots/tests.appsec.appsec.test_processor.test_appsec_span_tags_snapshot.json b/tests/snapshots/tests.appsec.appsec.test_processor.test_appsec_span_tags_snapshot.json index 4908df4eed0..1cab0e7c25e 100644 --- a/tests/snapshots/tests.appsec.appsec.test_processor.test_appsec_span_tags_snapshot.json +++ b/tests/snapshots/tests.appsec.appsec.test_processor.test_appsec_span_tags_snapshot.json @@ -10,7 +10,7 @@ "meta": { "_dd.appsec.event_rules.version": "1.13.3", "_dd.appsec.json": 
"{\"triggers\":[\n {\n \"rule\": {\n \"id\": \"nfd-000-006\",\n \"name\": \"Detect failed attempt to fetch sensitive files\",\n \"tags\": {\n \"capec\": \"1000/118/169\",\n \"category\": \"attack_attempt\",\n \"confidence\": \"1\",\n \"cwe\": \"200\",\n \"type\": \"security_scanner\"\n }\n },\n \"rule_matches\": [\n {\n \"operator\": \"match_regex\",\n \"operator_value\": \"^404$\",\n \"parameters\": [\n {\n \"address\": \"server.response.status\",\n \"highlight\": [\n \"404\"\n ],\n \"key_path\": [],\n \"value\": \"404\"\n }\n ]\n },\n {\n \"operator\": \"match_regex\",\n \"operator_value\": \"\\\\.(cgi|bat|dll|exe|key|cert|crt|pem|der|pkcs|pkcs|pkcs[0-9]*|nsf|jsa|war|java|class|vb|vba|so|git|svn|hg|cvs)([^a-zA-Z0-9_]|$)\",\n \"parameters\": [\n {\n \"address\": \"server.request.uri.raw\",\n \"highlight\": [\n \".git\"\n ],\n \"key_path\": [],\n \"value\": \"/.git\"\n }\n ]\n }\n ]\n }\n]}", - "_dd.appsec.waf.version": "1.21.0", + "_dd.appsec.waf.version": "1.22.0", "_dd.base_service": "tests.appsec.appsec", "_dd.origin": "appsec", "_dd.p.appsec": "1", diff --git a/tests/snapshots/tests.appsec.appsec.test_processor.test_appsec_span_tags_snapshot_with_errors.json b/tests/snapshots/tests.appsec.appsec.test_processor.test_appsec_span_tags_snapshot_with_errors.json index 60c918bdc2f..28e1dd48dd3 100644 --- a/tests/snapshots/tests.appsec.appsec.test_processor.test_appsec_span_tags_snapshot_with_errors.json +++ b/tests/snapshots/tests.appsec.appsec.test_processor.test_appsec_span_tags_snapshot_with_errors.json @@ -10,7 +10,7 @@ "meta": { "_dd.appsec.event_rules.errors": "{\"missing key 'conditions'\": [\"crs-913-110\"], \"missing key 'tags'\": [\"crs-942-100\"]}", "_dd.appsec.event_rules.version": "5.5.5", - "_dd.appsec.waf.version": "1.21.0", + "_dd.appsec.waf.version": "1.22.0", "_dd.base_service": "tests.appsec.appsec", "_dd.p.dm": "-0", "_dd.runtime_family": "python", diff --git 
a/tests/snapshots/tests.contrib.azure_functions.test_azure_functions_snapshot.test_http_get_error.json b/tests/snapshots/tests.contrib.azure_functions.test_azure_functions_snapshot.test_http_get_error.json new file mode 100644 index 00000000000..4e0cf3e81b1 --- /dev/null +++ b/tests/snapshots/tests.contrib.azure_functions.test_azure_functions_snapshot.test_http_get_error.json @@ -0,0 +1,36 @@ +[[ + { + "name": "azure.functions.invoke", + "service": "test-func", + "resource": "GET /api/httpgeterror", + "trace_id": 0, + "span_id": 1, + "parent_id": 0, + "type": "serverless", + "error": 1, + "meta": { + "_dd.p.dm": "-0", + "_dd.p.tid": "6750ad8500000000", + "aas.function.name": "http_get_error", + "aas.function.trigger": "Http", + "component": "azure_functions", + "error.message": "Test Error", + "error.stack": "Traceback (most recent call last):\n File \"/root/project/ddtrace/contrib/internal/azure_functions/patch.py\", line 65, in wrap_function\n res = func(req)\n ^^^^^^^^^\n File \"/root/project/tests/contrib/azure_functions/azure_function_app/function_app.py\", line 19, in http_get_error\n raise Exception(\"Test Error\")\nException: Test Error\n", + "error.type": "builtins.Exception", + "http.method": "GET", + "http.route": "/api/httpgeterror", + "http.url": "http://0.0.0.0:7071/api/httpgeterror", + "http.useragent": "python-httpx/x.xx.x", + "language": "python", + "runtime-id": "d7efb82603894b91af0e18f95bfb40ce", + "span.kind": "server" + }, + "metrics": { + "_dd.top_level": 1, + "_dd.tracer_kr": 1.0, + "_sampling_priority_v1": 1, + "process_id": 98042 + }, + "duration": 3862875, + "start": 1733340549814399761 + }]] diff --git a/tests/snapshots/tests.contrib.azure_functions.test_azure_functions_snapshot.test_http_get_ok.json b/tests/snapshots/tests.contrib.azure_functions.test_azure_functions_snapshot.test_http_get_ok.json new file mode 100644 index 00000000000..415678e4dec --- /dev/null +++ 
b/tests/snapshots/tests.contrib.azure_functions.test_azure_functions_snapshot.test_http_get_ok.json @@ -0,0 +1,33 @@ +[[ + { + "name": "azure.functions.invoke", + "service": "test-func", + "resource": "GET /api/httpgetok", + "trace_id": 0, + "span_id": 1, + "parent_id": 0, + "type": "serverless", + "meta": { + "_dd.p.dm": "-0", + "_dd.p.tid": "6750ad7d00000000", + "aas.function.name": "http_get_ok", + "aas.function.trigger": "Http", + "component": "azure_functions", + "http.method": "GET", + "http.route": "/api/httpgetok", + "http.status_code": "200", + "http.url": "http://0.0.0.0:7071/api/httpgetok?key=val", + "http.useragent": "python-httpx/x.xx.x", + "language": "python", + "runtime-id": "2dd77b70098048f5a6b7d3a7d53d1082", + "span.kind": "server" + }, + "metrics": { + "_dd.top_level": 1, + "_dd.tracer_kr": 1.0, + "_sampling_priority_v1": 1, + "process_id": 96455 + }, + "duration": 1160792, + "start": 1733340541444015424 + }]] diff --git a/tests/snapshots/tests.contrib.azure_functions.test_azure_functions_snapshot.test_http_post_ok.json b/tests/snapshots/tests.contrib.azure_functions.test_azure_functions_snapshot.test_http_post_ok.json new file mode 100644 index 00000000000..44c0491b7a7 --- /dev/null +++ b/tests/snapshots/tests.contrib.azure_functions.test_azure_functions_snapshot.test_http_post_ok.json @@ -0,0 +1,33 @@ +[[ + { + "name": "azure.functions.invoke", + "service": "test-func", + "resource": "POST /api/httppostok", + "trace_id": 0, + "span_id": 1, + "parent_id": 0, + "type": "serverless", + "meta": { + "_dd.p.dm": "-0", + "_dd.p.tid": "6750ad8e00000000", + "aas.function.name": "http_post_ok", + "aas.function.trigger": "Http", + "component": "azure_functions", + "http.method": "POST", + "http.route": "/api/httppostok", + "http.status_code": "200", + "http.url": "http://0.0.0.0:7071/api/httppostok", + "http.useragent": "python-httpx/x.xx.x", + "language": "python", + "runtime-id": "891babf5be3d4b86bd44163cd50c74b0", + "span.kind": "server" + }, + 
"metrics": { + "_dd.top_level": 1, + "_dd.tracer_kr": 1.0, + "_sampling_priority_v1": 1, + "process_id": 99631 + }, + "duration": 293958, + "start": 1733340558198232376 + }]] diff --git a/tests/snapshots/tests.contrib.django.test_django_appsec_snapshots.test_appsec_enabled.json b/tests/snapshots/tests.contrib.django.test_django_appsec_snapshots.test_appsec_enabled.json index 344f63429a7..cd37846b283 100644 --- a/tests/snapshots/tests.contrib.django.test_django_appsec_snapshots.test_appsec_enabled.json +++ b/tests/snapshots/tests.contrib.django.test_django_appsec_snapshots.test_appsec_enabled.json @@ -10,7 +10,7 @@ "error": 0, "meta": { "_dd.appsec.event_rules.version": "1.13.3", - "_dd.appsec.waf.version": "1.21.0", + "_dd.appsec.waf.version": "1.22.0", "_dd.base_service": "", "_dd.p.dm": "-0", "_dd.p.tid": "654a694400000000", diff --git a/tests/snapshots/tests.contrib.django.test_django_appsec_snapshots.test_appsec_enabled_attack.json b/tests/snapshots/tests.contrib.django.test_django_appsec_snapshots.test_appsec_enabled_attack.json index 02956faa875..70652b2a242 100644 --- a/tests/snapshots/tests.contrib.django.test_django_appsec_snapshots.test_appsec_enabled_attack.json +++ b/tests/snapshots/tests.contrib.django.test_django_appsec_snapshots.test_appsec_enabled_attack.json @@ -11,7 +11,7 @@ "meta": { "_dd.appsec.event_rules.version": "1.13.3", "_dd.appsec.json": "{\"triggers\":[\n {\n \"rule\": {\n \"id\": \"nfd-000-006\",\n \"name\": \"Detect failed attempt to fetch sensitive files\",\n \"tags\": {\n \"capec\": \"1000/118/169\",\n \"category\": \"attack_attempt\",\n \"confidence\": \"1\",\n \"cwe\": \"200\",\n \"type\": \"security_scanner\"\n }\n },\n \"rule_matches\": [\n {\n \"operator\": \"match_regex\",\n \"operator_value\": \"^404$\",\n \"parameters\": [\n {\n \"address\": \"server.response.status\",\n \"highlight\": [\n \"404\"\n ],\n \"key_path\": [],\n \"value\": \"404\"\n }\n ]\n },\n {\n \"operator\": \"match_regex\",\n \"operator_value\": 
\"\\\\.(cgi|bat|dll|exe|key|cert|crt|pem|der|pkcs|pkcs|pkcs[0-9]*|nsf|jsa|war|java|class|vb|vba|so|git|svn|hg|cvs)([^a-zA-Z0-9_]|$)\",\n \"parameters\": [\n {\n \"address\": \"server.request.uri.raw\",\n \"highlight\": [\n \".git\"\n ],\n \"key_path\": [],\n \"value\": \"/.git\"\n }\n ]\n }\n ]\n }\n]}", - "_dd.appsec.waf.version": "1.21.0", + "_dd.appsec.waf.version": "1.22.0", "_dd.base_service": "", "_dd.origin": "appsec", "_dd.p.appsec": "1", diff --git a/tests/snapshots/tests.contrib.django.test_django_appsec_snapshots.test_request_ipblock_match_403.json b/tests/snapshots/tests.contrib.django.test_django_appsec_snapshots.test_request_ipblock_match_403.json index c5ce0d7abf9..a87fcfe4cac 100644 --- a/tests/snapshots/tests.contrib.django.test_django_appsec_snapshots.test_request_ipblock_match_403.json +++ b/tests/snapshots/tests.contrib.django.test_django_appsec_snapshots.test_request_ipblock_match_403.json @@ -11,7 +11,7 @@ "meta": { "_dd.appsec.event_rules.version": "rules_good", "_dd.appsec.json": "{\"triggers\":[{\"rule\":{\"id\":\"blk-001-001\",\"name\":\"Block IP addresses\",\"on_match\":[\"block\"],\"tags\":{\"category\":\"blocking\",\"type\":\"ip_addresses\"}},\"rule_matches\":[{\"operator\":\"ip_match\",\"operator_value\":\"\",\"parameters\":[{\"address\":\"http.client_ip\",\"key_path\":[],\"value\":\"8.8.4.4\",\"highlight\":[\"8.8.4.4\"]}]}],\"span_id\":10192376353237234254}]}", - "_dd.appsec.waf.version": "1.21.0", + "_dd.appsec.waf.version": "1.22.0", "_dd.base_service": "", "_dd.origin": "appsec", "_dd.p.appsec": "1", diff --git a/tests/snapshots/tests.contrib.django.test_django_appsec_snapshots.test_request_ipblock_match_403_json.json b/tests/snapshots/tests.contrib.django.test_django_appsec_snapshots.test_request_ipblock_match_403_json.json index 467d576ad85..e1a05bff80e 100644 --- a/tests/snapshots/tests.contrib.django.test_django_appsec_snapshots.test_request_ipblock_match_403_json.json +++ 
b/tests/snapshots/tests.contrib.django.test_django_appsec_snapshots.test_request_ipblock_match_403_json.json @@ -11,7 +11,7 @@ "meta": { "_dd.appsec.event_rules.version": "rules_good", "_dd.appsec.json": "{\"triggers\":[{\"rule\":{\"id\":\"blk-001-001\",\"name\":\"Block IP addresses\",\"on_match\":[\"block\"],\"tags\":{\"category\":\"blocking\",\"type\":\"ip_addresses\"}},\"rule_matches\":[{\"operator\":\"ip_match\",\"operator_value\":\"\",\"parameters\":[{\"address\":\"http.client_ip\",\"key_path\":[],\"value\":\"8.8.4.4\",\"highlight\":[\"8.8.4.4\"]}]}],\"span_id\":865087550764298227}]}", - "_dd.appsec.waf.version": "1.21.0", + "_dd.appsec.waf.version": "1.22.0", "_dd.base_service": "", "_dd.origin": "appsec", "_dd.p.appsec": "1", diff --git a/tests/snapshots/tests.contrib.django.test_django_appsec_snapshots.test_request_ipblock_nomatch_200.json b/tests/snapshots/tests.contrib.django.test_django_appsec_snapshots.test_request_ipblock_nomatch_200.json index 52a99d13b63..15d1b9c3565 100644 --- a/tests/snapshots/tests.contrib.django.test_django_appsec_snapshots.test_request_ipblock_nomatch_200.json +++ b/tests/snapshots/tests.contrib.django.test_django_appsec_snapshots.test_request_ipblock_nomatch_200.json @@ -10,7 +10,7 @@ "error": 0, "meta": { "_dd.appsec.event_rules.version": "rules_good", - "_dd.appsec.waf.version": "1.21.0", + "_dd.appsec.waf.version": "1.22.0", "_dd.base_service": "", "_dd.p.dm": "-0", "_dd.p.tid": "654a694400000000", diff --git a/tests/snapshots/tests.contrib.django.test_django_snapshots.test_middleware_trace_partial_based_view.json b/tests/snapshots/tests.contrib.django.test_django_snapshots.test_middleware_trace_partial_based_view.json index 9b21d7f1b84..8b56757961b 100644 --- a/tests/snapshots/tests.contrib.django.test_django_snapshots.test_middleware_trace_partial_based_view.json +++ b/tests/snapshots/tests.contrib.django.test_django_snapshots.test_middleware_trace_partial_based_view.json @@ -9,7 +9,7 @@ "type": "web", "error": 0, "meta": 
{ - "_dd.base_service": "", + "_dd.base_service": "tests.contrib.django", "_dd.p.dm": "-0", "_dd.p.tid": "654a694400000000", "component": "django", @@ -45,7 +45,7 @@ "type": "", "error": 0, "meta": { - "_dd.base_service": "", + "_dd.base_service": "tests.contrib.django", "_dd.p.tid": "654a694400000000", "component": "django" }, @@ -62,7 +62,7 @@ "type": "", "error": 0, "meta": { - "_dd.base_service": "", + "_dd.base_service": "tests.contrib.django", "_dd.p.tid": "654a694400000000", "component": "django" }, @@ -79,7 +79,7 @@ "type": "", "error": 0, "meta": { - "_dd.base_service": "", + "_dd.base_service": "tests.contrib.django", "_dd.p.tid": "654a694400000000", "component": "django" }, @@ -96,7 +96,7 @@ "type": "", "error": 0, "meta": { - "_dd.base_service": "", + "_dd.base_service": "tests.contrib.django", "_dd.p.tid": "654a694400000000", "component": "django" }, @@ -113,7 +113,7 @@ "type": "", "error": 0, "meta": { - "_dd.base_service": "", + "_dd.base_service": "tests.contrib.django", "_dd.p.tid": "654a694400000000", "component": "django" }, @@ -130,7 +130,7 @@ "type": "", "error": 0, "meta": { - "_dd.base_service": "", + "_dd.base_service": "tests.contrib.django", "_dd.p.tid": "654a694400000000", "component": "django" }, @@ -147,7 +147,7 @@ "type": "", "error": 0, "meta": { - "_dd.base_service": "", + "_dd.base_service": "tests.contrib.django", "_dd.p.tid": "654a694400000000", "component": "django" }, @@ -164,7 +164,7 @@ "type": "", "error": 0, "meta": { - "_dd.base_service": "", + "_dd.base_service": "tests.contrib.django", "_dd.p.tid": "654a694400000000", "component": "django" }, @@ -181,7 +181,7 @@ "type": "", "error": 0, "meta": { - "_dd.base_service": "", + "_dd.base_service": "tests.contrib.django", "_dd.p.tid": "654a694400000000", "component": "django" }, @@ -198,7 +198,7 @@ "type": "", "error": 0, "meta": { - "_dd.base_service": "", + "_dd.base_service": "tests.contrib.django", "_dd.p.tid": "654a694400000000", "component": "django" }, @@ -215,7 +215,7 @@ 
"type": "", "error": 0, "meta": { - "_dd.base_service": "", + "_dd.base_service": "tests.contrib.django", "_dd.p.tid": "654a694400000000", "component": "django" }, @@ -232,7 +232,7 @@ "type": "", "error": 0, "meta": { - "_dd.base_service": "", + "_dd.base_service": "tests.contrib.django", "_dd.p.tid": "654a694400000000", "component": "django" }, @@ -249,7 +249,7 @@ "type": "", "error": 0, "meta": { - "_dd.base_service": "", + "_dd.base_service": "tests.contrib.django", "_dd.p.tid": "654a694400000000", "component": "django" }, @@ -266,7 +266,7 @@ "type": "", "error": 0, "meta": { - "_dd.base_service": "", + "_dd.base_service": "tests.contrib.django", "_dd.p.tid": "654a694400000000", "component": "django" }, @@ -283,7 +283,7 @@ "type": "", "error": 0, "meta": { - "_dd.base_service": "", + "_dd.base_service": "tests.contrib.django", "_dd.p.tid": "654a694400000000", "component": "django" }, @@ -300,7 +300,7 @@ "type": "", "error": 0, "meta": { - "_dd.base_service": "", + "_dd.base_service": "tests.contrib.django", "_dd.p.tid": "654a694400000000", "component": "django" }, @@ -317,7 +317,7 @@ "type": "", "error": 0, "meta": { - "_dd.base_service": "", + "_dd.base_service": "tests.contrib.django", "_dd.p.tid": "654a694400000000", "component": "django" }, @@ -334,7 +334,7 @@ "type": "", "error": 0, "meta": { - "_dd.base_service": "", + "_dd.base_service": "tests.contrib.django", "_dd.p.tid": "654a694400000000", "component": "django" }, @@ -351,7 +351,7 @@ "type": "", "error": 0, "meta": { - "_dd.base_service": "", + "_dd.base_service": "tests.contrib.django", "_dd.p.tid": "654a694400000000", "component": "django" }, @@ -368,7 +368,7 @@ "type": "", "error": 0, "meta": { - "_dd.base_service": "", + "_dd.base_service": "tests.contrib.django", "_dd.p.tid": "654a694400000000", "component": "django" }, @@ -385,7 +385,7 @@ "type": "", "error": 0, "meta": { - "_dd.base_service": "", + "_dd.base_service": "tests.contrib.django", "_dd.p.tid": "654a694400000000", "component": 
"django" }, @@ -402,7 +402,7 @@ "type": "", "error": 0, "meta": { - "_dd.base_service": "", + "_dd.base_service": "tests.contrib.django", "_dd.p.tid": "654a694400000000", "component": "django" }, @@ -419,7 +419,7 @@ "type": "", "error": 0, "meta": { - "_dd.base_service": "", + "_dd.base_service": "tests.contrib.django", "_dd.p.tid": "654a694400000000", "component": "django" }, @@ -436,7 +436,7 @@ "type": "", "error": 0, "meta": { - "_dd.base_service": "", + "_dd.base_service": "tests.contrib.django", "_dd.p.tid": "654a694400000000", "component": "django" }, @@ -453,7 +453,7 @@ "type": "", "error": 0, "meta": { - "_dd.base_service": "", + "_dd.base_service": "tests.contrib.django", "_dd.p.tid": "654a694400000000", "component": "django" }, diff --git a/tests/snapshots/tests.contrib.flask.test_appsec_flask_snapshot.test_flask_ipblock_match_403[flask_appsec_good_rules_env].json b/tests/snapshots/tests.contrib.flask.test_appsec_flask_snapshot.test_flask_ipblock_match_403[flask_appsec_good_rules_env].json index 40295cc8b37..625cad59f0a 100644 --- a/tests/snapshots/tests.contrib.flask.test_appsec_flask_snapshot.test_flask_ipblock_match_403[flask_appsec_good_rules_env].json +++ b/tests/snapshots/tests.contrib.flask.test_appsec_flask_snapshot.test_flask_ipblock_match_403[flask_appsec_good_rules_env].json @@ -11,7 +11,7 @@ "meta": { "_dd.appsec.event_rules.version": "rules_good", "_dd.appsec.json": "{\"triggers\":[\n {\n \"rule\": {\n \"id\": \"blk-001-001\",\n \"name\": \"Block IP addresses\",\n \"on_match\": [\n \"block\"\n ],\n \"tags\": {\n \"category\": \"blocking\",\n \"type\": \"ip_addresses\"\n }\n },\n \"rule_matches\": [\n {\n \"operator\": \"ip_match\",\n \"operator_value\": \"\",\n \"parameters\": [\n {\n \"address\": \"http.client_ip\",\n \"highlight\": [\n \"8.8.4.4\"\n ],\n \"key_path\": [],\n \"value\": \"8.8.4.4\"\n }\n ]\n }\n ]\n }\n]}", - "_dd.appsec.waf.version": "1.21.0", + "_dd.appsec.waf.version": "1.22.0", "_dd.base_service": "", "_dd.origin": 
"appsec", "_dd.p.appsec": "1", diff --git a/tests/snapshots/tests.contrib.flask.test_appsec_flask_snapshot.test_flask_ipblock_match_403[flask_appsec_good_rules_env]_220.json b/tests/snapshots/tests.contrib.flask.test_appsec_flask_snapshot.test_flask_ipblock_match_403[flask_appsec_good_rules_env]_220.json index 32d8e8e4dde..c8e20851d2d 100644 --- a/tests/snapshots/tests.contrib.flask.test_appsec_flask_snapshot.test_flask_ipblock_match_403[flask_appsec_good_rules_env]_220.json +++ b/tests/snapshots/tests.contrib.flask.test_appsec_flask_snapshot.test_flask_ipblock_match_403[flask_appsec_good_rules_env]_220.json @@ -11,7 +11,7 @@ "meta": { "_dd.appsec.event_rules.version": "rules_good", "_dd.appsec.json": "{\"triggers\":[\n {\n \"rule\": {\n \"id\": \"blk-001-001\",\n \"name\": \"Block IP addresses\",\n \"on_match\": [\n \"block\"\n ],\n \"tags\": {\n \"category\": \"blocking\",\n \"type\": \"ip_addresses\"\n }\n },\n \"rule_matches\": [\n {\n \"operator\": \"ip_match\",\n \"operator_value\": \"\",\n \"parameters\": [\n {\n \"address\": \"http.client_ip\",\n \"highlight\": [\n \"8.8.4.4\"\n ],\n \"key_path\": [],\n \"value\": \"8.8.4.4\"\n }\n ]\n }\n ]\n }\n]}", - "_dd.appsec.waf.version": "1.21.0", + "_dd.appsec.waf.version": "1.22.0", "_dd.base_service": "", "_dd.origin": "appsec", "_dd.p.appsec": "1", diff --git a/tests/snapshots/tests.contrib.flask.test_appsec_flask_snapshot.test_flask_ipblock_match_403_json[flask_appsec_good_rules_env].json b/tests/snapshots/tests.contrib.flask.test_appsec_flask_snapshot.test_flask_ipblock_match_403_json[flask_appsec_good_rules_env].json index a8d620726eb..aec0ebaad30 100644 --- a/tests/snapshots/tests.contrib.flask.test_appsec_flask_snapshot.test_flask_ipblock_match_403_json[flask_appsec_good_rules_env].json +++ b/tests/snapshots/tests.contrib.flask.test_appsec_flask_snapshot.test_flask_ipblock_match_403_json[flask_appsec_good_rules_env].json @@ -11,7 +11,7 @@ "meta": { "_dd.appsec.event_rules.version": "rules_good", 
"_dd.appsec.json": "{\"triggers\":[\n {\n \"rule\": {\n \"id\": \"blk-001-001\",\n \"name\": \"Block IP addresses\",\n \"on_match\": [\n \"block\"\n ],\n \"tags\": {\n \"category\": \"blocking\",\n \"type\": \"ip_addresses\"\n }\n },\n \"rule_matches\": [\n {\n \"operator\": \"ip_match\",\n \"operator_value\": \"\",\n \"parameters\": [\n {\n \"address\": \"http.client_ip\",\n \"highlight\": [\n \"8.8.4.4\"\n ],\n \"key_path\": [],\n \"value\": \"8.8.4.4\"\n }\n ]\n }\n ]\n }\n]}", - "_dd.appsec.waf.version": "1.21.0", + "_dd.appsec.waf.version": "1.22.0", "_dd.base_service": "", "_dd.origin": "appsec", "_dd.p.appsec": "1", diff --git a/tests/snapshots/tests.contrib.flask.test_appsec_flask_snapshot.test_flask_ipblock_match_403_json[flask_appsec_good_rules_env]_220.json b/tests/snapshots/tests.contrib.flask.test_appsec_flask_snapshot.test_flask_ipblock_match_403_json[flask_appsec_good_rules_env]_220.json index 9e98ab1fa2d..ce549766664 100644 --- a/tests/snapshots/tests.contrib.flask.test_appsec_flask_snapshot.test_flask_ipblock_match_403_json[flask_appsec_good_rules_env]_220.json +++ b/tests/snapshots/tests.contrib.flask.test_appsec_flask_snapshot.test_flask_ipblock_match_403_json[flask_appsec_good_rules_env]_220.json @@ -11,7 +11,7 @@ "meta": { "_dd.appsec.event_rules.version": "rules_good", "_dd.appsec.json": "{\"triggers\":[\n {\n \"rule\": {\n \"id\": \"blk-001-001\",\n \"name\": \"Block IP addresses\",\n \"on_match\": [\n \"block\"\n ],\n \"tags\": {\n \"category\": \"blocking\",\n \"type\": \"ip_addresses\"\n }\n },\n \"rule_matches\": [\n {\n \"operator\": \"ip_match\",\n \"operator_value\": \"\",\n \"parameters\": [\n {\n \"address\": \"http.client_ip\",\n \"highlight\": [\n \"8.8.4.4\"\n ],\n \"key_path\": [],\n \"value\": \"8.8.4.4\"\n }\n ]\n }\n ]\n }\n]}", - "_dd.appsec.waf.version": "1.21.0", + "_dd.appsec.waf.version": "1.22.0", "_dd.base_service": "", "_dd.origin": "appsec", "_dd.p.appsec": "1", diff --git 
a/tests/snapshots/tests.contrib.flask.test_appsec_flask_snapshot.test_flask_processexec_osspawn[flask_appsec_good_rules_env].json b/tests/snapshots/tests.contrib.flask.test_appsec_flask_snapshot.test_flask_processexec_osspawn[flask_appsec_good_rules_env].json index d9bf74ab521..dbd705248e8 100644 --- a/tests/snapshots/tests.contrib.flask.test_appsec_flask_snapshot.test_flask_processexec_osspawn[flask_appsec_good_rules_env].json +++ b/tests/snapshots/tests.contrib.flask.test_appsec_flask_snapshot.test_flask_processexec_osspawn[flask_appsec_good_rules_env].json @@ -10,7 +10,7 @@ "error": 0, "meta": { "_dd.appsec.event_rules.version": "rules_good", - "_dd.appsec.waf.version": "1.21.0", + "_dd.appsec.waf.version": "1.22.0", "_dd.base_service": "", "_dd.p.dm": "-0", "_dd.p.tid": "654a694400000000", diff --git a/tests/snapshots/tests.contrib.flask.test_appsec_flask_snapshot.test_flask_processexec_osspawn[flask_appsec_good_rules_env]_220.json b/tests/snapshots/tests.contrib.flask.test_appsec_flask_snapshot.test_flask_processexec_osspawn[flask_appsec_good_rules_env]_220.json index 4e3c507fb24..7503a153364 100644 --- a/tests/snapshots/tests.contrib.flask.test_appsec_flask_snapshot.test_flask_processexec_osspawn[flask_appsec_good_rules_env]_220.json +++ b/tests/snapshots/tests.contrib.flask.test_appsec_flask_snapshot.test_flask_processexec_osspawn[flask_appsec_good_rules_env]_220.json @@ -10,7 +10,7 @@ "error": 0, "meta": { "_dd.appsec.event_rules.version": "rules_good", - "_dd.appsec.waf.version": "1.21.0", + "_dd.appsec.waf.version": "1.22.0", "_dd.base_service": "", "_dd.p.dm": "-0", "_dd.p.tid": "654a694400000000", diff --git a/tests/snapshots/tests.contrib.flask.test_appsec_flask_snapshot.test_flask_processexec_ossystem[flask_appsec_good_rules_env].json b/tests/snapshots/tests.contrib.flask.test_appsec_flask_snapshot.test_flask_processexec_ossystem[flask_appsec_good_rules_env].json index a53a26279ee..168dd987711 100644 --- 
a/tests/snapshots/tests.contrib.flask.test_appsec_flask_snapshot.test_flask_processexec_ossystem[flask_appsec_good_rules_env].json +++ b/tests/snapshots/tests.contrib.flask.test_appsec_flask_snapshot.test_flask_processexec_ossystem[flask_appsec_good_rules_env].json @@ -10,7 +10,7 @@ "error": 0, "meta": { "_dd.appsec.event_rules.version": "rules_good", - "_dd.appsec.waf.version": "1.21.0", + "_dd.appsec.waf.version": "1.22.0", "_dd.base_service": "", "_dd.p.dm": "-0", "_dd.p.tid": "654a694400000000", diff --git a/tests/snapshots/tests.contrib.flask.test_appsec_flask_snapshot.test_flask_processexec_ossystem[flask_appsec_good_rules_env]_220.json b/tests/snapshots/tests.contrib.flask.test_appsec_flask_snapshot.test_flask_processexec_ossystem[flask_appsec_good_rules_env]_220.json index 89a294b6e8d..73c4525efbf 100644 --- a/tests/snapshots/tests.contrib.flask.test_appsec_flask_snapshot.test_flask_processexec_ossystem[flask_appsec_good_rules_env]_220.json +++ b/tests/snapshots/tests.contrib.flask.test_appsec_flask_snapshot.test_flask_processexec_ossystem[flask_appsec_good_rules_env]_220.json @@ -10,7 +10,7 @@ "error": 0, "meta": { "_dd.appsec.event_rules.version": "rules_good", - "_dd.appsec.waf.version": "1.21.0", + "_dd.appsec.waf.version": "1.22.0", "_dd.base_service": "", "_dd.p.dm": "-0", "_dd.p.tid": "654a694400000000", diff --git a/tests/snapshots/tests.contrib.flask.test_appsec_flask_snapshot.test_flask_processexec_subprocesscommunicatenoshell[flask_appsec_good_rules_env].json b/tests/snapshots/tests.contrib.flask.test_appsec_flask_snapshot.test_flask_processexec_subprocesscommunicatenoshell[flask_appsec_good_rules_env].json index 5265382cf59..83b9e0bae3e 100644 --- a/tests/snapshots/tests.contrib.flask.test_appsec_flask_snapshot.test_flask_processexec_subprocesscommunicatenoshell[flask_appsec_good_rules_env].json +++ 
b/tests/snapshots/tests.contrib.flask.test_appsec_flask_snapshot.test_flask_processexec_subprocesscommunicatenoshell[flask_appsec_good_rules_env].json @@ -10,7 +10,7 @@ "error": 0, "meta": { "_dd.appsec.event_rules.version": "rules_good", - "_dd.appsec.waf.version": "1.21.0", + "_dd.appsec.waf.version": "1.22.0", "_dd.base_service": "", "_dd.p.dm": "-0", "_dd.p.tid": "654a694400000000", diff --git a/tests/snapshots/tests.contrib.flask.test_appsec_flask_snapshot.test_flask_processexec_subprocesscommunicatenoshell[flask_appsec_good_rules_env]_220.json b/tests/snapshots/tests.contrib.flask.test_appsec_flask_snapshot.test_flask_processexec_subprocesscommunicatenoshell[flask_appsec_good_rules_env]_220.json index ae473681205..4fd585c203c 100644 --- a/tests/snapshots/tests.contrib.flask.test_appsec_flask_snapshot.test_flask_processexec_subprocesscommunicatenoshell[flask_appsec_good_rules_env]_220.json +++ b/tests/snapshots/tests.contrib.flask.test_appsec_flask_snapshot.test_flask_processexec_subprocesscommunicatenoshell[flask_appsec_good_rules_env]_220.json @@ -10,7 +10,7 @@ "error": 0, "meta": { "_dd.appsec.event_rules.version": "rules_good", - "_dd.appsec.waf.version": "1.21.0", + "_dd.appsec.waf.version": "1.22.0", "_dd.base_service": "", "_dd.p.dm": "-0", "_dd.p.tid": "654a694400000000", diff --git a/tests/snapshots/tests.contrib.flask.test_appsec_flask_snapshot.test_flask_processexec_subprocesscommunicateshell[flask_appsec_good_rules_env].json b/tests/snapshots/tests.contrib.flask.test_appsec_flask_snapshot.test_flask_processexec_subprocesscommunicateshell[flask_appsec_good_rules_env].json index 2eb36d25a38..20b135b759b 100644 --- a/tests/snapshots/tests.contrib.flask.test_appsec_flask_snapshot.test_flask_processexec_subprocesscommunicateshell[flask_appsec_good_rules_env].json +++ b/tests/snapshots/tests.contrib.flask.test_appsec_flask_snapshot.test_flask_processexec_subprocesscommunicateshell[flask_appsec_good_rules_env].json @@ -10,7 +10,7 @@ "error": 0, "meta": { 
"_dd.appsec.event_rules.version": "rules_good", - "_dd.appsec.waf.version": "1.21.0", + "_dd.appsec.waf.version": "1.22.0", "_dd.base_service": "", "_dd.p.dm": "-0", "_dd.p.tid": "654a694400000000", diff --git a/tests/snapshots/tests.contrib.flask.test_appsec_flask_snapshot.test_flask_processexec_subprocesscommunicateshell[flask_appsec_good_rules_env]_220.json b/tests/snapshots/tests.contrib.flask.test_appsec_flask_snapshot.test_flask_processexec_subprocesscommunicateshell[flask_appsec_good_rules_env]_220.json index 800d73259aa..5fec712f209 100644 --- a/tests/snapshots/tests.contrib.flask.test_appsec_flask_snapshot.test_flask_processexec_subprocesscommunicateshell[flask_appsec_good_rules_env]_220.json +++ b/tests/snapshots/tests.contrib.flask.test_appsec_flask_snapshot.test_flask_processexec_subprocesscommunicateshell[flask_appsec_good_rules_env]_220.json @@ -10,7 +10,7 @@ "error": 0, "meta": { "_dd.appsec.event_rules.version": "rules_good", - "_dd.appsec.waf.version": "1.21.0", + "_dd.appsec.waf.version": "1.22.0", "_dd.base_service": "", "_dd.p.dm": "-0", "_dd.p.tid": "654a694400000000", diff --git a/tests/snapshots/tests.contrib.flask.test_appsec_flask_snapshot.test_flask_userblock_match_200_json[flask_appsec_good_rules_env].json b/tests/snapshots/tests.contrib.flask.test_appsec_flask_snapshot.test_flask_userblock_match_200_json[flask_appsec_good_rules_env].json index d29fdcda126..96245135611 100644 --- a/tests/snapshots/tests.contrib.flask.test_appsec_flask_snapshot.test_flask_userblock_match_200_json[flask_appsec_good_rules_env].json +++ b/tests/snapshots/tests.contrib.flask.test_appsec_flask_snapshot.test_flask_userblock_match_200_json[flask_appsec_good_rules_env].json @@ -10,7 +10,7 @@ "error": 0, "meta": { "_dd.appsec.event_rules.version": "rules_good", - "_dd.appsec.waf.version": "1.21.0", + "_dd.appsec.waf.version": "1.22.0", "_dd.base_service": "", "_dd.p.dm": "-0", "_dd.p.tid": "654a694400000000", diff --git 
a/tests/snapshots/tests.contrib.flask.test_appsec_flask_snapshot.test_flask_userblock_match_200_json[flask_appsec_good_rules_env]_220.json b/tests/snapshots/tests.contrib.flask.test_appsec_flask_snapshot.test_flask_userblock_match_200_json[flask_appsec_good_rules_env]_220.json index e099df45417..3166f83bff1 100644 --- a/tests/snapshots/tests.contrib.flask.test_appsec_flask_snapshot.test_flask_userblock_match_200_json[flask_appsec_good_rules_env]_220.json +++ b/tests/snapshots/tests.contrib.flask.test_appsec_flask_snapshot.test_flask_userblock_match_200_json[flask_appsec_good_rules_env]_220.json @@ -10,7 +10,7 @@ "error": 0, "meta": { "_dd.appsec.event_rules.version": "rules_good", - "_dd.appsec.waf.version": "1.21.0", + "_dd.appsec.waf.version": "1.22.0", "_dd.base_service": "", "_dd.p.dm": "-0", "_dd.p.tid": "654a694400000000", diff --git a/tests/snapshots/tests.contrib.flask.test_appsec_flask_snapshot.test_flask_userblock_match_403_json[flask_appsec_good_rules_env].json b/tests/snapshots/tests.contrib.flask.test_appsec_flask_snapshot.test_flask_userblock_match_403_json[flask_appsec_good_rules_env].json index 5a0c8e301a5..3e25ed6da8e 100644 --- a/tests/snapshots/tests.contrib.flask.test_appsec_flask_snapshot.test_flask_userblock_match_403_json[flask_appsec_good_rules_env].json +++ b/tests/snapshots/tests.contrib.flask.test_appsec_flask_snapshot.test_flask_userblock_match_403_json[flask_appsec_good_rules_env].json @@ -11,7 +11,7 @@ "meta": { "_dd.appsec.event_rules.version": "rules_good", "_dd.appsec.json": "{\"triggers\":[\n {\n \"rule\": {\n \"id\": \"blk-001-002\",\n \"name\": \"Block User Addresses\",\n \"on_match\": [\n \"block\"\n ],\n \"tags\": {\n \"category\": \"security_response\",\n \"type\": \"block_user\"\n }\n },\n \"rule_matches\": [\n {\n \"operator\": \"exact_match\",\n \"operator_value\": \"\",\n \"parameters\": [\n {\n \"address\": \"usr.id\",\n \"highlight\": [\n \"123456\"\n ],\n \"key_path\": [],\n \"value\": \"123456\"\n }\n ]\n }\n ]\n 
}\n]}", - "_dd.appsec.waf.version": "1.21.0", + "_dd.appsec.waf.version": "1.22.0", "_dd.base_service": "", "_dd.origin": "appsec", "_dd.p.appsec": "1", diff --git a/tests/snapshots/tests.contrib.flask.test_appsec_flask_snapshot.test_flask_userblock_match_403_json[flask_appsec_good_rules_env]_220.json b/tests/snapshots/tests.contrib.flask.test_appsec_flask_snapshot.test_flask_userblock_match_403_json[flask_appsec_good_rules_env]_220.json index 806e0de6295..41661a28f5a 100644 --- a/tests/snapshots/tests.contrib.flask.test_appsec_flask_snapshot.test_flask_userblock_match_403_json[flask_appsec_good_rules_env]_220.json +++ b/tests/snapshots/tests.contrib.flask.test_appsec_flask_snapshot.test_flask_userblock_match_403_json[flask_appsec_good_rules_env]_220.json @@ -11,7 +11,7 @@ "meta": { "_dd.appsec.event_rules.version": "rules_good", "_dd.appsec.json": "{\"triggers\":[\n {\n \"rule\": {\n \"id\": \"blk-001-002\",\n \"name\": \"Block User Addresses\",\n \"on_match\": [\n \"block\"\n ],\n \"tags\": {\n \"category\": \"security_response\",\n \"type\": \"block_user\"\n }\n },\n \"rule_matches\": [\n {\n \"operator\": \"exact_match\",\n \"operator_value\": \"\",\n \"parameters\": [\n {\n \"address\": \"usr.id\",\n \"highlight\": [\n \"123456\"\n ],\n \"key_path\": [],\n \"value\": \"123456\"\n }\n ]\n }\n ]\n }\n]}", - "_dd.appsec.waf.version": "1.21.0", + "_dd.appsec.waf.version": "1.22.0", "_dd.base_service": "", "_dd.origin": "appsec", "_dd.p.appsec": "1", diff --git a/tests/submod/custom_decorated_stuff.py b/tests/submod/custom_decorated_stuff.py new file mode 100644 index 00000000000..1150859b6dd --- /dev/null +++ b/tests/submod/custom_decorated_stuff.py @@ -0,0 +1,17 @@ +class App: + def __init__(self): + self.views = {} + + def route(self, path): + def wrapper(view): + self.views[path] = view + + return wrapper + + +app = App() + + +@app.route("/home") +def home(): + pass diff --git a/tests/suitespec.yml b/tests/suitespec.yml index c6a89720676..41fabd7aa88 100644 --- 
a/tests/suitespec.yml +++ b/tests/suitespec.yml @@ -151,6 +151,14 @@ components: vendor: - ddtrace/vendor/* suites: + conftest: + parallelism: 1 + paths: + - 'conftest.py' + - '**/conftest.py' + pattern: meta-testing + runner: hatch + snapshot: false ddtracerun: parallelism: 6 paths: @@ -195,6 +203,12 @@ suites: - tests/cache/* runner: riot snapshot: true + slotscheck: + parallelism: 1 + paths: + - 'ddtrace/**/*.py' + runner: hatch + snapshot: false profile: env: DD_TRACE_AGENT_URL: '' diff --git a/tests/telemetry/test_telemetry.py b/tests/telemetry/test_telemetry.py index d767090f6d2..558e9961afc 100644 --- a/tests/telemetry/test_telemetry.py +++ b/tests/telemetry/test_telemetry.py @@ -243,14 +243,12 @@ def test_handled_integration_error(test_agent_session, run_python_code_in_subpro _, stderr, status, _ = run_python_code_in_subprocess(code, env=env) assert status == 0, stderr - expected_stderr = b"failed to import" - assert expected_stderr in stderr + assert b"failed to enable ddtrace support for sqlite3" in stderr integrations_events = test_agent_session.get_events("app-integrations-change", subprocess=True) assert len(integrations_events) == 1 assert ( - integrations_events[0]["payload"]["integrations"][0]["error"] - == "failed to import ddtrace module 'ddtrace.contrib.sqlite3' when patching on import" + integrations_events[0]["payload"]["integrations"][0]["error"] == "module 'sqlite3' has no attribute 'connect'" ) # Get metric containing the integration error diff --git a/tests/telemetry/test_writer.py b/tests/telemetry/test_writer.py index 7718710ff60..bcc3be9e38c 100644 --- a/tests/telemetry/test_writer.py +++ b/tests/telemetry/test_writer.py @@ -356,6 +356,7 @@ def test_app_started_event_configuration_override(test_agent_session, run_python {"name": "DD_EXPERIMENTAL_APPSEC_STANDALONE_ENABLED", "origin": "default", "value": False}, {"name": "DD_HTTP_CLIENT_TAG_QUERY_STRING", "origin": "default", "value": None}, {"name": "DD_IAST_ENABLED", "origin": "default", 
"value": False}, + {"name": "DD_IAST_MAX_CONCURRENT_REQUESTS", "origin": "default", "value": 2}, {"name": "DD_IAST_REDACTION_ENABLED", "origin": "default", "value": True}, { "name": "DD_IAST_REDACTION_NAME_PATTERN", @@ -380,6 +381,7 @@ def test_app_started_event_configuration_override(test_agent_session, run_python {"name": "DD_IAST_REQUEST_SAMPLING", "origin": "default", "value": 30.0}, {"name": "DD_IAST_STACK_TRACE_ENABLED", "origin": "default", "value": True}, {"name": "DD_IAST_TELEMETRY_VERBOSITY", "origin": "default", "value": "INFORMATION"}, + {"name": "DD_IAST_VULNERABILITIES_PER_REQUEST", "origin": "default", "value": 2}, {"name": "DD_INJECT_FORCE", "origin": "env_var", "value": True}, {"name": "DD_INSTRUMENTATION_INSTALL_ID", "origin": "default", "value": None}, {"name": "DD_INSTRUMENTATION_INSTALL_TYPE", "origin": "default", "value": None}, diff --git a/tests/tracer/runtime/test_container.py b/tests/tracer/runtime/test_container.py index f9becf93a41..f3c28dc90e6 100644 --- a/tests/tracer/runtime/test_container.py +++ b/tests/tracer/runtime/test_container.py @@ -307,6 +307,7 @@ def test_get_container_info(file_contents, container_id, node_inode): if file_contents is None: mock_open.side_effect = FileNotFoundError + get_container_info.cache_clear() info = get_container_info() if info is not None: @@ -344,6 +345,7 @@ def test_get_container_info_exception(mock_log, mock_from_line): # DEV: We need at least 1 line for the loop to call `CGroupInfo.from_line` with get_mock_open(read_data="\r\n") as mock_open: # Assert calling `get_container_info()` does not bubble up the exception + get_container_info.cache_clear() assert get_container_info() is None # Assert we called everything we expected diff --git a/tests/tracer/test_propagation.py b/tests/tracer/test_propagation.py index 2e1a299c4d4..61fec650a70 100644 --- a/tests/tracer/test_propagation.py +++ b/tests/tracer/test_propagation.py @@ -7,6 +7,7 @@ import mock import pytest +import ddtrace from ddtrace import 
tracer as ddtracer from ddtrace._trace._span_link import SpanLink from ddtrace._trace.context import Context @@ -45,6 +46,7 @@ from tests.contrib.fastapi.conftest import test_spans as fastapi_test_spans # noqa:F401 from tests.contrib.fastapi.conftest import tracer # noqa:F401 +from ..utils import override_env from ..utils import override_global_config @@ -318,95 +320,107 @@ def test_extract(tracer): # noqa: F811 assert len(context.get_all_baggage_items()) == 3 +@pytest.mark.parametrize("sca_enabled", ["true", "false"]) @pytest.mark.parametrize("appsec_enabled", [True, False]) @pytest.mark.parametrize("iast_enabled", [True, False]) def test_asm_standalone_minimum_trace_per_minute_has_no_downstream_propagation( - tracer, appsec_enabled, iast_enabled # noqa: F811 + tracer, sca_enabled, appsec_enabled, iast_enabled # noqa: F811 ): - if not appsec_enabled and not iast_enabled: - pytest.skip("AppSec or IAST must be enabled") - - tracer.configure(appsec_enabled=appsec_enabled, appsec_standalone_enabled=True, iast_enabled=iast_enabled) - try: - headers = { - "x-datadog-trace-id": "1234", - "x-datadog-parent-id": "5678", - "x-datadog-sampling-priority": str(USER_KEEP), - "x-datadog-origin": "synthetics", - "x-datadog-tags": "_dd.p.test=value,any=tag", - "ot-baggage-key1": "value1", - } + if not appsec_enabled and not iast_enabled and sca_enabled == "false": + pytest.skip("SCA, AppSec or IAST must be enabled") + + with override_env({"DD_APPSEC_SCA_ENABLED": sca_enabled}): + ddtrace.config._reset() + + tracer.configure(appsec_enabled=appsec_enabled, appsec_standalone_enabled=True, iast_enabled=iast_enabled) + try: + headers = { + "x-datadog-trace-id": "1234", + "x-datadog-parent-id": "5678", + "x-datadog-sampling-priority": str(USER_KEEP), + "x-datadog-origin": "synthetics", + "x-datadog-tags": "_dd.p.test=value,any=tag", + "ot-baggage-key1": "value1", + } - context = HTTPPropagator.extract(headers) + context = HTTPPropagator.extract(headers) - 
tracer.context_provider.activate(context) + tracer.context_provider.activate(context) - with tracer.trace("local_root_span0") as span: - # First span should be kept, as we keep 1 per min - assert span.trace_id == 1234 - assert span.parent_id == 5678 - # Priority is unset - assert span.context.sampling_priority is None - assert "_sampling_priority_v1" not in span._metrics - assert span.context.dd_origin == "synthetics" - assert "_dd.p.test" in span.context._meta - assert "_dd.p.appsec" not in span.context._meta + with tracer.trace("local_root_span0") as span: + # First span should be kept, as we keep 1 per min + assert span.trace_id == 1234 + assert span.parent_id == 5678 + # Priority is unset + assert span.context.sampling_priority is None + assert "_sampling_priority_v1" not in span._metrics + assert span.context.dd_origin == "synthetics" + assert "_dd.p.test" in span.context._meta + assert "_dd.p.appsec" not in span.context._meta - next_headers = {} - HTTPPropagator.inject(span.context, next_headers) + next_headers = {} + HTTPPropagator.inject(span.context, next_headers) - # Ensure propagation of headers is interrupted - assert "x-datadog-origin" not in next_headers - assert "x-datadog-tags" not in next_headers - assert "x-datadog-trace-id" not in next_headers - assert "x-datadog-parent-id" not in next_headers - assert "x-datadog-sampling-priority" not in next_headers + # Ensure propagation of headers is interrupted + assert "x-datadog-origin" not in next_headers + assert "x-datadog-tags" not in next_headers + assert "x-datadog-trace-id" not in next_headers + assert "x-datadog-parent-id" not in next_headers + assert "x-datadog-sampling-priority" not in next_headers - # Span priority was unset, but as we keep 1 per min, it should be kept - # Since we have a rate limiter, priorities used are USER_KEEP and USER_REJECT - assert span._metrics["_sampling_priority_v1"] == USER_KEEP + # Span priority was unset, but as we keep 1 per min, it should be kept + # Since we 
have a rate limiter, priorities used are USER_KEEP and USER_REJECT + assert span._metrics["_sampling_priority_v1"] == USER_KEEP - finally: - tracer.configure(appsec_enabled=False, appsec_standalone_enabled=False) + finally: + with override_env({"DD_APPSEC_SCA_ENABLED": "0"}): + ddtrace.config._reset() + tracer.configure(appsec_enabled=False, appsec_standalone_enabled=False) +@pytest.mark.parametrize("sca_enabled", ["true", "false"]) @pytest.mark.parametrize("appsec_enabled", [True, False]) @pytest.mark.parametrize("iast_enabled", [True, False]) def test_asm_standalone_missing_propagation_tags_no_appsec_event_trace_dropped( - tracer, appsec_enabled, iast_enabled # noqa: F811 + tracer, sca_enabled, appsec_enabled, iast_enabled # noqa: F811 ): - if not appsec_enabled and not iast_enabled: - pytest.skip("AppSec or IAST must be enabled") + if not appsec_enabled and not iast_enabled and sca_enabled == "false": + pytest.skip("SCA, AppSec or IAST must be enabled") - tracer.configure(appsec_enabled=appsec_enabled, appsec_standalone_enabled=True, iast_enabled=iast_enabled) - try: - with tracer.trace("local_root_span0"): - # First span should be kept, as we keep 1 per min - pass + with override_env({"DD_APPSEC_SCA_ENABLED": sca_enabled}): + ddtrace.config._reset() - headers = {} + tracer.configure(appsec_enabled=appsec_enabled, appsec_standalone_enabled=True, iast_enabled=iast_enabled) + try: + with tracer.trace("local_root_span0"): + # First span should be kept, as we keep 1 per min + pass - context = HTTPPropagator.extract(headers) + headers = {} - tracer.context_provider.activate(context) + context = HTTPPropagator.extract(headers) - with tracer.trace("local_root_span") as span: - assert "_dd.p.appsec" not in span.context._meta + tracer.context_provider.activate(context) - next_headers = {} - HTTPPropagator.inject(span.context, next_headers) + with tracer.trace("local_root_span") as span: + assert "_dd.p.appsec" not in span.context._meta - # Ensure propagation of headers 
takes place as expected - assert "x-datadog-origin" not in next_headers - assert "x-datadog-tags" not in next_headers - assert "x-datadog-trace-id" not in next_headers - assert "x-datadog-parent-id" not in next_headers - assert "x-datadog-sampling-priority" not in next_headers + next_headers = {} + HTTPPropagator.inject(span.context, next_headers) - # Ensure span is dropped (no appsec event upstream or in this span) - assert span._metrics["_sampling_priority_v1"] == USER_REJECT - finally: - tracer.configure(appsec_enabled=False, appsec_standalone_enabled=False) + # Ensure propagation of headers takes place as expected + assert "x-datadog-origin" not in next_headers + assert "x-datadog-tags" not in next_headers + assert "x-datadog-trace-id" not in next_headers + assert "x-datadog-parent-id" not in next_headers + assert "x-datadog-sampling-priority" not in next_headers + + # Ensure span is dropped (no appsec event upstream or in this span) + assert span._metrics["_sampling_priority_v1"] == USER_REJECT + finally: + with override_env({"DD_APPSEC_SCA_ENABLED": "0"}): + ddtrace.config._reset() + tracer.configure(appsec_enabled=False, appsec_standalone_enabled=False) def test_asm_standalone_missing_propagation_tags_appsec_event_present_trace_kept(tracer): # noqa: F811 @@ -443,58 +457,63 @@ def test_asm_standalone_missing_propagation_tags_appsec_event_present_trace_kept tracer.configure(appsec_enabled=False, appsec_standalone_enabled=False) +@pytest.mark.parametrize("sca_enabled", ["true", "false"]) @pytest.mark.parametrize("appsec_enabled", [True, False]) @pytest.mark.parametrize("iast_enabled", [True, False]) def test_asm_standalone_missing_appsec_tag_no_appsec_event_propagation_resets( - tracer, appsec_enabled, iast_enabled # noqa: F811 + tracer, sca_enabled, appsec_enabled, iast_enabled # noqa: F811 ): - if not appsec_enabled and not iast_enabled: - pytest.skip("AppSec or IAST must be enabled") + if not appsec_enabled and not iast_enabled and sca_enabled == "false": + 
pytest.skip("SCA, AppSec or IAST must be enabled") + + with override_env({"DD_APPSEC_SCA_ENABLED": sca_enabled}): + ddtrace.config._reset() + tracer.configure(appsec_enabled=appsec_enabled, appsec_standalone_enabled=True, iast_enabled=iast_enabled) + try: + with tracer.trace("local_root_span0"): + # First span should be kept, as we keep 1 per min + pass + + headers = { + "x-datadog-trace-id": "1234", + "x-datadog-parent-id": "5678", + "x-datadog-sampling-priority": str(USER_KEEP), + "x-datadog-origin": "synthetics", + "x-datadog-tags": "_dd.p.test=value,any=tag", + "ot-baggage-key1": "value1", + } - tracer.configure(appsec_enabled=appsec_enabled, appsec_standalone_enabled=True, iast_enabled=iast_enabled) - try: - with tracer.trace("local_root_span0"): - # First span should be kept, as we keep 1 per min - pass + context = HTTPPropagator.extract(headers) - headers = { - "x-datadog-trace-id": "1234", - "x-datadog-parent-id": "5678", - "x-datadog-sampling-priority": str(USER_KEEP), - "x-datadog-origin": "synthetics", - "x-datadog-tags": "_dd.p.test=value,any=tag", - "ot-baggage-key1": "value1", - } + tracer.context_provider.activate(context) - context = HTTPPropagator.extract(headers) + with tracer.trace("local_root_span") as span: + assert span.trace_id == 1234 + assert span.parent_id == 5678 + # Priority is unset + assert span.context.sampling_priority is None + assert "_sampling_priority_v1" not in span._metrics + assert span.context.dd_origin == "synthetics" + assert "_dd.p.test" in span.context._meta + assert "_dd.p.appsec" not in span.context._meta - tracer.context_provider.activate(context) + next_headers = {} + HTTPPropagator.inject(span.context, next_headers) - with tracer.trace("local_root_span") as span: - assert span.trace_id == 1234 - assert span.parent_id == 5678 - # Priority is unset - assert span.context.sampling_priority is None - assert "_sampling_priority_v1" not in span._metrics - assert span.context.dd_origin == "synthetics" - assert "_dd.p.test" 
in span.context._meta - assert "_dd.p.appsec" not in span.context._meta + # Ensure propagation of headers takes place as expected + assert "x-datadog-origin" not in next_headers + assert "x-datadog-tags" not in next_headers + assert "x-datadog-trace-id" not in next_headers + assert "x-datadog-parent-id" not in next_headers + assert "x-datadog-sampling-priority" not in next_headers - next_headers = {} - HTTPPropagator.inject(span.context, next_headers) - - # Ensure propagation of headers takes place as expected - assert "x-datadog-origin" not in next_headers - assert "x-datadog-tags" not in next_headers - assert "x-datadog-trace-id" not in next_headers - assert "x-datadog-parent-id" not in next_headers - assert "x-datadog-sampling-priority" not in next_headers - - # Priority was unset, and trace is not kept, so it should be dropped - # As we have a rate limiter, priorities used are USER_KEEP and USER_REJECT - assert span._metrics["_sampling_priority_v1"] == USER_REJECT - finally: - tracer.configure(appsec_enabled=False, appsec_standalone_enabled=False) + # Priority was unset, and trace is not kept, so it should be dropped + # As we have a rate limiter, priorities used are USER_KEEP and USER_REJECT + assert span._metrics["_sampling_priority_v1"] == USER_REJECT + finally: + with override_env({"DD_APPSEC_SCA_ENABLED": "false"}): + ddtrace.config._reset() + tracer.configure(appsec_enabled=False, appsec_standalone_enabled=False) def test_asm_standalone_missing_appsec_tag_appsec_event_present_trace_kept( @@ -546,131 +565,141 @@ def test_asm_standalone_missing_appsec_tag_appsec_event_present_trace_kept( @pytest.mark.parametrize("upstream_priority", ["1", "2"]) +@pytest.mark.parametrize("sca_enabled", ["true", "false"]) @pytest.mark.parametrize("appsec_enabled", [True, False]) @pytest.mark.parametrize("iast_enabled", [True, False]) def test_asm_standalone_present_appsec_tag_no_appsec_event_propagation_set_to_user_keep( - tracer, upstream_priority, appsec_enabled, 
iast_enabled # noqa: F811 + tracer, upstream_priority, sca_enabled, appsec_enabled, iast_enabled # noqa: F811 ): - if not appsec_enabled and not iast_enabled: - pytest.skip("AppSec or IAST must be enabled") - - tracer.configure(appsec_enabled=appsec_enabled, appsec_standalone_enabled=True, iast_enabled=iast_enabled) - try: - with tracer.trace("local_root_span0"): - # First span should be kept, as we keep 1 per min - pass - - headers = { - "x-datadog-trace-id": "1234", - "x-datadog-parent-id": "5678", - "x-datadog-sampling-priority": upstream_priority, - "x-datadog-origin": "synthetics", - "x-datadog-tags": "_dd.p.appsec=1,any=tag", - "ot-baggage-key1": "value1", - } + if not appsec_enabled and not iast_enabled and sca_enabled == "false": + pytest.skip("SCA, AppSec or IAST must be enabled") + + with override_env({"DD_APPSEC_SCA_ENABLED": sca_enabled}): + ddtrace.config._reset() + tracer.configure(appsec_enabled=appsec_enabled, appsec_standalone_enabled=True, iast_enabled=iast_enabled) + try: + with tracer.trace("local_root_span0"): + # First span should be kept, as we keep 1 per min + pass + + headers = { + "x-datadog-trace-id": "1234", + "x-datadog-parent-id": "5678", + "x-datadog-sampling-priority": upstream_priority, + "x-datadog-origin": "synthetics", + "x-datadog-tags": "_dd.p.appsec=1,any=tag", + "ot-baggage-key1": "value1", + } - context = HTTPPropagator.extract(headers) + context = HTTPPropagator.extract(headers) - tracer.context_provider.activate(context) + tracer.context_provider.activate(context) - with tracer.trace("local_root_span") as span: - assert span.trace_id == 1234 - assert span.parent_id == 5678 - # Enforced user keep regardless of upstream priority - assert span.context.sampling_priority == USER_KEEP - assert span.context.dd_origin == "synthetics" - assert span.context._meta == { - "_dd.origin": "synthetics", - "_dd.p.dm": "-3", - "_dd.p.appsec": "1", - } - with tracer.trace("child_span") as child_span: - assert child_span.trace_id == 1234 - 
assert child_span.parent_id != 5678 - assert child_span.context.sampling_priority == USER_KEEP - assert child_span.context.dd_origin == "synthetics" - assert child_span.context._meta == { + with tracer.trace("local_root_span") as span: + assert span.trace_id == 1234 + assert span.parent_id == 5678 + # Enforced user keep regardless of upstream priority + assert span.context.sampling_priority == USER_KEEP + assert span.context.dd_origin == "synthetics" + assert span.context._meta == { "_dd.origin": "synthetics", "_dd.p.dm": "-3", "_dd.p.appsec": "1", } - - next_headers = {} - HTTPPropagator.inject(span.context, next_headers) - assert next_headers["x-datadog-origin"] == "synthetics" - assert next_headers["x-datadog-sampling-priority"] == str(USER_KEEP) - assert next_headers["x-datadog-trace-id"] == "1234" - assert next_headers["x-datadog-tags"].startswith("_dd.p.appsec=1,") - - # Ensure span sets user keep regardless of received priority (appsec event upstream) - assert span._metrics["_sampling_priority_v1"] == USER_KEEP - - finally: - tracer.configure(appsec_enabled=False, appsec_standalone_enabled=False) + with tracer.trace("child_span") as child_span: + assert child_span.trace_id == 1234 + assert child_span.parent_id != 5678 + assert child_span.context.sampling_priority == USER_KEEP + assert child_span.context.dd_origin == "synthetics" + assert child_span.context._meta == { + "_dd.origin": "synthetics", + "_dd.p.dm": "-3", + "_dd.p.appsec": "1", + } + + next_headers = {} + HTTPPropagator.inject(span.context, next_headers) + assert next_headers["x-datadog-origin"] == "synthetics" + assert next_headers["x-datadog-sampling-priority"] == str(USER_KEEP) + assert next_headers["x-datadog-trace-id"] == "1234" + assert next_headers["x-datadog-tags"].startswith("_dd.p.appsec=1,") + + # Ensure span sets user keep regardless of received priority (appsec event upstream) + assert span._metrics["_sampling_priority_v1"] == USER_KEEP + + finally: + with 
override_env({"DD_APPSEC_SCA_ENABLED": sca_enabled}): + ddtrace.config._reset() + tracer.configure(appsec_enabled=False, appsec_standalone_enabled=False) @pytest.mark.parametrize("upstream_priority", ["1", "2"]) +@pytest.mark.parametrize("sca_enabled", ["true", "false"]) @pytest.mark.parametrize("appsec_enabled", [True, False]) @pytest.mark.parametrize("iast_enabled", [True, False]) def test_asm_standalone_present_appsec_tag_appsec_event_present_propagation_force_keep( - tracer, upstream_priority, appsec_enabled, iast_enabled # noqa: F811 + tracer, upstream_priority, sca_enabled, appsec_enabled, iast_enabled # noqa: F811 ): - if not appsec_enabled and not iast_enabled: - pytest.skip("AppSec or IAST must be enabled") - - tracer.configure(appsec_enabled=appsec_enabled, appsec_standalone_enabled=True, iast_enabled=iast_enabled) - try: - with tracer.trace("local_root_span0"): - # First span should be kept, as we keep 1 per min - pass - - headers = { - "x-datadog-trace-id": "1234", - "x-datadog-parent-id": "5678", - "x-datadog-sampling-priority": upstream_priority, - "x-datadog-origin": "synthetics", - "x-datadog-tags": "_dd.p.appsec=1,any=tag", - "ot-baggage-key1": "value1", - } + if not appsec_enabled and not iast_enabled and sca_enabled == "false": + pytest.skip("SCA, AppSec or IAST must be enabled") + + with override_env({"DD_APPSEC_SCA_ENABLED": sca_enabled}): + ddtrace.config._reset() + tracer.configure(appsec_enabled=appsec_enabled, appsec_standalone_enabled=True, iast_enabled=iast_enabled) + try: + with tracer.trace("local_root_span0"): + # First span should be kept, as we keep 1 per min + pass + + headers = { + "x-datadog-trace-id": "1234", + "x-datadog-parent-id": "5678", + "x-datadog-sampling-priority": upstream_priority, + "x-datadog-origin": "synthetics", + "x-datadog-tags": "_dd.p.appsec=1,any=tag", + "ot-baggage-key1": "value1", + } - context = HTTPPropagator.extract(headers) + context = HTTPPropagator.extract(headers) - 
tracer.context_provider.activate(context) + tracer.context_provider.activate(context) - with tracer.trace("local_root_span") as span: - _asm_manual_keep(span) - assert span.trace_id == 1234 - assert span.parent_id == 5678 - assert span.context.sampling_priority == USER_KEEP # user keep always - assert span.context.dd_origin == "synthetics" - assert span.context._meta == { - "_dd.origin": "synthetics", - "_dd.p.dm": "-4", - "_dd.p.appsec": "1", - } - with tracer.trace("child_span") as child_span: - assert child_span.trace_id == 1234 - assert child_span.parent_id != 5678 - assert child_span.context.sampling_priority == USER_KEEP # user keep always - assert child_span.context.dd_origin == "synthetics" - assert child_span.context._meta == { + with tracer.trace("local_root_span") as span: + _asm_manual_keep(span) + assert span.trace_id == 1234 + assert span.parent_id == 5678 + assert span.context.sampling_priority == USER_KEEP # user keep always + assert span.context.dd_origin == "synthetics" + assert span.context._meta == { "_dd.origin": "synthetics", "_dd.p.dm": "-4", "_dd.p.appsec": "1", } - - next_headers = {} - HTTPPropagator.inject(span.context, next_headers) - assert next_headers["x-datadog-origin"] == "synthetics" - assert next_headers["x-datadog-sampling-priority"] == str(USER_KEEP) # user keep always - assert next_headers["x-datadog-trace-id"] == "1234" - assert next_headers["x-datadog-tags"].startswith("_dd.p.appsec=1,") - - # Ensure span set to user keep regardless received priority (appsec event upstream) - assert span._metrics["_sampling_priority_v1"] == USER_KEEP # user keep always - - finally: - tracer.configure(appsec_enabled=False, appsec_standalone_enabled=False) + with tracer.trace("child_span") as child_span: + assert child_span.trace_id == 1234 + assert child_span.parent_id != 5678 + assert child_span.context.sampling_priority == USER_KEEP # user keep always + assert child_span.context.dd_origin == "synthetics" + assert child_span.context._meta == 
{ + "_dd.origin": "synthetics", + "_dd.p.dm": "-4", + "_dd.p.appsec": "1", + } + + next_headers = {} + HTTPPropagator.inject(span.context, next_headers) + assert next_headers["x-datadog-origin"] == "synthetics" + assert next_headers["x-datadog-sampling-priority"] == str(USER_KEEP) # user keep always + assert next_headers["x-datadog-trace-id"] == "1234" + assert next_headers["x-datadog-tags"].startswith("_dd.p.appsec=1,") + + # Ensure span set to user keep regardless received priority (appsec event upstream) + assert span._metrics["_sampling_priority_v1"] == USER_KEEP # user keep always + + finally: + with override_env({"DD_APPSEC_SCA_ENABLED": sca_enabled}): + ddtrace.config._reset() + tracer.configure(appsec_enabled=False, appsec_standalone_enabled=False) def test_extract_with_baggage_http_propagation(tracer): # noqa: F811 diff --git a/tests/tracer/test_tracer.py b/tests/tracer/test_tracer.py index f432403d3f9..4cdcf876aba 100644 --- a/tests/tracer/test_tracer.py +++ b/tests/tracer/test_tracer.py @@ -2043,30 +2043,38 @@ def test_import_ddtrace_tracer_not_module(): assert isinstance(tracer, Tracer) +@pytest.mark.parametrize("sca_enabled", ["true", "false"]) @pytest.mark.parametrize("appsec_enabled", [True, False]) @pytest.mark.parametrize("iast_enabled", [True, False]) -def test_asm_standalone_configuration(appsec_enabled, iast_enabled): - if not appsec_enabled and not iast_enabled: - pytest.skip("AppSec or IAST must be enabled") +def test_asm_standalone_configuration(sca_enabled, appsec_enabled, iast_enabled): + if not appsec_enabled and not iast_enabled and sca_enabled == "false": + pytest.skip("SCA, AppSec or IAST must be enabled") + + with override_env({"DD_APPSEC_SCA_ENABLED": sca_enabled}): + ddtrace.config._reset() + tracer = ddtrace.Tracer() + tracer.configure(appsec_enabled=appsec_enabled, iast_enabled=iast_enabled, appsec_standalone_enabled=True) + if appsec_enabled: + assert tracer._asm_enabled is True + if iast_enabled: + assert tracer._iast_enabled is 
True + if sca_enabled == "true": + assert bool(ddtrace.config._sca_enabled) is True + + assert tracer._appsec_standalone_enabled is True + assert tracer._apm_opt_out is True + assert tracer.enabled is False + + assert isinstance(tracer._sampler.limiter, RateLimiter) + assert tracer._sampler.limiter.rate_limit == 1 + assert tracer._sampler.limiter.time_window == 60e9 + + assert tracer._compute_stats is False - tracer = ddtrace.Tracer() - tracer.configure(appsec_enabled=appsec_enabled, iast_enabled=iast_enabled, appsec_standalone_enabled=True) - if appsec_enabled: - assert tracer._asm_enabled is True - if iast_enabled: - assert tracer._iast_enabled is True - - assert tracer._appsec_standalone_enabled is True - assert tracer._apm_opt_out is True - assert tracer.enabled is False - - assert isinstance(tracer._sampler.limiter, RateLimiter) - assert tracer._sampler.limiter.rate_limit == 1 - assert tracer._sampler.limiter.time_window == 60e9 - - assert tracer._compute_stats is False # reset tracer values - tracer.configure(appsec_enabled=False, iast_enabled=False, appsec_standalone_enabled=False) + with override_env({"DD_APPSEC_SCA_ENABLED": "false"}): + ddtrace.config._reset() + tracer.configure(appsec_enabled=False, iast_enabled=False, appsec_standalone_enabled=False) def test_gc_not_used_on_root_spans(): diff --git a/tests/wait-for-services.py b/tests/wait-for-services.py index 2f3fc29e7b3..048cb6948a8 100644 --- a/tests/wait-for-services.py +++ b/tests/wait-for-services.py @@ -1,10 +1,16 @@ +import logging +import os import sys import time +import typing as t from cassandra.cluster import Cluster from cassandra.cluster import NoHostAvailable from contrib.config import CASSANDRA_CONFIG +from contrib.config import ELASTICSEARCH_CONFIG +from contrib.config import HTTPBIN_CONFIG from contrib.config import MYSQL_CONFIG +from contrib.config import OPENSEARCH_CONFIG from contrib.config import POSTGRES_CONFIG from contrib.config import RABBITMQ_CONFIG from contrib.config 
import VERTICA_CONFIG @@ -12,72 +18,83 @@ import mysql.connector from psycopg2 import OperationalError from psycopg2 import connect +import requests import vertica_python -def try_until_timeout(exception): +logging.basicConfig(level=logging.INFO) +log = logging.getLogger(__name__) + + +def try_until_timeout(exception, tries: int = 100, timeout: float = 0.2, args: t.Optional[t.Dict[str, t.Any]] = None): """Utility decorator that tries to call a check until there is a timeout. The default timeout is about 20 seconds. """ + if not args: + args = {} def wrap(fn): - def wrapper(*args, **kwargs): + def wrapper(**kwargs): err = None - for _ in range(100): + _kwargs = args.copy() + _kwargs.update(kwargs) + + for i in range(tries): try: - fn() + log.info("Attempt %d: %s(%r)", i, fn.__name__, _kwargs) + fn(**_kwargs) except exception as e: err = e - time.sleep(0.2) + time.sleep(timeout) else: break else: if err: raise err + log.info("Succeeded: %s", fn.__name__) return wrapper return wrap -@try_until_timeout(OperationalError) -def check_postgres(): - conn = connect(**POSTGRES_CONFIG) +@try_until_timeout(OperationalError, args={"pg_config": POSTGRES_CONFIG}) +def check_postgres(pg_config): + conn = connect(**pg_config) try: conn.cursor().execute("SELECT 1;") finally: conn.close() -@try_until_timeout(NoHostAvailable) -def check_cassandra(): - with Cluster(**CASSANDRA_CONFIG).connect() as conn: +@try_until_timeout(NoHostAvailable, args={"cassandra_config": CASSANDRA_CONFIG}) +def check_cassandra(cassandra_config): + with Cluster(**cassandra_config).connect() as conn: conn.execute("SELECT now() FROM system.local") -@try_until_timeout(Exception) -def check_mysql(): - conn = mysql.connector.connect(**MYSQL_CONFIG) +@try_until_timeout(Exception, args={"mysql_config": MYSQL_CONFIG}) +def check_mysql(mysql_config): + conn = mysql.connector.connect(**mysql_config) try: conn.cursor().execute("SELECT 1;") finally: conn.close() -@try_until_timeout(Exception) -def check_vertica(): - conn 
= vertica_python.connect(**VERTICA_CONFIG) +@try_until_timeout(Exception, args={"vertica_config": VERTICA_CONFIG}) +def check_vertica(vertica_config): + conn = vertica_python.connect(**vertica_config) try: conn.cursor().execute("SELECT 1;") finally: conn.close() -@try_until_timeout(Exception) -def check_rabbitmq(): - url = "amqp://{user}:{password}@{host}:{port}//".format(**RABBITMQ_CONFIG) +@try_until_timeout(Exception, args={"url": "amqp://{user}:{password}@{host}:{port}//".format(**RABBITMQ_CONFIG)}) +def check_rabbitmq(url): conn = kombu.Connection(url) try: conn.connect() @@ -85,17 +102,52 @@ def check_rabbitmq(): conn.release() +@try_until_timeout(Exception, args={"url": os.environ.get("DD_TRACE_AGENT_URL", "http://localhost:8126")}) +def check_agent(url): + if not url.endswith("/"): + url += "/" + + res = requests.get(url) + if res.status_code not in (404, 200): + raise Exception("Agent not ready") + + +@try_until_timeout(Exception, args={"url": "http://{host}:{port}/".format(**ELASTICSEARCH_CONFIG)}) +def check_elasticsearch(url): + requests.get(url).raise_for_status() + + +@try_until_timeout( + Exception, tries=120, timeout=1, args={"url": "http://{host}:{port}/".format(**OPENSEARCH_CONFIG)} +) # 2 minutes, OpenSearch is slow to start +def check_opensearch(url): + requests.get(url).raise_for_status() + + +@try_until_timeout(Exception, args={"url": "http://{host}:{port}/".format(**HTTPBIN_CONFIG)}) +def check_httpbin(url): + requests.get(url).raise_for_status() + + if __name__ == "__main__": check_functions = { "cassandra": check_cassandra, - "postgres": check_postgres, + "ddagent": check_agent, + "elasticsearch": check_elasticsearch, + "httpbin_local": check_httpbin, "mysql": check_mysql, - "vertica": check_vertica, + "opensearch": check_opensearch, + "postgres": check_postgres, "rabbitmq": check_rabbitmq, + "testagent": check_agent, + "vertica": check_vertica, } if len(sys.argv) >= 2: for service in sys.argv[1:]: - check_functions[service]() + if service 
not in check_functions: + log.warning("Unknown service: %s", service) + else: + check_functions[service]() else: print("usage: python {} SERVICE_NAME".format(sys.argv[0])) sys.exit(1)